pax_global_header00006660000000000000000000000064147617325330014525gustar00rootroot0000000000000052 comment=a367ec1040494cb1fbdf2ff8656a42f82d0b43b8 usql-0.19.19/000077500000000000000000000000001476173253300126725ustar00rootroot00000000000000usql-0.19.19/.github/000077500000000000000000000000001476173253300142325ustar00rootroot00000000000000usql-0.19.19/.github/dependabot.yml000066400000000000000000000002571476173253300170660ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: # Check for updates to GitHub Actions every weekday interval: "daily" usql-0.19.19/.github/workflows/000077500000000000000000000000001476173253300162675ustar00rootroot00000000000000usql-0.19.19/.github/workflows/announce.yml000066400000000000000000000111601476173253300206170ustar00rootroot00000000000000name: Announce Release on: release: types: [published] env: VER: ${{ github.ref_name }} HOMEBREW_REPO: https://kenshaw:${{ secrets.HOMEBREW_TOKEN }}@github.com/xo/homebrew-xo.git AUR_REPO: aur@aur.archlinux.org:usql.git jobs: bump-aur-package: name: Bump AUR Package runs-on: ubuntu-24.04 steps: - name: Add AUR SSH key uses: shimataro/ssh-key-action@v2 with: key: ${{ secrets.AUR_SSH_KEY }} name: id_ed25519 known_hosts: | aur.archlinux.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEuBKrPzbawxA/k2g6NcyV5jmqwJ2s+zpgZGZ7tpLIcN aur.archlinux.org ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDKF9vAFWdgm9Bi8uc+tYRBmXASBb5cB5iZsB7LOWWFeBrLp3r14w0/9S2vozjgqY5sJLDPONWoTTaVTbhe3vwO8CBKZTEt1AcWxuXNlRnk9FliR1/eNB9uz/7y1R0+c1Md+P98AJJSJWKN12nqIDIhjl2S1vOUvm7FNY43fU2knIhEbHybhwWeg+0wxpKwcAd/JeL5i92Uv03MYftOToUijd1pqyVFdJvQFhqD4v3M157jxS5FTOBrccAEjT+zYmFyD8WvKUa9vUclRddNllmBJdy4NyLB8SvVZULUPrP3QOlmzemeKracTlVOUG1wsDbxknF1BwSCU7CmU6UFP90kpWIyz66bP0bl67QAvlIc52Yix7pKJPbw85+zykvnfl2mdROsaT8p8R9nwCdFsBc9IiD0NhPEHcyHRwB8fokXTajk2QnGhL+zP5KnkmXnyQYOCUYo3EKMXIlVOVbPDgRYYT/XqvBuzq5S9rrU70KoI/S5lDnFfx/+lPLdtcnnEPk= aur.archlinux.org ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLMiLrP8pVi5BFX2i3vepSUnpedeiewE5XptnUnau+ZoeUOPkpoCgZZuYfpaIQfhhJJI5qgnjJmr4hyJbe/zxow= - name: Bump AUR Package run: | export WORKDIR=$(mktemp -d /tmp/aur-usql.XXXXXX) export REPO_PATH=$WORKDIR/aur-usql wget -O $WORKDIR/archive.tar.gz https://github.com/xo/usql/archive/${VER}.tar.gz export SHA256SUM=$(sha256sum $WORKDIR/archive.tar.gz|awk '{print $1}') export CHANGELOG=$( curl \ -s \ -H 'Accept: application/vnd.github+json' \ https://api.github.com/repos/xo/usql/releases/tags/$VER \ |jq -r .body \ |sed -e 's/\\r//g' -e 's/\[VirusTotal.*//' ) git clone $AUR_REPO $REPO_PATH git -C $REPO_PATH config user.name 'Kenneth Shaw' git -C $REPO_PATH config user.email 'kenshaw@gmail.com' sed -i "s/pkgver=.*$/pkgver=${VER#v}/" $REPO_PATH/PKGBUILD sed -i "s/sha256sums=.*$/sha256sums=('$SHA256SUM')/" $REPO_PATH/PKGBUILD sed -i "s/pkgrel=.*$/pkgrel=1/" $REPO_PATH/PKGBUILD sed -i "s/pkgver =.*$/pkgver = ${VER#v}/" $REPO_PATH/.SRCINFO sed -i "s%source =.*$%source = usql-${VER#v}.tar.gz::https://github.com/xo/usql/archive/${VER}.tar.gz%" $REPO_PATH/.SRCINFO sed -i "s/sha256sums =.*$/sha256sums = $SHA256SUM/" $REPO_PATH/.SRCINFO sed -i "s/pkgrel =.*$/pkgrel = 1/" $REPO_PATH/.SRCINFO git -C $REPO_PATH add PKGBUILD .SRCINFO git -C $REPO_PATH commit -m "$(printf %b "Update usql version to ${VER}\n\n${CHANGELOG}")" git -C $REPO_PATH show -C git -C $REPO_PATH push origin master bump-homebrew-formula: name: Bump Homebrew Formula runs-on: ubuntu-24.04 steps: - name: Bump Homebrew Formula run: | export WORKDIR=$(mktemp -d 
/tmp/homebrew-xo.XXXXXX) export REPO_PATH=$WORKDIR/homebrew-xo wget -O $WORKDIR/archive.tar.gz https://github.com/xo/usql/archive/${VER}.tar.gz export SHA256SUM=$(sha256sum $WORKDIR/archive.tar.gz|awk '{print $1}') export CHANGELOG=$( curl \ -s \ -H 'Accept: application/vnd.github+json' \ https://api.github.com/repos/xo/usql/releases/tags/$VER \ |jq -r .body \ |sed -e 's/\\r//g' -e 's/\[VirusTotal.*//' ) git clone $HOMEBREW_REPO $REPO_PATH git -C $REPO_PATH config user.name 'Kenneth Shaw' git -C $REPO_PATH config user.email 'ken@usql.app' sed -i "s%url \".*$%url \"https://github.com/xo/usql/archive/${VER}.tar.gz\"%" $REPO_PATH/Formula/usql.rb sed -i "s/sha256 \".*$/sha256 \"$SHA256SUM\"/" $REPO_PATH/Formula/usql.rb git -C $REPO_PATH add Formula/usql.rb git -C $REPO_PATH commit -m "$(printf %b "Update usql version to ${VER}\n\n${CHANGELOG}")" git -C $REPO_PATH show -C git -C $REPO_PATH push origin master announce-discord: name: Announce Discord runs-on: ubuntu-24.04 steps: - name: Announce Discord run: | curl \ -H 'Content-Type: application/json' \ -d '{"username": "usql", "content": "> *usql ${{ github.ref_name }}* has been released!\n\nGet it here: https://github.com/xo/usql/releases/${{ github.ref_name }}"}' \ ${{ secrets.DISCORD_WEBHOOK_URL }} usql-0.19.19/.github/workflows/release.yml000066400000000000000000000161751476173253300204440ustar00rootroot00000000000000name: Release on: push env: APP: usql VER: ${{ github.ref_name }} GO_VERSION: stable jobs: build_for_linux: name: Build for Linux runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: arch: [amd64, arm, arm64] steps: - name: Install build dependencies run: | sudo apt-get -qq update sudo apt-get install -y \ build-essential \ qemu-user \ gcc-arm-linux-gnueabihf \ g++-arm-linux-gnueabihf \ gcc-aarch64-linux-gnu \ g++-aarch64-linux-gnu \ libstdc++6-armhf-cross \ libstdc++6-arm64-cross \ libc6-dev-armhf-cross \ libc6-dev-arm64-cross \ file - name: Checkout uses: actions/checkout@v4 - name: Setup Go uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} - name: Build ${{ matrix.arch }} run: | ./build.sh -v $VER -a ${{ matrix.arch }} - name: Build ${{ matrix.arch }} (static) if: matrix.arch != 'arm' run: | ./build.sh -v $VER -a ${{ matrix.arch }} -s - name: Archive artifacts uses: actions/upload-artifact@v4 with: name: dist-linux-${{ matrix.arch }} path: build/linux/**/* if-no-files-found: error build_for_macos: name: Build for macOS runs-on: macos-latest strategy: matrix: arch: [amd64, arm64] steps: - name: Install build dependencies run: | brew install coreutils - name: Checkout uses: actions/checkout@v4 - name: Setup Go uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} - name: Build ${{ matrix.arch }} run: | ./build.sh -v $VER -a ${{ matrix.arch }} - name: Archive artifacts uses: actions/upload-artifact@v4 with: name: dist-darwin-${{ matrix.arch }} path: build/darwin/**/* if-no-files-found: error build_for_macos_universal: name: Build for macOS (universal) needs: - build_for_macos runs-on: macos-latest steps: - name: Install build dependencies run: | brew install coreutils - name: Download artifacts uses: actions/download-artifact@v4 - name: Build universal run: | if [ "$VER" = "master" ]; then VER=0.0.0-dev fi export WORKDIR=$PWD/build/darwin/universal/$VER mkdir -p $WORKDIR gtar -jxvf dist-darwin-amd64/*/*/*.tar.bz2 -C $WORKDIR $APP gtar -jxvf dist-darwin-amd64/*/*/*.tar.bz2 -C $WORKDIR LICENSE mv $WORKDIR/$APP $WORKDIR/$APP-amd64 gtar -jxvf dist-darwin-arm64/*/*/*.tar.bz2 -C $WORKDIR $APP mv 
$WORKDIR/$APP $WORKDIR/$APP-arm64 file $WORKDIR/$APP-{amd64,arm64} lipo -create -output $WORKDIR/$APP $WORKDIR/$APP-amd64 $WORKDIR/$APP-arm64 chmod +x $WORKDIR/$APP file $WORKDIR/$APP rm $WORKDIR/$APP-{amd64,arm64} sudo /usr/sbin/purge gtar -C $WORKDIR -cjf $WORKDIR/$APP-${VER#v}-darwin-universal.tar.bz2 $APP LICENSE ls -alh $WORKDIR/* sha256sum $WORKDIR/* - name: Archive artifacts uses: actions/upload-artifact@v4 with: name: dist-darwin-universal path: build/darwin/**/* if-no-files-found: error build_for_windows: name: Build for Windows runs-on: windows-latest steps: - name: Install build dependencies run: choco install zip - name: Checkout uses: actions/checkout@v4 - name: Setup Go uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} - name: Build amd64 shell: bash run: | ./build.sh -v $VER - name: Archive artifacts uses: actions/upload-artifact@v4 with: name: dist-windows path: build/windows/**/* if-no-files-found: error draft_release: name: Draft Release needs: - build_for_linux - build_for_macos - build_for_macos_universal - build_for_windows runs-on: ubuntu-24.04 steps: - name: Download artifacts uses: actions/download-artifact@v4 - name: Extract artifacts run: | mkdir /tmp/scan mkdir scan for i in dist-*/*/*/*.tar.bz2; do name=$(basename $i|cut -d- -f1) ver=$(sed -e 's/\.tar\.bz2$//' <<< $(basename $i)|cut -d- -f2-) echo "extracting $i ($name $ver)" tar -C /tmp/scan -jv -f $i -x $name hash=$(sha256sum /tmp/scan/$name|awk '{print $1}') mv /tmp/scan/$name ./scan/$name-$ver-${hash:0:8} done for i in dist-*/*/*/*.zip; do name=$(basename $i|cut -d- -f1) ver=$(sed -e 's/\.zip$//' <<< $(basename $i)|cut -d- -f2-) echo "extracting $i ($name $ver)" unzip -d /tmp/scan $i $name.exe hash=$(sha256sum /tmp/scan/$name.exe|awk '{print $1}') mv /tmp/scan/$name.exe ./scan/$name-$ver-${hash:0:8}.exe done file ./scan/* sha256sum ./scan/* - name: Submit to VirusTotal id: virustotal uses: crazy-max/ghaction-virustotal@v4 with: vt_api_key: ${{ secrets.VIRUSTOTAL_API_KEY }} request_rate: 4 files: | ./scan/* - name: Generate Release Notes id: generate_release_notes uses: softprops/action-gh-release@v2 if: startsWith(github.ref, 'refs/tags/v') with: name: ${{ env.APP }} ${{ env.VER }} draft: true generate_release_notes: true files: | dist-*/*/*/*.tar.bz2 dist-*/*/*/*.zip - name: Add VirusTotal Info to Release Notes if: startsWith(github.ref, 'refs/tags/v') run: | # github api url url=https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/${{ steps.generate_release_notes.outputs.id }} # get release notes body=$( curl \ -s \ -L \ -H "Accept: application/vnd.github+json" \ -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ $url \ | jq -r .body ) echo -e ">>>\n$body\n<<<" # append virustotal details to release notes nl=$'\n' body+="$nl$nl[VirusTotal](https://www.virustotal.com) analysis:$nl" while read -r -d, line; do name=$(sed -e 's/^\.\/scan\/\([^=]\+\)=.*/\1/' <<< "$line") vturl=$(sed -e 's/.*=\(https.*\)/\1/' <<< "$line") body+="* [$name]($vturl)$nl" done <<< "${{ steps.virustotal.outputs.analysis }}" # update release notes export body echo -e ">>>\n$body\n<<<" curl \ -s \ -L \ -X PATCH \ -H "Accept: application/vnd.github+json" \ -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ --variable '%body' \ --expand-data '{"body": "{{body:trim:json}}"}' \ $url usql-0.19.19/.github/workflows/test.yml000066400000000000000000000026561476173253300200020ustar00rootroot00000000000000on: [push, pull_request] 
name: Test usql jobs: test: name: Build and Test usql runs-on: ubuntu-24.04 services: cassandra: image: docker.io/usql/cassandra:latest ports: - 9042:9042 postgres: image: docker.io/usql/postgres:latest env: POSTGRES_PASSWORD: P4ssw0rd ports: - 5432:5432 mysql: image: docker.io/library/mariadb env: MYSQL_ROOT_PASSWORD: P4ssw0rd ports: - 3306:3306 sqlserver: image: mcr.microsoft.com/mssql/server:2022-latest env: ACCEPT_EULA: Y MSSQL_PID: Express SA_PASSWORD: Adm1nP@ssw0rd ports: - 1433:1433 steps: - name: Install Go uses: actions/setup-go@v5 with: go-version: stable - name: Install Packages run: | sudo apt-get -qq update sudo apt-get install -y build-essential libicu-dev unixodbc unixodbc-dev - name: Checkout code uses: actions/checkout@v4 - name: Unit Tests run: | go test -v ./stmt - name: Build with all drivers run: | ./build.sh -b -t all - name: Shell Tests run: | go run testcli.go &> output.log ls -alh output.log - name: Archive output uses: actions/upload-artifact@v4 with: name: output path: output.log if-no-files-found: error usql-0.19.19/.gitignore000066400000000000000000000002721476173253300146630ustar00rootroot00000000000000/usql /usql.exe /build/ /coverage.out /*.sql /*.txt .usql_history* .[a-f0-9]* *.ini *.csv *.db *.zip *.out *.sqlite3 *.sqlite3-journal *.duckdb *.wal /instantclient* /*.pc .vscode/ usql-0.19.19/CONTRIBUTING.md000066400000000000000000000060231476173253300151240ustar00rootroot00000000000000Contributing to usql ==================== Any contributions are welcome. If you found a bug, or a missing feature, take a look at existing [issues](https://github.com/xo/usql/issues) and create a new one if needed. You can also open up a [pull request](https://github.com/xo/usql/pulls) (PR) with code or documentation changes. # Adding a new driver 1. Add a new schema in [dburl](https://github.com/xo/dburl). 1. Create a new go package in `drivers`. It should have an `init()` function, that would call `drivers.Register()`. 1. Regenerate code in the `internal` package by running `internal/gen.sh`. 1. Add any new required modules using `go get` or by editing `go.mod` manually and running `go mod tidy`. 1. Run all tests, build `usql` and see if the new driver works. 1. Update `README.md`. > Tip: check out closed PRs for examples, and/or search the codebase for names of databases you're familiar with. # Enabling metadata introspection for a driver For `\d*` commands to work, `usql` needs to know how to read the structure of a database. A driver must provide a metadata reader, by setting the `NewMetadataReader` property in the `drivers.Driver` structure passed to `drivers.Register()`. This needs to be a function that given a database and reader options, returns a reader instance for this particular driver. If the database has a `information_schema` schema, with standard tables like `tables` and `columns`, you can use an existing reader from the `drivers/informationschema` package. Since there are usually minor difference in objects defined in that schema in different databases, there's a set of options to configure this reader. Refer to the [package docs](https://pkg.go.dev/github.com/xo/usql/drivers/metadata/informationschema) for details. If you can't use the `informationschema` reader, consider implementing a new one. 
It should implement at least one of the following reader interfaces:

* CatalogReader
* SchemaReader
* TableReader
* ColumnReader
* IndexReader
* IndexColumnReader
* FunctionReader
* FunctionColumnReader
* SequenceReader

Each of these interfaces consists of a single function that takes a `Filter` structure as an argument and returns a set of results and an error.

Example drivers using their own readers include:

* `sqlite3`
* `oracle` and `godror`, which share the same reader

If you want to use the `informationschema` reader, but need to override one or more readers, use the `metadata.NewPluginReader(readers ...Reader)` function. It returns an object that dispatches each reader function to the last reader passed in the arguments that implements it.

Example drivers extending an `informationschema` reader using a plugin reader:

* `postgres`

`\d*` commands are actually implemented by a metadata writer. There's currently only one, but it too can be replaced and/or extended.

# Enabling autocomplete for a driver

If a driver provides a metadata reader, the default completer will use it.

A driver can provide its own completer by setting the `NewCompleter` property in the `drivers.Driver` structure passed to `drivers.Register()`.

usql-0.19.19/LICENSE

The MIT License (MIT)

Copyright (c) 2016-2024 Kenneth Shaw

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

usql-0.19.19/README.md
Installing | Building | Database Support | Using | Features and Compatibility | Releases | Contributing

`usql` is a universal command-line interface for PostgreSQL, MySQL, Oracle Database, SQLite3, Microsoft SQL Server, [and many other databases][databases] including NoSQL and non-relational databases! `usql` provides a simple way to work with [SQL and NoSQL databases][databases] via a command-line inspired by PostgreSQL's `psql`. `usql` supports most of the core `psql` features, such as [variables][variables], [backticks][backticks], [backslash commands][commands] and has additional features that `psql` does not, such as [multiple database support][databases], [copying between databases][copying], [syntax highlighting][highlighting], [context-based completion][completion], and [terminal graphics][termgraphics]. Database administrators and developers that would prefer to work with a tool like `psql` with non-PostgreSQL databases, will find `usql` intuitive, easy-to-use, and a great replacement for the command-line clients/tools for other databases. [![Unit Tests][usql-ci-status]][usql-ci] [![Go Reference][goref-usql-status]][goref-usql] [![Releases][release-status]][Releases] [![Discord Discussion][discord-status]][discord] [usql-ci]: https://github.com/xo/usql/actions/workflows/test.yml "Test CI" [usql-ci-status]: https://github.com/xo/usql/actions/workflows/test.yml/badge.svg "Test CI" [goref-usql]: https://pkg.go.dev/github.com/xo/usql "Go Reference" [goref-usql-status]: https://pkg.go.dev/badge/github.com/xo/usql.svg "Go Reference" [release-status]: https://img.shields.io/github/v/release/xo/usql?display_name=tag&sort=semver "Latest Release" [discord]: https://discord.gg/WDWAgXwJqN "Discord Discussion" [discord-status]: https://img.shields.io/discord/829150509658013727.svg?label=Discord&logo=Discord&colorB=7289da&style=flat-square "Discord Discussion" [installing]: #installing "Installing" [databases]: #database-support "Database Support" [releases]: https://github.com/xo/usql/releases "Releases" ## Installing `usql` can be installed [via Release][], [via Homebrew][], [via AUR][], [via Scoop][], [via Go][], or [via Docker][]: [via Release]: #installing-via-release [via Homebrew]: #installing-via-homebrew-macos-and-linux [via AUR]: #installing-via-aur-arch-linux [via Scoop]: #installing-via-scoop-windows [via Go]: #installing-via-go [via Docker]: #installing-via-docker ### Installing via Release 1. [Download a release for your platform][releases] 2. Extract the `usql` or `usql.exe` file from the `.tar.bz2` or `.zip` file 3. Move the extracted executable to somewhere on your `$PATH` (Linux/macOS) or `%PATH%` (Windows) ### Installing via Homebrew (macOS and Linux) Install `usql` from the [`xo/xo` tap][xo-tap] in the usual way with the [`brew` command][homebrew]: ```sh # install usql with most drivers $ brew install xo/xo/usql ``` Support for [ODBC databases][databases] is available through the `--with-odbc` install flag: ```sh # add xo tap $ brew tap xo/xo # install usql with odbc support $ brew install --with-odbc usql ``` ### Installing via AUR (Arch Linux) Install `usql` from the [Arch Linux AUR][aur] in the usual way with the [`yay` command][yay]: ```sh # install usql with most drivers $ yay -S usql ``` Alternately, build and [install using `makepkg`][arch-makepkg]: ```sh $ git clone https://aur.archlinux.org/usql.git && cd usql $ makepkg -si ==> Making package: usql 0.12.10-1 (Fri 26 Aug 2022 05:56:09 AM WIB) ==> Checking runtime dependencies... ==> Checking buildtime dependencies... ==> Retrieving sources... -> Downloading usql-0.12.10.tar.gz... ... 
``` ### Installing via Scoop (Windows) Install `usql` using [Scoop](https://scoop.sh): ```powershell # Optional: Needed to run a remote script the first time > Set-ExecutionPolicy RemoteSigned -Scope CurrentUser # install scoop if not already installed > irm get.scoop.sh | iex # install usql with scoop > scoop install usql ``` ### Installing via Go Install `usql` in the usual Go fashion: ```sh # install latest usql version with base drivers $ go install github.com/xo/usql@latest # alternately, install usql with most drivers (see below for info about build tags) $ go install -tags most github.com/xo/usql@latest ``` See [below for information](#building) on `usql` build tags. ### Installing via Docker An [official container image (`docker.io/usql/usql`)][docker-hub] is maintained by the `usql` team, and can be used with Docker, Podman, or other container runtime. [docker-hub]: https://hub.docker.com/r/usql/usql Install `usql` with Docker, Podman, or other container runtime: ```sh # run interactive shell and mount the $PWD/data directory as a volume for use # within the container $ docker run --rm -it --volume $(pwd)/data:/data docker.io/usql/usql:latest sqlite3://data/test.db Trying to pull docker.io/usql/usql:latest... Getting image source signatures Copying blob af48168d69d8 done | Copying blob efc2b5ad9eec skipped: already exists Copying config 917ceb411d done | Writing manifest to image destination Connected with driver sqlite3 (SQLite3 3.45.1) Type "help" for help. sq:data/test.db=> \q # run postgres locally $ docker run --detach --rm --name=postgres --publish=5432:5432 --env=POSTGRES_PASSWORD=P4ssw0rd docker.io/usql/postgres # connect to local postgres instance $ docker run --rm --network host -it docker.io/usql/usql:latest postgres://postgres:P4ssw0rd@localhost Connected with driver postgres (PostgreSQL 16.3 (Debian 16.3-1.pgdg120+1)) Type "help" for help. pg:postgres@localhost=> \q # run specific usql version $ docker run --rm -it docker.io/usql/usql:0.19.3 ``` ## Building When building `usql` out-of-the-box with `go build` or `go install`, only the [`base` drivers][databases] for PostgreSQL, MySQL, SQLite3, Microsoft SQL Server, Oracle, CSVQ will be included in the build: ```sh # build/install with base drivers (PostgreSQL, MySQL, SQLite3, Microsoft SQL Server, # Oracle, CSVQ) $ go install github.com/xo/usql@master ``` Other databases can be enabled by specifying the [build tag for their database driver][databases]. ```sh # build/install with base, Avatica, and ODBC drivers $ go install -tags 'avatica odbc' github.com/xo/usql@master ``` For every build tag ``, there is also a `no_` build tag that will disable the driver: ```sh # build/install most drivers, excluding Avatica, Couchbase, and PostgreSQL $ go install -tags 'most no_avatica no_couchbase no_postgres' github.com/xo/usql@master ``` By specifying the build tags `most` or `all`, the build will include most, and all SQL drivers, respectively: ```sh # build/install with most drivers (excludes CGO drivers and problematic drivers) $ go install -tags most github.com/xo/usql@master # build/install all drivers (includes CGO drivers and problematic drivers) $ go install -tags all github.com/xo/usql@master ``` ## Database Support `usql` works with all Go standard library compatible SQL drivers supported by [`github.com/xo/dburl`][dburl]. 
The list of drivers that `usql` was built with can be displayed with the [`\drivers` command][commands]: ```sh $ cd $GOPATH/src/github.com/xo/usql # build excluding the base drivers, and including cassandra and moderncsqlite $ go build -tags 'no_postgres no_oracle no_sqlserver no_sqlite3 cassandra moderncsqlite' # show built driver support $ ./usql -c '\drivers' Available Drivers: cql [ca, scy, scylla, datastax, cassandra] memsql (mysql) [me] moderncsqlite [mq, sq, file, sqlite, sqlite3, modernsqlite] mysql [my, maria, aurora, mariadb, percona] tidb (mysql) [ti] vitess (mysql) [vt] ``` The above shows that `usql` was built with only the `mysql`, `cassandra` (ie, `cql`), and `moderncsqlite` drivers. The output above reflects information about the drivers available to `usql`, specifically the internal driver name, its primary URL scheme, the driver's available scheme aliases (shown in `[...]`), and the real/underlying driver (shown in `(...)`) for wire compatible drivers. ### Supported Database Schemes and Aliases The following are the [Go SQL drivers][go-sql] that `usql` supports, the associated database, scheme / build tag, and scheme aliases: | Database | Scheme / Tag | Scheme Aliases | Driver Package / Notes | | -------------------- | --------------- | ----------------------------------------------- | --------------------------------------------------------------------------- | | PostgreSQL | `postgres` | `pg`, `pgsql`, `postgresql` | [github.com/lib/pq][d-postgres] | | MySQL | `mysql` | `my`, `maria`, `aurora`, `mariadb`, `percona` | [github.com/go-sql-driver/mysql][d-mysql] | | Microsoft SQL Server | `sqlserver` | `ms`, `mssql`, `azuresql` | [github.com/microsoft/go-mssqldb][d-sqlserver] | | Oracle Database | `oracle` | `or`, `ora`, `oci`, `oci8`, `odpi`, `odpi-c` | [github.com/sijms/go-ora/v2][d-oracle] | | SQLite3 | `sqlite3` | `sq`, `sqlite`, `file` | [github.com/mattn/go-sqlite3][d-sqlite3] [†][f-cgo] | | ClickHouse | `clickhouse` | `ch` | [github.com/ClickHouse/clickhouse-go/v2][d-clickhouse] | | CSVQ | `csvq` | `cs`, `csv`, `tsv`, `json` | [github.com/mithrandie/csvq-driver][d-csvq] | | | | | | | Alibaba MaxCompute | `maxcompute` | `mc` | [sqlflow.org/gomaxcompute][d-maxcompute] | | Alibaba Tablestore | `ots` | `ot`, `tablestore` | [github.com/aliyun/aliyun-tablestore-go-sql-driver][d-ots] | | Apache Avatica | `avatica` | `av`, `phoenix` | [github.com/apache/calcite-avatica-go/v5][d-avatica] | | Apache H2 | `h2` | | [github.com/jmrobles/h2go][d-h2] | | Apache Hive | `hive` | `hi`, `hive2` | [sqlflow.org/gohive][d-hive] | | Apache Ignite | `ignite` | `ig`, `gridgain` | [github.com/amsokol/ignite-go-client/sql][d-ignite] | | AWS Athena | `athena` | `s3`, `aws`, `awsathena` | [github.com/uber/athenadriver/go][d-athena] | | Azure CosmosDB | `cosmos` | `cm` | [github.com/btnguyen2k/gocosmos][d-cosmos] | | Cassandra | `cassandra` | `ca`, `scy`, `scylla`, `datastax`, `cql` | [github.com/MichaelS11/go-cql-driver][d-cassandra] | | ChaiSQL | `chai` | `ci`, `genji`, `chaisql` | [github.com/chaisql/chai/driver][d-chai] | | Couchbase | `couchbase` | `n1`, `n1ql` | [github.com/couchbase/go_n1ql][d-couchbase] | | Cznic QL | `ql` | `cznic`, `cznicql` | [modernc.org/ql][d-ql] | | Databend | `databend` | `dd`, `bend` | [github.com/datafuselabs/databend-go][d-databend] | | Databricks | `databricks` | `br`, `brick`, `bricks`, `databrick` | [github.com/databricks/databricks-sql-go][d-databricks] | | DuckDB | `duckdb` | `dk`, `ddb`, `duck`, `file` | [github.com/marcboeker/go-duckdb][d-duckdb] 
[†][f-cgo] | | DynamoDb | `dynamodb` | `dy`, `dyn`, `dynamo`, `dynamodb` | [github.com/btnguyen2k/godynamo][d-dynamodb] | | Exasol | `exasol` | `ex`, `exa` | [github.com/exasol/exasol-driver-go][d-exasol] | | Firebird | `firebird` | `fb`, `firebirdsql` | [github.com/nakagami/firebirdsql][d-firebird] | | FlightSQL | `flightsql` | `fl`, `flight` | [github.com/apache/arrow/go/v17/arrow/flight/flightsql/driver][d-flightsql] | | Google BigQuery | `bigquery` | `bq` | [gorm.io/driver/bigquery/driver][d-bigquery] | | Google Spanner | `spanner` | `sp` | [github.com/googleapis/go-sql-spanner][d-spanner] | | Microsoft ADODB | `adodb` | `ad`, `ado` | [github.com/mattn/go-adodb][d-adodb] | | ModernC SQLite3 | `moderncsqlite` | `mq`, `modernsqlite` | [modernc.org/sqlite][d-moderncsqlite] | | MySQL MyMySQL | `mymysql` | `zm`, `mymy` | [github.com/ziutek/mymysql/godrv][d-mymysql] | | Netezza | `netezza` | `nz`, `nzgo` | [github.com/IBM/nzgo/v12][d-netezza] | | PostgreSQL PGX | `pgx` | `px` | [github.com/jackc/pgx/v5/stdlib][d-pgx] | | Presto | `presto` | `pr`, `prs`, `prestos`, `prestodb`, `prestodbs` | [github.com/prestodb/presto-go-client/presto][d-presto] | | RamSQL | `ramsql` | `rm`, `ram` | [github.com/proullon/ramsql/driver][d-ramsql] | | SAP ASE | `sapase` | `ax`, `ase`, `tds` | [github.com/thda/tds][d-sapase] | | SAP HANA | `saphana` | `sa`, `sap`, `hana`, `hdb` | [github.com/SAP/go-hdb/driver][d-saphana] | | Snowflake | `snowflake` | `sf` | [github.com/snowflakedb/gosnowflake][d-snowflake] | | Trino | `trino` | `tr`, `trs`, `trinos` | [github.com/trinodb/trino-go-client/trino][d-trino] | | Vertica | `vertica` | `ve` | [github.com/vertica/vertica-sql-go][d-vertica] | | VoltDB | `voltdb` | `vo`, `vdb`, `volt` | [github.com/VoltDB/voltdb-client-go/voltdbclient][d-voltdb] | | YDB | `ydb` | `yd`, `yds`, `ydbs` | [github.com/ydb-platform/ydb-go-sdk/v3][d-ydb] | | | | | | | GO DRiver for ORacle | `godror` | `gr` | [github.com/godror/godror][d-godror] [†][f-cgo] | | ODBC | `odbc` | `od` | [github.com/alexbrainman/odbc][d-odbc] [†][f-cgo] | | | | | | | Amazon Redshift | `postgres` | `rs`, `redshift` | [github.com/lib/pq][d-postgres] [‡][f-wire] | | CockroachDB | `postgres` | `cr`, `cdb`, `crdb`, `cockroach`, `cockroachdb` | [github.com/lib/pq][d-postgres] [‡][f-wire] | | OLE ODBC | `adodb` | `oo`, `ole`, `oleodbc` | [github.com/mattn/go-adodb][d-adodb] [‡][f-wire] | | SingleStore MemSQL | `mysql` | `me`, `memsql` | [github.com/go-sql-driver/mysql][d-mysql] [‡][f-wire] | | TiDB | `mysql` | `ti`, `tidb` | [github.com/go-sql-driver/mysql][d-mysql] [‡][f-wire] | | Vitess Database | `mysql` | `vt`, `vitess` | [github.com/go-sql-driver/mysql][d-mysql] [‡][f-wire] | | | | | | | Apache Impala | `impala` | `im` | [github.com/bippio/go-impala][d-impala] | | | | | | | **NO DRIVERS** | `no_base` | | _no base drivers (useful for development)_ | | **MOST DRIVERS** | `most` | | _all stable drivers_ | | **ALL DRIVERS** | `all` | | _all drivers, excluding bad drivers_ | | **BAD DRIVERS** | `bad` | | _bad drivers (broken/non-working drivers)_ | | **NO <TAG>** | `no_` | | _exclude driver with ``_ | [d-adodb]: https://github.com/mattn/go-adodb [d-athena]: https://github.com/uber/athenadriver [d-avatica]: https://github.com/apache/calcite-avatica-go [d-bigquery]: https://github.com/go-gorm/bigquery [d-cassandra]: https://github.com/MichaelS11/go-cql-driver [d-chai]: https://github.com/chaisql/chai [d-clickhouse]: https://github.com/ClickHouse/clickhouse-go [d-cosmos]: https://github.com/btnguyen2k/gocosmos [d-couchbase]: 
https://github.com/couchbase/go_n1ql [d-csvq]: https://github.com/mithrandie/csvq-driver [d-databend]: https://github.com/datafuselabs/databend-go [d-databricks]: https://github.com/databricks/databricks-sql-go [d-duckdb]: https://github.com/marcboeker/go-duckdb [d-dynamodb]: https://github.com/btnguyen2k/godynamo [d-exasol]: https://github.com/exasol/exasol-driver-go [d-firebird]: https://github.com/nakagami/firebirdsql [d-flightsql]: https://github.com/apache/arrow/tree/main/go/arrow/flight/flightsql/driver [d-godror]: https://github.com/godror/godror [d-h2]: https://github.com/jmrobles/h2go [d-hive]: https://github.com/sql-machine-learning/gohive [d-ignite]: https://github.com/amsokol/ignite-go-client [d-impala]: https://github.com/bippio/go-impala [d-maxcompute]: https://github.com/sql-machine-learning/gomaxcompute [d-moderncsqlite]: https://gitlab.com/cznic/sqlite [d-mymysql]: https://github.com/ziutek/mymysql [d-mysql]: https://github.com/go-sql-driver/mysql [d-netezza]: https://github.com/IBM/nzgo [d-odbc]: https://github.com/alexbrainman/odbc [d-oracle]: https://github.com/sijms/go-ora [d-ots]: https://github.com/aliyun/aliyun-tablestore-go-sql-driver [d-pgx]: https://github.com/jackc/pgx [d-postgres]: https://github.com/lib/pq [d-presto]: https://github.com/prestodb/presto-go-client [d-ql]: https://gitlab.com/cznic/ql [d-ramsql]: https://github.com/proullon/ramsql [d-sapase]: https://github.com/thda/tds [d-saphana]: https://github.com/SAP/go-hdb [d-snowflake]: https://github.com/snowflakedb/gosnowflake [d-spanner]: https://github.com/googleapis/go-sql-spanner [d-sqlite3]: https://github.com/mattn/go-sqlite3 [d-sqlserver]: https://github.com/microsoft/go-mssqldb [d-trino]: https://github.com/trinodb/trino-go-client [d-vertica]: https://github.com/vertica/vertica-sql-go [d-voltdb]: https://github.com/VoltDB/voltdb-client-go [d-ydb]: https://github.com/ydb-platform/ydb-go-sdk [f-cgo]: #f-cgo "Requires CGO" [f-wire]: #f-wire "Wire compatible"

† Requires CGO
‡ Wire compatible (see respective driver)

Any of the protocol schemes/aliases above can be used in conjunction when connecting to a database via the command-line or with the [`\connect` and `\copy` commands][commands]: ```sh # connect to a vitess database: $ usql vt://user:pass@host:3306/mydatabase $ usql (not connected)=> \c vitess://user:pass@host:3306/mydatabase $ usql (not connected)=> \copy csvq://. pg://localhost/ 'select * ....' 'myTable' ``` See [the section below on connecting to databases][connecting] for further details building DSNs/URLs for use with `usql`. ## Using After [installing][], `usql` can be used similarly to the following: ```sh # connect to a postgres database $ usql postgres://booktest@localhost/booktest # connect to an oracle database $ usql oracle://user:pass@host/oracle.sid # connect to a postgres database and run the commands contained in script.sql $ usql pg://localhost/ -f script.sql ``` ### Command-line Options Supported command-line options: ```sh $ usql --help usql, the universal command-line interface for SQL databases Usage: usql [flags]... [DSN] Arguments: DSN database url or connection name Flags: -c, --command COMMAND run only single command (SQL or internal) and exit -f, --file FILE execute commands from file and exit -w, --no-password never prompt for password -X, --no-init do not execute initialization scripts (aliases: --no-rc --no-psqlrc --no-usqlrc) -o, --out FILE output file -W, --password force password prompt (should happen automatically) -1, --single-transaction execute as a single transaction (if non-interactive) -v, --set NAME=VALUE set variable NAME to VALUE (see \set command, aliases: --var --variable) -N, --cset NAME=DSN set named connection NAME to DSN (see \cset command) -P, --pset VAR=ARG set printing option VAR to ARG (see \pset command) -F, --field-separator FIELD-SEPARATOR field separator for unaligned and CSV output (default "|" and ",") -R, --record-separator RECORD-SEPARATOR record separator for unaligned and CSV output (default \n) -T, --table-attr TABLE-ATTR set HTML table tag attributes (e.g., width, border) -A, --no-align unaligned table output mode -H, --html HTML table output mode -t, --tuples-only print rows only -x, --expanded turn on expanded table output -z, --field-separator-zero set field separator for unaligned and CSV output to zero byte -0, --record-separator-zero set record separator for unaligned and CSV output to zero byte -J, --json JSON output mode -C, --csv CSV output mode -G, --vertical vertical output mode -q, --quiet run quietly (no messages, only query output) --config string config file -V, --version output version information, then exit -?, --help show this help, then exit ``` ### Connecting to Databases `usql` opens a database connection by [parsing a URL][dburl] and passing the resulting connection string to [a database driver][databases]. Database connection strings (aka "data source name" or DSNs) have the same parsing rules as URLs, and can be passed to `usql` via command-line, or to the [`\connect`, `\c`, and `\copy` commands][commands]. Database connections can be defined with [the `\cset` command][connection-vars] or in [the `config.yaml` configuration file][config]. 
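For illustration, the same (purely hypothetical) DSN can be supplied on the command-line, passed interactively to `\c` / `\connect`, or saved as a named connection with `\cset` and then used by name:

```sh
# pass the DSN on the command-line
$ usql postgres://booktest:P4ssw0rd@localhost/booktest

# connect interactively
(not connected)=> \c postgres://booktest:P4ssw0rd@localhost/booktest

# define a named connection, then connect by name
(not connected)=> \cset booktest postgres://booktest:P4ssw0rd@localhost/booktest
(not connected)=> \c booktest
```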
#### Database Connection Strings Database connection strings look like the following: ```txt driver+transport://user:pass@host/dbname?opt1=a&opt2=b driver:/path/to/file /path/to/file name ``` Where the above are: | Component | Description | | ------------------------------- | ------------------------------------------------------------------------------------ | | `driver` | driver scheme name or scheme alias | | `transport` | `tcp`, `udp`, `unix` or driver name (for ODBC and ADODB) | | `user` | username | | `pass` | password | | `host` | hostname | | `dbname` [±][f-path] | database name, instance, or service name/ID | | `?opt1=a&...` | additional database driver options (see respective SQL driver for available options) | | `/path/to/file` | a path on disk | | `name` | a connection name set by [`\cset`][connection-vars] or in [`config.yaml`][config] | [f-path]: #f-path "URL Paths for Databases"

± Some databases, such as Microsoft SQL Server or Oracle Database, support a path component (i.e., /dbname) in the form of /instance/dbname, where /instance is the optional service identifier (aka "SID") or database instance.
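Putting the components together, a couple of hypothetical connection strings (all values below are placeholders) might look like:

```sh
# driver: postgres, user: booktest, pass: P4ssw0rd, host: localhost,
# dbname: booktest, driver option: sslmode=disable
$ usql postgres://booktest:P4ssw0rd@localhost/booktest?sslmode=disable

# driver: sqlserver, with an optional instance name in the path (see ± above)
$ usql sqlserver://user:pass@localhost/SQLExpress/mydb
```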

#### Driver Aliases `usql` supports the same driver names and aliases as [the `dburl` package][dburl]. Databases have at least one or more aliases. See [`dburl`'s scheme documentation][dburl-schemes] for a list of all supported aliases. ##### Short Aliases All database drivers have a two character short form that is usually the first two letters of the database driver. For example, `pg` for `postgres`, `my` for `mysql`, `ms` for `sqlserver`, `or` for `oracle`, or `sq` for `sqlite3`. #### Passing Driver Options Driver options are specified as standard URL query options in the form of `?opt1=a&opt2=b`. Refer to the [relevant database driver's documentation][databases] for available options. #### Paths on Disk If a URL does not have a `driver:` scheme, `usql` will check if it is a path on disk. If the path exists, `usql` will attempt to use an appropriate database driver to open the path. When the path is a Unix Domain Socket, `usql` will attempt to open it with the MySQL driver. When the path is a directory, `usql` will attempt to open it using the PostgreSQL driver. And, lastly, when the path is a regular file, `usql` will attempt to open the file using the SQLite3 or DuckDB drivers. #### Driver Defaults As with URLs, most components in the URL are optional and many components can be left out. `usql` will attempt connecting using defaults where possible: ```sh # connect to postgres using the local $USER and the unix domain socket in /var/run/postgresql $ usql pg:// ``` See the relevant documentation [on database drivers][databases] for more information. ### Connection Examples The following are example connection strings and additional ways to connect to databases using `usql`: ```sh # connect to a postgres database $ usql pg://user:pass@host/dbname $ usql pgsql://user:pass@host/dbname $ usql postgres://user:pass@host:port/dbname $ usql pg:// $ usql /var/run/postgresql $ usql pg://user:pass@host/dbname?sslmode=disable # Connect without SSL # connect to a mysql database $ usql my://user:pass@host/dbname $ usql mysql://user:pass@host:port/dbname $ usql my:// $ usql /var/run/mysqld/mysqld.sock # connect to a sqlserver database $ usql sqlserver://user:pass@host/instancename/dbname $ usql ms://user:pass@host/dbname $ usql ms://user:pass@host/instancename/dbname $ usql mssql://user:pass@host:port/dbname $ usql ms:// # connect to a sqlserver database using Windows domain authentication $ runas /user:ACME\wiley /netonly "usql mssql://host/dbname/" # connect to a oracle database $ usql or://user:pass@host/sid $ usql oracle://user:pass@host:port/sid $ usql or:// # connect to a cassandra database $ usql ca://user:pass@host/keyspace $ usql cassandra://host/keyspace $ usql cql://host/ $ usql ca:// # connect to a sqlite database that exists on disk $ usql dbname.sqlite3 # Note: when connecting to a SQLite database, if the "driver://" or # "driver:" scheme/alias is omitted, the file must already exist on disk. 
# # if the file does not yet exist, the URL must incorporate file:, sq:, sqlite3:, # or any other recognized sqlite3 driver alias to force usql to create a new, # empty database at the specified path: $ usql sq://path/to/dbname.sqlite3 $ usql sqlite3://path/to/dbname.sqlite3 $ usql file:/path/to/dbname.sqlite3 # connect to a adodb ole resource (windows only) $ usql adodb://Microsoft.Jet.OLEDB.4.0/myfile.mdb $ usql "adodb://Microsoft.ACE.OLEDB.12.0/?Extended+Properties=\"Text;HDR=NO;FMT=Delimited\"" # connect to a named connection in $HOME/.config/usql/config.yaml $ cat $HOME/.config/usql/config.yaml connections: my_named_connection: sqlserver://user:pass@localhost/ $ usql my_named_connection # connect with ODBC driver (requires building with odbc tag) $ cat /etc/odbcinst.ini [DB2] Description=DB2 driver Driver=/opt/db2/clidriver/lib/libdb2.so FileUsage = 1 DontDLClose = 1 [PostgreSQL ANSI] Description=PostgreSQL ODBC driver (ANSI version) Driver=psqlodbca.so Setup=libodbcpsqlS.so Debug=0 CommLog=1 UsageCount=1 # connect to db2, postgres databases using odbc config above $ usql odbc+DB2://user:pass@localhost/dbname $ usql odbc+PostgreSQL+ANSI://user:pass@localhost/dbname?TraceFile=/path/to/trace.log ``` See the [section on connection variables][connection-vars] for information on defining connection names. ### Executing Queries and Commands The interactive interpreter reads queries and [backslash meta (`\`) commands][commands], sending the query to the connected database: ```sh $ usql sqlite://example.sqlite3 Connected with driver sqlite3 (SQLite3 3.17.0) Type "help" for help. sq:example.sqlite3=> create table test (test_id int, name string); CREATE TABLE sq:example.sqlite3=> insert into test (test_id, name) values (1, 'hello'); INSERT 1 sq:example.sqlite3=> select * from test; test_id | name +---------+-------+ 1 | hello (1 rows) sq:example.sqlite3=> select * from test sq:example.sqlite3-> \p select * from test sq:example.sqlite3-> \g test_id | name +---------+-------+ 1 | hello (1 rows) sq:example.sqlite3=> \c postgres://booktest@localhost error: pq: 28P01: password authentication failed for user "booktest" Enter password: Connected with driver postgres (PostgreSQL 9.6.6) pg:booktest@localhost=> select * from authors; author_id | name +-----------+----------------+ 1 | Unknown Master 2 | blah 3 | foobar (3 rows) pg:booktest@localhost=> ``` Commands may accept one or more parameter, and can be quoted using either `'` or `"`. Command parameters [may also be backticked][backticks]. ### Backslash Commands `usql` supports interleaved backslash (`\`) meta commands to modify or alter the way that `usql` interprets queries, formats its output, and changes the resulting interactive flow. ```sh (not connected)=> \c postgres://user:pass@localhost pg:user@localhost=> select * from my_table \G ``` Available backslash meta commands can be displayed with `\?`: ```sh $ usql Type "help" for help. (not connected)=> \? 
General \q quit usql \copyright show usql usage and distribution terms \drivers display information about available database drivers Query Execute \g [(OPTIONS)] [FILE] or ; execute query (and send results to file or |pipe) \crosstabview [(OPTIONS)] [COLUMNS] execute query and display results in crosstab \G [(OPTIONS)] [FILE] as \g, but forces vertical output mode \gexec execute query and execute each value of the result \gset [PREFIX] execute query and store results in usql variables \gx [(OPTIONS)] [FILE] as \g, but forces expanded output mode \watch [(OPTIONS)] [DURATION] execute query every specified interval \bind [PARAM]... set query parameters Query Buffer \e [FILE] [LINE] edit the query buffer (or file) with external editor \p show the contents of the query buffer \raw show the raw (non-interpolated) contents of the query buffer \r reset (clear) the query buffer \w FILE write query buffer to file Help \? [commands] show help on backslash commands \? options show help on usql command-line options \? variables show help on special variables Input/Output \copy SRC DST QUERY TABLE copy query from source url to table on destination url \copy SRC DST QUERY TABLE(A,...) copy query from source url to columns of table on destination url \echo [-n] [STRING] write string to standard output (-n for no newline) \qecho [-n] [STRING] write string to \o output stream (-n for no newline) \warn [-n] [STRING] write string to standard error (-n for no newline) \o [FILE] send all query results to file or |pipe \i FILE execute commands from file \ir FILE as \i, but relative to location of current script Informational \d[S+] [NAME] list tables, views, and sequences or describe table, view, sequence, or index \da[S+] [PATTERN] list aggregates \df[S+] [PATTERN] list functions \di[S+] [PATTERN] list indexes \dm[S+] [PATTERN] list materialized views \dn[S+] [PATTERN] list schemas \dp[S] [PATTERN] list table, view, and sequence access privileges \ds[S+] [PATTERN] list sequences \dt[S+] [PATTERN] list tables \dv[S+] [PATTERN] list views \l[+] list databases \ss[+] [TABLE|QUERY] [k] show stats for a table or a query Formatting \pset [NAME [VALUE]] set table output option \a toggle between unaligned and aligned output mode \C [STRING] set table title, or unset if none \f [STRING] show or set field separator for unaligned query output \H toggle HTML output mode \T [STRING] set HTML tag attributes, or unset if none \t [on|off] show only rows \x [on|off|auto] toggle expanded output Transaction \begin begin a transaction \begin [-read-only] [ISOLATION] begin a transaction with isolation level \commit commit current transaction \rollback rollback (abort) current transaction Connection \c DSN connect to database url \c DRIVER PARAMS... connect to database with driver and parameters \cset [NAME [DSN]] set named connection, or list all if no parameters \cset NAME DRIVER PARAMS... define named connection for database driver \Z close database connection \password [USERNAME] change the password for a user \conninfo display information about the current database connection Operating System \cd [DIR] change the current working directory \getenv VARNAME ENVVAR fetch environment variable \setenv NAME [VALUE] set or unset environment variable \! 
[COMMAND] execute command in shell or start interactive shell \timing [on|off] toggle timing of commands Variables \prompt [-TYPE] VAR [PROMPT] prompt user to set variable \set [NAME [VALUE]] set internal variable, or list all if no parameters \unset NAME unset (delete) internal variable ``` Parameters passed to commands [can be backticked][backticks]. ## Features and Compatibility An overview of `usql`'s features, functionality, and compatibility with `psql`: - [Configuration][config] - [Variables][variables] - [Backticks][backticks] - [Copying Between Databases][copying] - [Syntax Highlighting][highlighting] - [Time Formatting][timefmt] - [Context Completion][completion] - [Host Connection Information](#host-connection-information) - [Passwords][usqlpass] - [Runtime Configuration (RC) File][usqlrc] The `usql` project's goal is to support as much of `psql`'s core features and functionality, and aims to be as compatible as possible - [contributions are always appreciated][contributing]! #### Configuration During its initialization phase, `usql` reads a standard [YAML configuration][yaml] file `config.yaml`. On Windows this is `%AppData%/usql/config.yaml`, on macOS this is `$HOME/Library/Application Support/usql/config.yaml`, and on Linux and other Unix systems this is normally `$HOME/.config/usql/config.yaml`. ##### `connections:` [Named connection DSNs][connecting] can be defined under `connections:` as a string or as a map: ```yaml connections: my_couchbase_conn: couchbase://Administrator:P4ssw0rd@localhost my_clickhouse_conn: clickhouse://clickhouse:P4ssw0rd@localhost my_godror_conn: protocol: godror username: system password: P4ssw0rd hostname: localhost port: 1521 database: free ``` Defined `connections:` can be used on the command-line with `\connect`, `\c`, `\copy`, and [other commands][commands]: ```sh $ usql my_godror_conn Connected with driver godror (Oracle Database 23.0.0.0.0) Type "help" for help. gr:system@localhost/free=> ``` ##### `init:` An initialization script can be defined as `init:`: ```yaml init: | \echo welcome to the jungle `date` \set SYNTAX_HL_STYLE paraiso-dark ``` The `init:` script is commonly used to set [environment variables][variables] or other configuration, and can be disabled on the command-line using the `--no-init` / `-X` flag. The script will be executed prior to any `-c` / `--command` / `-f` / `--file` flag and before starting the interactive interpreter. ##### Other Options Please see [`contrib/config.yaml`](contrib/config.yaml) for an overview of available configuration options. #### Variables `usql` supports [runtime][runtime-vars], [connection][connection-vars], and [display formatting][print-vars] variables that can be `\set`, `\cset`, or `\pset` respectively. 
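For orientation, a short hypothetical session that sets one variable of each kind (the values are placeholders):

```sh
(not connected)=> -- runtime variable
(not connected)=> \set FOO bar
(not connected)=> -- connection variable
(not connected)=> \cset local postgres://user:pass@localhost
(not connected)=> -- display formatting (print) variable
(not connected)=> \pset time Kitchen
Time display is "Kitchen" ("3:04PM").
```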
##### Runtime Variables Runtime variables are managed with the `\set` and `\unset` [commands][commands]: ```sh (not connected)=> \unset FOO (not connected)=> \set FOO bar ``` Runtime variables can be displayed with `\set`: ```sh (not connected)=> \set FOO = 'bar' ``` ###### Variable Interpolation When a runtime variable `NAME` has been `\set`, then `:NAME`, `:'NAME'`, and `:"NAME"` will be interpolated into the query buffer: ```sh pg:booktest@localhost=> \set FOO bar pg:booktest@localhost=> select * from authors where name = :'FOO'; author_id | name +-----------+------+ 7 | bar (1 rows) ``` Where a runtime variable is used as `:'NAME'` or `:"NAME"` the interpolated value will be quoted using `'` or `"` respectively: ```sh pg:booktest@localhost=> \set TBLNAME authors pg:booktest@localhost=> \set COLNAME name pg:booktest@localhost=> \set FOO bar pg:booktest@localhost=> select * from :TBLNAME where :"COLNAME" = :'FOO' ``` The query buffer and interpolated values can be displayed with `\p` and `\print`, or the raw query buffer can be displayed with `\raw`: ```sh pg:booktest@localhost-> \p select * from authors where "name" = 'bar' pg:booktest@localhost-> \raw select * from :TBLNAME where :"COLNAME" = :'FOO' ```
> **Note**
>
> Variables contained within other strings will not be interpolated:

```sh
pg:booktest@localhost=> select ':FOO';
  ?column?
+----------+
  :FOO
(1 rows)

pg:booktest@localhost=> \p
select ':FOO';
```
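To get a variable's value into a larger string, one option is to interpolate it as a quoted value and let the database concatenate it; for example, on a PostgreSQL connection (the table and values are illustrative):

```sh
pg:booktest@localhost=> \set FOO bar
pg:booktest@localhost=> -- :'FOO' is interpolated as a quoted literal and concatenated in SQL
pg:booktest@localhost=> select 'prefix-' || :'FOO';
```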
##### Connection Variables Connection variables work similarly to runtime variables, and are managed with `\cset`. Connection variables can be used with the `\c`, `\connect`, `\copy`, or [other commands][commands]: ```sh (not connected)=> \cset my_conn postgres://user:pass@localhost (not connected)=> \c my_conn Connected with driver postgres (PostgreSQL 16.2 (Debian 16.2-1.pgdg120+2)) pg:postgres@localhost=> ``` Connection variables are not interpolated into queries. See the [configuration section for information on defining persistent connection variables][config]. Connection variables can be displayed with `\cset`: ```sh (not connected)=> \cset my_conn = 'postgres://user:pass@localhost' ``` ##### Display Formatting (Print) Variables Display formatting variables can be set using `\pset` and [other commands][commands]: ```sh (not connected)=> \pset time Kitchen Time display is "Kitchen" ("3:04PM"). (not connected)=> \a Output format is unaligned. ``` Display formatting variables can be displayed with `\pset`: ```sh (not connected)=> \pset time Kitchen ``` ##### Other Variables Runtime behavior, such as [enabling or disabling syntax highlighting][highlighting] can be modified through special variables like [`SYNTAX_HL`][highlighting]. Use the `\? variables` [command][commands] to display variable help information and to list special variables recognized by `usql`: ```sh (not connected)=> \? variables ``` #### Backticks [Backslash (`\`) meta commands][commands] support backticks on parameters: ```sh (not connected)=> \echo Welcome `echo $USER` -- 'currently:' "(" `date` ")" Welcome ken -- currently: ( Wed Jun 13 12:10:27 WIB 2018 ) (not connected)=> ``` Backticked parameters will be passed to the user's `SHELL`, exactly as written, and can be combined with `\set`: ```sh pg:booktest@localhost=> \set MYVAR `date` pg:booktest@localhost=> \set MYVAR = 'Wed Jun 13 12:17:11 WIB 2018' pg:booktest@localhost=> \echo :MYVAR Wed Jun 13 12:17:11 WIB 2018 pg:booktest@localhost=> ``` #### Copying Between Databases `usql` provides a `\copy` command that reads data from a source database DSN and writes to a destination database DSN: ```sh (not connected)=> \cset PGDSN postgres://user:pass@localhost (not connected)=> \cset MYDSN mysql://user:pass@localhost (not connected)=> \copy PGDSN MYDSN 'select book_id, author_id from books' 'books(id, author_id)' ``` As demonstrated above, the `\copy` command does not require being connected to a database, and will not modify or change the current open database connection or state. Any valid URL or DSN name maybe used for the source and destination database: ```sh (not connected)=> \cset MYDSN mysql://user:pass@localhost (not connected)=> \copy postgres://user:pass@localhost MYDSN 'select book_id, author_id from books' 'books(id, author_id)' ```
> **Note**
>
> `usql`'s `\copy` is distinct from and does not function like
> `psql`'s `\copy`.
##### Copy Parameters The `\copy` command has two parameter forms: ```txt \copy SRC DST QUERY TABLE \copy SRC DST QUERY TABLE(COL1, COL2, ..., COLN) ``` Where: - `SRC` - is the [source database URL][connecting] to connect to, and where the `QUERY` will be executed - `DST` - is the [destination database URL][connecting] to connect to, and where the destination `TABLE` resides - `QUERY` - is the query to execute on the `SRC` connection, the results of which will be copied to `TABLE` - `TABLE` - is the destination table name, followed by an optional SQL-like column list of the form `(COL1, COL2, ..., COLN)` - `(COL1, COL2, ..., COLN)` - a list of the destination column names, 1-to-N The usual rules for [variables, interpolation, and quoting][variables] apply to `\copy`'s parameters. ###### Quoting `QUERY` and `TABLE` **_must_** be quoted when containing spaces: ```sh $ usql (not connected)=> echo :SOURCE_DSN :DESTINATION_DSN pg://postgres:P4ssw0rd@localhost/ mysql://localhost (not connected)=> \copy :SOURCE_DSN :DESTINATION_DSN 'select * from mySourceTable' 'myDestination(colA, colB)' COPY 2 ``` ###### Column Counts The `QUERY` **_must_** return the same number of columns as defined by the `TABLE` expression: ```sh $ usql (not connected)=> \copy csvq:. sq:test.db 'select * from authors' authors error: failed to prepare insert query: 2 values for 1 columns (not connected)=> \copy csvq:. sq:test.db 'select name from authors' authors(name) COPY 2 ``` ###### Datatype Compatibility and Casting The `\copy` command does not attempt to perform any kind of datatype conversion. If a `QUERY` returns columns with different datatypes than expected by the `TABLE`'s column, the `QUERY` can use the source database's conversion/casting functionality to cast columns to a datatype that will work for `TABLE`'s columns: ```sh $ usql (not connected)=> \copy postgres://user:pass@localhost mysql://user:pass@localhost 'SELECT uuid_column::TEXT FROM myPgTable' myMyTable COPY 1 ``` ###### Importing Data from CSV The `\copy` command is capable of importing data from CSV's (or any other database!) using the `csvq` driver: ```sh $ cat authors.csv author_id,name 1,Isaac Asimov 2,Stephen King $ cat books.csv book_id,author_id,title 1,1,I Robot 2,2,Carrie 3,2,Cujo $ usql (not connected)=> -- setting variables to make connections easier (not connected)=> \set SOURCE_DSN csvq://. 
(not connected)=> \set DESTINATION_DSN sqlite3:booktest.db (not connected)=> -- connecting to the destination and creating the schema (not connected)=> \c :DESTINATION_DSN Connected with driver sqlite3 (SQLite3 3.38.5) (sq:booktest.db)=> create table authors (author_id integer, name text); CREATE TABLE (sq:booktest.db)=> create table books (book_id integer not null primary key autoincrement, author_id integer, title text); CREATE TABLE (sq:booktest.db)=> -- adding an extra row to books prior to copying (sq:booktest.db)=> insert into books (author_id, title) values (1, 'Foundation'); INSERT 1 (sq:booktest.db)=> -- disconnecting to demonstrate that \copy opens new database connections (sq:booktest.db)=> \disconnect (not connected)=> -- copying data from SOURCE -> DESTINATION (not connected)=> \copy :SOURCE_DSN :DESTINATION_DSN 'select * from authors' authors COPY 2 (not connected)=> \copy :SOURCE_DSN :DESTINATION_DSN 'select author_id, title from books' 'books(author_id, title)' COPY 3 (not connected)=> \c :DESTINATION_DSN Connected with driver sqlite3 (SQLite3 3.38.5) (sq:booktest.db)=> select * from authors; author_id | name -----------+-------------- 1 | Isaac Asimov 2 | Stephen King (2 rows) sq:booktest.db=> select * from books; book_id | author_id | title ---------+-----------+------------ 1 | 1 | Foundation 2 | 1 | I Robot 3 | 2 | Carrie 4 | 2 | Cujo (4 rows) ```
> **Note**
>
> When importing large datasets (> 1GiB) from one database to another, it is
> better to use a database's native clients and tools.
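Because `\set` and `\copy` are ordinary `usql` commands, the import above can also be run non-interactively. The following is a minimal sketch only: it reuses the `authors.csv`/`books.csv` files and `booktest.db` schema from the example above, the file name `import.sql` is an arbitrary placeholder, and the exact output may differ; it relies on the `-f`/`--file` and `-X`/`--no-init` flags described in the Runtime Configuration section below:

```sh
# illustrative sketch; assumes the booktest.db schema created above already exists
$ cat import.sql
\set SOURCE_DSN csvq://.
\set DESTINATION_DSN sqlite3:booktest.db
\copy :SOURCE_DSN :DESTINATION_DSN 'select * from authors' authors
\copy :SOURCE_DSN :DESTINATION_DSN 'select author_id, title from books' 'books(author_id, title)'
$ usql -X -f import.sql
COPY 2
COPY 3
```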
###### Reusing Connections with Copy

The `\copy` command (and all `usql` commands) [works with variables][variables]. When scripting, or when performing multiple `\copy` operations from/to multiple sources and destinations, the best practice is to `\set` connection variables either in a script or in [the `$HOME/.usqlrc` RC script][usqlrc].

Similarly, passwords can be kept out of scripts and reused easily by storing them in [the `$HOME/.usqlpass` password file][usqlpass]. For example:

```sh
$ cat $HOME/.usqlpass
postgres:*:*:*:postgres:P4ssw0rd
godror:*:*:*:system:P4ssw0rd
$ usql
Type "help" for help.

(not connected)=> \set pglocal postgres://postgres@localhost:49153?sslmode=disable
(not connected)=> \set orlocal godror://system@localhost:1521/orasid
(not connected)=> \copy :pglocal :orlocal 'select staff_id, first_name from staff' 'staff(staff_id, first_name)'
COPY 18
```

#### Syntax Highlighting

Interactive queries will be syntax highlighted by default, using [Chroma][chroma]. There are a number of [variables][] that control syntax highlighting:

| Variable                | Default                         | Values            | Description                                                   |
| ----------------------- | ------------------------------- | ----------------- | ------------------------------------------------------------- |
| `SYNTAX_HL`             | `true`                          | `true` or `false` | enables syntax highlighting                                   |
| `SYNTAX_HL_FORMAT`      | _dependent on terminal support_ | formatter name    | [Chroma formatter name][chroma-formatter]                     |
| `SYNTAX_HL_OVERRIDE_BG` | `true`                          | `true` or `false` | enables overriding the background color of the chroma styles  |
| `SYNTAX_HL_STYLE`       | `monokai`                       | style name        | [Chroma style name][chroma-style]                             |

The `SYNTAX_*` variables are regular `usql` variables, and can be `\set` and `\unset`:

```sh
$ usql
(not connected)=> \set SYNTAX_HL_STYLE dracula
(not connected)=> \unset SYNTAX_HL_OVERRIDE_BG
```

#### Context Completion

When using the interactive shell, context completion is available in `usql` by hitting the `Tab` key. For example, hitting `Tab` can complete some parts of `SELECT` queries on a PostgreSQL database:

```sh
$ usql
Connected with driver postgres (PostgreSQL 14.4 (Debian 14.4-1.pgdg110+1))
Type "help" for help.

pg:postgres@=> select * f
fetch            from             full outer join
```

Or, for example, completing [backslash commands][commands] while connected to a database:

```sh
$ usql my://
Connected with driver mysql (10.8.3-MariaDB-1:10.8.3+maria~jammy)
Type "help" for help.

my:root@=> \g
\g      \gexec  \gset   \gx
```

Not all commands, contexts, or databases support completion. If you're interested in helping to make `usql`'s completion better, see [the section below on contributing][contributing].

Command completion can be canceled with `Control-C`.

#### Time Formatting

Some databases support time/date columns that [support formatting][go-time]. By default, `usql` formats time/date columns as [RFC3339Nano][go-time]; the format can be set using `\pset time FORMAT`:

```sh
$ usql pg://
Connected with driver postgres (PostgreSQL 13.2 (Debian 13.2-1.pgdg100+1))
Type "help" for help.

pg:postgres@=> \pset time RFC3339Nano
pg:postgres@=> select now();
             now
-----------------------------
 2021-05-01T22:21:44.710385Z
(1 row)

pg:postgres@=> \pset time Kitchen
Time display is "Kitchen" ("3:04PM").
pg:postgres@=> select now();
   now
---------
 10:22PM
(1 row)

pg:postgres@=>
```

`usql`'s time format supports any [Go supported time format][go-time], or can be any standard Go const name, such as `Kitchen` above. See below for an overview of the [available time constants](#time-constants).
##### Time Constants

The following are the time constant names available in `usql`, the corresponding time format value, and example display output:

| Constant    |                                 Format |                    Display [↓][f-ts] |
| ----------- | -------------------------------------: | -----------------------------------: |
| ANSIC       | `Mon Jan _2 15:04:05 2006`             | `Wed Aug 3 20:12:48 2022`             |
| UnixDate    | `Mon Jan _2 15:04:05 MST 2006`         | `Wed Aug 3 20:12:48 UTC 2022`         |
| RubyDate    | `Mon Jan 02 15:04:05 -0700 2006`       | `Wed Aug 03 20:12:48 +0000 2022`      |
| RFC822      | `02 Jan 06 15:04 MST`                  | `03 Aug 22 20:12 UTC`                 |
| RFC822Z     | `02 Jan 06 15:04 -0700`                | `03 Aug 22 20:12 +0000`               |
| RFC850      | `Monday, 02-Jan-06 15:04:05 MST`       | `Wednesday, 03-Aug-22 20:12:48 UTC`   |
| RFC1123     | `Mon, 02 Jan 2006 15:04:05 MST`        | `Wed, 03 Aug 2022 20:12:48 UTC`       |
| RFC1123Z    | `Mon, 02 Jan 2006 15:04:05 -0700`      | `Wed, 03 Aug 2022 20:12:48 +0000`     |
| RFC3339     | `2006-01-02T15:04:05Z07:00`            | `2022-08-03T20:12:48Z`                |
| RFC3339Nano | `2006-01-02T15:04:05.999999999Z07:00`  | `2022-08-03T20:12:48.693257Z`         |
| Kitchen     | `3:04PM`                               | `8:12PM`                              |
| Stamp       | `Jan _2 15:04:05`                      | `Aug 3 20:12:48`                      |
| StampMilli  | `Jan _2 15:04:05.000`                  | `Aug 3 20:12:48.693`                  |
| StampMicro  | `Jan _2 15:04:05.000000`               | `Aug 3 20:12:48.693257`               |
| StampNano   | `Jan _2 15:04:05.000000000`            | `Aug 3 20:12:48.693257000`            |

[f-ts]: #f-ts "Timestamp Value"

Generated using timestamp 2022-08-03T20:12:48.693257Z
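The constant names above are the layout constants from Go's [`time` package][go-time], and `\pset time` also accepts any custom layout written in the same reference-time notation. The following standalone Go sketch is purely illustrative (it is not part of `usql`) and formats the timestamp above with a few of these layouts:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// the timestamp used to generate the Display column above
	ts, err := time.Parse(time.RFC3339Nano, "2022-08-03T20:12:48.693257Z")
	if err != nil {
		panic(err)
	}
	// each constant name accepted by \pset time maps to a Go layout string
	fmt.Println(ts.Format(time.Kitchen)) // 8:12PM
	fmt.Println(ts.Format(time.RFC1123)) // Wed, 03 Aug 2022 20:12:48 UTC
	// any custom Go reference-time layout also works
	fmt.Println(ts.Format("2006-01-02 15:04")) // 2022-08-03 20:12
}
```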

#### Host Connection Information

By default, `usql` displays connection information when connecting to a database. This might cause problems with some databases or connections. It can be disabled by setting the system environment variable `USQL_SHOW_HOST_INFORMATION` to `false`:

```sh
$ export USQL_SHOW_HOST_INFORMATION=false
$ usql pg://booktest@localhost
Type "help" for help.

pg:booktest@=>
```

`SHOW_HOST_INFORMATION` is a standard [`usql` variable][variables], and can be `\set` or `\unset`. Additionally, it can be passed via the command-line using `-v` or `--set`:

```sh
$ usql --set SHOW_HOST_INFORMATION=false pg://
Type "help" for help.

pg:booktest@=> \set SHOW_HOST_INFORMATION true
pg:booktest@=> \connect pg://
Connected with driver postgres (PostgreSQL 9.6.9)
pg:booktest@=>
```

#### Terminal Graphics

`usql` supports terminal graphics for [Kitty][kitty-graphics], [iTerm][iterm-graphics], and [Sixel][sixel-graphics] enabled terminals using the [`github.com/kenshaw/rasterm` package][rasterm]. Terminal graphics are only available when using the interactive shell.

##### Detection and Support

`usql` will attempt to detect when terminal graphics support is available using the `USQL_TERM_GRAPHICS`, `TERM_GRAPHICS`, and other environment variables unique to various terminals. When support is available, the logo will be displayed at the start of an interactive session.
##### Charts and Graphs

The [`\chart` command][chart-command] can be used to display a chart directly in the terminal.
See [the section on the `\chart` meta command][chart-command] for details.

##### Enabling/Disabling Terminal Graphics

Terminal graphics can be forced enabled or disabled by setting the `USQL_TERM_GRAPHICS` or the `TERM_GRAPHICS` environment variable:

```sh
# disable
$ USQL_TERM_GRAPHICS=none usql

# force iterm graphics
$ TERM_GRAPHICS=iterm usql
```

| Variable        | Default | Values                                 | Description                    |
| --------------- | ------- | -------------------------------------- | ------------------------------ |
| `TERM_GRAPHICS` | ``      | ``, `kitty`, `iterm`, `sixel`, `none`  | enables/disables term graphics |

##### Terminals with Graphics Support

The following terminals have been tested with `usql`:

- [WezTerm][wezterm] is a cross-platform terminal for Windows, macOS, Linux, and many other platforms that supports [iTerm][iterm-graphics] graphics
- [iTerm2][iterm2] is a macOS terminal that supports [iTerm][iterm-graphics] graphics
- [kitty][kitty] is a terminal for Linux, macOS, and various BSDs that supports [Kitty][kitty-graphics] graphics
- [foot][foot] is a Wayland terminal for Linux (and other Wayland hosts) that supports [Sixel][sixel-graphics] graphics

Additional terminals that support [Sixel][sixel-graphics] graphics are catalogued on the [Are We Sixel Yet?][arewesixelyet] website.

#### Passwords

`usql` supports reading passwords for databases from a `.usqlpass` file contained in the user's `HOME` directory at startup:

```sh
$ cat $HOME/.usqlpass
# format is:
# protocol:host:port:dbname:user:pass
postgres:*:*:*:booktest:booktest
$ usql pg://
Connected with driver postgres (PostgreSQL 9.6.9)
Type "help" for help.

pg:booktest@=>
```

While the `.usqlpass` functionality will not be removed, it is recommended to [define named connections][connection-vars], preferably via [the `config.yaml` file][config].
> **Note**
>
> The `.usqlpass` file must not be readable by other users, and the permissions
> should be set accordingly:

```sh
chmod 0600 ~/.usqlpass
```
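In line with the recommendation above, connection credentials can instead live in [the `config.yaml` file][config]. The following is a minimal, illustrative sketch only: the `connections` key name, the connection name, and the credentials shown are assumptions/placeholders, while `init` is the script key referenced in the RC-file section below; see [the Configuration section][config] for the file's location and authoritative schema:

```yaml
# illustrative config.yaml sketch -- key names other than init are assumptions;
# consult the Configuration section for the supported schema
connections:
  # hypothetical named connection, usable in place of a URL when connecting
  booktest_pg: postgres://booktest:booktest@localhost:5432/booktest?sslmode=disable
init: |
  \set SYNTAX_HL_STYLE dracula
```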
#### Runtime Configuration (RC) File

`usql` supports executing a `.usqlrc` runtime configuration (RC) file contained in the user's `HOME` directory:

```sh
$ cat $HOME/.usqlrc
\echo WELCOME TO THE JUNGLE `date`
\set SYNTAX_HL_STYLE paraiso-dark
# display color prompt (the default prompt is "%S%m%/%R%#")
\set PROMPT1 "\033[32m%S%m%/%R%#\033[0m"
$ usql
WELCOME TO THE JUNGLE Thu Jun 14 02:36:53 WIB 2018
Type "help" for help.

(not connected)=> \set SYNTAX_HL_STYLE = 'paraiso-dark'
(not connected)=>
```

The `.usqlrc` file is read at startup in the same way as a file passed on the command-line with `-f` / `--file`. It is commonly used to set startup environment variables and settings.

RC-file execution can be temporarily disabled at startup by passing `-X` or `--no-init` on the command-line:

```sh
$ usql --no-init pg://
```

While the `.usqlrc` functionality will not be removed, it is recommended to set an `init` script in [the `config.yaml` file][config].

## Additional Notes

The following are additional notes and miscellanea related to `usql`:

### Release Builds

[Release builds][releases] are built with the `most` build tag and with additional [SQLite3 build tags (see: `build.sh`)](build.sh).

### macOS

The recommended installation method on macOS is [via `brew`][via Homebrew] due to the way library dependencies for the `sqlite3` driver are handled on macOS. If the following (or similar) error is encountered when attempting to run `usql`:

```sh
$ usql
dyld: Library not loaded: /usr/local/opt/icu4c/lib/libicuuc.68.dylib
  Referenced from: /Users/user/.local/bin/usql
  Reason: image not found
Abort trap: 6
```

Then the missing library dependency can be fixed by installing [`icu4c`](http://site.icu-project.org) using `brew`:

```sh
$ brew install icu4c
Running `brew update --auto-update`...
==> Downloading ...
...
$ usql
(not connected)=>
```

## Contributing

`usql` is currently a WIP, and is aiming towards a 1.0 release soon. Well-written PRs are always welcome -- and there is a clear backlog of issues marked `help wanted` on the GitHub issue tracker!

For [technical details on contributing, see CONTRIBUTING.md](CONTRIBUTING.md).
[_Pick up an issue today, and submit a PR tomorrow!_][help-wanted] ## Related Projects - [dburl][dburl] - Go package providing a standard, URL-style mechanism for parsing and opening database connection URLs - [xo][xo] - Go command-line tool to generate Go code from a database schema [dburl]: https://github.com/xo/dburl [dburl-schemes]: https://github.com/xo/dburl#protocol-schemes-and-aliases [go-time]: https://pkg.go.dev/time#pkg-constants [go-sql]: https://pkg.go.dev/database/sql [homebrew]: https://brew.sh/ [xo]: https://github.com/xo/xo [xo-tap]: https://github.com/xo/homebrew-xo [chroma]: https://github.com/alecthomas/chroma [chroma-formatter]: https://github.com/alecthomas/chroma#formatters [chroma-style]: https://xyproto.github.io/splash/docs/all.html [help-wanted]: https://github.com/xo/usql/issues?q=is:open+is:issue+label:%22help+wanted%22 [aur]: https://aur.archlinux.org/packages/usql [yay]: https://github.com/Jguer/yay [arch-makepkg]: https://wiki.archlinux.org/title/makepkg [backticks]: #backticks "Backticks" [config]: #configuration "Configuration" [commands]: #backslash-commands "Backslash Commands" [completion]: #context-completion "Context Completion" [connecting]: #connecting-to-databases "Connecting to Databases" [contributing]: #contributing "Contributing" [copying]: #copying-between-databases "Copying Between Databases" [highlighting]: #syntax-highlighting "Syntax Highlighting" [termgraphics]: #terminal-graphics "Terminal Graphics" [timefmt]: #time-formatting "Time Formatting" [usqlpass]: #passwords "Passwords" [usqlrc]: #runtime-configuration-rc-file "Runtime Configuration File" [variables]: #variables "Variables" [runtime-vars]: #runtime-variables "Runtime Variables" [connection-vars]: #connection-variables "Connection Variables" [print-vars]: #display-formatting-(print)-variables "Display Formatting (print) Variables" [kitty-graphics]: https://sw.kovidgoyal.net/kitty/graphics-protocol.html [iterm-graphics]: https://iterm2.com/documentation-images.html [sixel-graphics]: https://saitoha.github.io/libsixel/ [rasterm]: https://github.com/kenshaw/rasterm [wezterm]: https://wezfurlong.org/wezterm/ [iterm2]: https://iterm2.com [foot]: https://codeberg.org/dnkl/foot [kitty]: https://sw.kovidgoyal.net/kitty/ [arewesixelyet]: https://www.arewesixelyet.com [yaml]: https://yaml.org [chart-command]: #chart-command "\\chart meta command" usql-0.19.19/build.sh000077500000000000000000000115411476173253300143320ustar00rootroot00000000000000#!/bin/bash set -e SRC=$(realpath $(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)) NAME=$(basename $SRC) VER= STATIC=0 FORCE=0 CHECK=1 INSTALL=0 BUILDONLY=0 VERBOSE=false CGO_ENABLED=1 LDNAME=github.com/xo/usql/text.CommandName LDVERSION=github.com/xo/usql/text.CommandVersion PLATFORM=$(go env GOOS) ARCH=$(go env GOARCH) GOARCH=$ARCH TAGS=( most sqlite_app_armor sqlite_fts5 sqlite_introspect sqlite_json1 sqlite_math_functions sqlite_stat4 sqlite_userauth sqlite_vtable ) latest_tag() { # get latest tag version pushd $SRC &> /dev/null git tag -l|grep -E '^v[0-9]+\.[0-9]+\.[0-9]+(\.[0-9]+)?$'|sort -r -V|head -1||: popd &> /dev/null } OPTIND=1 while getopts "a:v:sfnibxt:r" opt; do case "$opt" in a) ARCH=$OPTARG ;; v) VER=$OPTARG ;; s) STATIC=1 ;; f) FORCE=1 ;; n) CHECK=0 ;; i) INSTALL=1 ;; b) BUILDONLY=1 ;; x) VERBOSE=true ;; t) TAGS=($OPTARG) ;; r) VER=$(latest_tag) ;; esac done # neither -v or -r specified, or -v=master, set FORCE and VER if [[ "$VER" = "" || "$VER" == "master" ]]; then VER=0.0.0-dev FORCE=1 fi VER="${VER#v}" BUILD=$SRC/build 
DIR=$BUILD/$PLATFORM/$ARCH/$VER TAR=tar EXT=tar.bz2 BIN=$DIR/$NAME case $PLATFORM in linux) TAGS+=(no_adodb) ;; windows) EXT=zip BIN=$BIN.exe ;; darwin) TAGS+=(no_adodb) TAR=gtar ;; esac OUT=$DIR/$NAME-$VER-$PLATFORM-$ARCH.$EXT CARCH= QEMUARCH= GNUTYPE= CC= CXX= EXTLD=g++ if [[ "$PLATFORM" == "linux" && "$ARCH" != "$GOARCH" ]]; then case $ARCH in arm) CARCH=armhf QEMUARCH=arm GNUTYPE=gnueabihf ;; arm64) CARCH=aarch64 QEMUARCH=aarch64 GNUTYPE=gnu ;; *) echo "error: unknown arch $ARCH" exit 1 ;; esac LDARCH=$CARCH if [[ "$ARCH" == "arm" ]]; then TAGS+=(no_netezza no_chai) if [ -d /usr/arm-linux-$GNUTYPE ]; then LDARCH=arm elif [ -d /usr/arm-none-linux-$GNUTYPE ]; then LDARCH=arm-none fi fi CC=$LDARCH-linux-$GNUTYPE-gcc CXX=$LDARCH-linux-$GNUTYPE-c++ EXTLD=$LDARCH-linux-$GNUTYPE-g++ fi if [[ "$PLATFORM" == "linux" && "$ARCH" != "amd64" ]] || [[ "$PLATFORM" == "windows" ]]; then TAGS+=(no_duckdb) fi LDFLAGS=( -s -w -X $LDNAME=$NAME -X $LDVERSION=$VER ) if [ "$STATIC" = "1" ]; then OUT=$DIR/${NAME}_static-$VER-$PLATFORM-$ARCH.$EXT BIN=$DIR/${NAME}_static case $PLATFORM in linux) TAGS+=( netgo osusergo ) EXTLDFLAGS=( -static -lm -ldl ) EXTLDFLAGS="${EXTLDFLAGS[@]}" LDFLAGS+=( -linkmode=external -extldflags \'$EXTLDFLAGS\' -extld $EXTLD ) ;; *) echo "ERROR: fully static builds not currently supported for $PLATFORM/$ARCH" exit 1 ;; esac fi # check not overwriting existing build artifacts if [[ -e $OUT && "$FORCE" != "1" && "$INSTALL" == "0" ]]; then echo "ERROR: $OUT exists and FORCE != 1 (try $0 -f)" exit 1 fi TAGS="${TAGS[@]}" LDFLAGS="${LDFLAGS[@]}" echo "APP: $NAME/${VER} ($PLATFORM/$ARCH)" if [ "$STATIC" = "1" ]; then echo "STATIC: yes" fi echo "BUILD TAGS: $TAGS" echo "LDFLAGS: $LDFLAGS" pushd $SRC &> /dev/null if [ -f $OUT ]; then echo "REMOVING: $OUT" rm -rf $OUT fi mkdir -p $DIR echo "BUILDING: $BIN" # build echo "BUILD:" VERB=build OUTPUT="-o $BIN" if [ "$INSTALL" = "1" ]; then VERB=install OUTPUT="" elif [ "$BUILDONLY" = "1" ]; then OUTPUT="" fi (set -x; CC=$CC \ CXX=$CXX \ CGO_ENABLED=$CGO_ENABLED \ GOARCH=$ARCH \ go $VERB \ -v=$VERBOSE \ -x=$VERBOSE \ -ldflags="$LDFLAGS" \ -tags="$TAGS" \ -trimpath \ $OUTPUT ) if [[ "$INSTALL" == "1" || "$BUILDONLY" == "1" ]]; then exit fi (set -x; file $BIN ) if [[ "$PLATFORM" != "windows" ]]; then (set -x; chmod +x $BIN ) fi # purge disk cache if [[ "$PLATFORM" == "darwin" && "$CI" == "true" ]]; then (set -x; sudo /usr/sbin/purge ) fi built_ver() { if [[ "$PLATFORM" == "linux" && "$ARCH" != "$GOARCH" ]]; then EXTRA= if [ -d /usr/$LDARCH-linux-$GNUTYPE/libc ]; then EXTRA="-L /usr/$LDARCH-linux-$GNUTYPE/libc" fi qemu-$QEMUARCH \ -L /usr/$LDARCH-linux-$GNUTYPE \ $EXTRA \ $BIN --version elif [[ "$PLATFORM" == "darwin" && "$ARCH" != "$GOARCH" ]]; then echo "$NAME ${VER#v}" else $BIN --version fi } # check build if [[ "$CHECK" == "1" ]]; then BUILT_VER=$(built_ver) if [ "$BUILT_VER" != "$NAME ${VER#v}" ]; then echo -e "\n\nERROR: expected $NAME --version to report '$NAME ${VER#v}', got: '$BUILT_VER'" exit 1 fi echo "REPORTED: $BUILT_VER" fi # pack cp $SRC/LICENSE $DIR case $EXT in tar.bz2) $TAR -C $DIR -cjf $OUT $(basename $BIN) LICENSE ;; zip) zip $OUT -j $BIN LICENSE ;; esac # report echo "PACKED: $OUT ($(du -sh $OUT|awk '{print $1}'))" case $EXT in tar.bz2) (set -x; $TAR -jvtf $OUT) ;; zip) (set -x; unzip -l $OUT) ;; esac (set -x; sha256sum $DIR/* ) popd &> /dev/null 
usql-0.19.19/contrib/000077500000000000000000000000001476173253300143325ustar00rootroot00000000000000usql-0.19.19/contrib/adodb/000077500000000000000000000000001476173253300154035ustar00rootroot00000000000000usql-0.19.19/contrib/adodb/adodb.sh000077500000000000000000000004421476173253300170130ustar00rootroot00000000000000#!/bin/bash rm -f example.csv usql "adodb://Microsoft.ACE.OLEDB.12.0/?Extended+Properties=\"Text;HDR=NO;FMT=Delimited\"" \ -c "create table example.csv(f1 text, f2 text, f3 text);" \ -c "insert into example.csv(f1, f2, f3) values ('a', 'b', 'c');" \ -c "select * from example.csv;" usql-0.19.19/contrib/adodb/usql-config000066400000000000000000000000001476173253300175430ustar00rootroot00000000000000usql-0.19.19/contrib/cassandra/000077500000000000000000000000001476173253300162715ustar00rootroot00000000000000usql-0.19.19/contrib/cassandra/podman-config000066400000000000000000000001001476173253300207240ustar00rootroot00000000000000NAME=cassandra IMAGE=docker.io/usql/cassandra PUBLISH=9042:9042 usql-0.19.19/contrib/cassandra/test.sql000066400000000000000000001273051476173253300200010ustar00rootroot00000000000000USE cycling; CREATE KEYSPACE IF NOT EXISTS cycling WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }; // Q1: -- Find a cyclist's name given an ID number // CREATE TABLE SIMPLE PRIMARY KEY CREATE TABLE cycling.cyclist_name ( id UUID PRIMARY KEY, lastname text, firstname text ); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (5b6962dd-3f90-4c93-8f61-eabfa4a803e2, 'VOS','Marianne'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (e7cd5752-bc0d-4157-a80f-7523add8dbcd, 'VAN DER BREGGEN','Anna'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (e7ae5cf3-d358-4d99-b900-85902fda9bb0, 'FRAME','Alex'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (220844bf-4860-49d6-9a4b-6b5d3a79cbfb, 'TIRALONGO','Paolo'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47, 'KRUIKSWIJK','Steven'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (fb372533-eb95-4bb4-8685-6ef61e994caa, 'MATTHEWS', 'Michael'); SELECT * FROM cycling.cyclist_name; SELECT lastname, firstname FROM cycling.cyclist_name WHERE id = 6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47; -- Q2: -- Find cyclists that fit a particular category // CREATE TABLE CLUSTERING ORDER, PRIMARY KEY: PARTITION KEY + 1 CLUSTERING COLUMN, SIMPLE WHERE QUERY CREATE TABLE cycling.cyclist_category ( category text, points int, id UUID, lastname text, PRIMARY KEY (category, points)) WITH CLUSTERING ORDER BY (points DESC); INSERT INTO cycling.cyclist_category (category, points, id, lastname) VALUES ('GC',1269,220844bf-4860-49d6-9a4b-6b5d3a79cbfb,'TIRALONGO'); INSERT INTO cycling.cyclist_category (category, points, id, lastname) VALUES ('One-day-races',367,220844bf-4860-49d6-9a4b-6b5d3a79cbfb,'TIRALONGO'); INSERT INTO cycling.cyclist_category (category, points, id, lastname) VALUES ('Time-trial',182,220844bf-4860-49d6-9a4b-6b5d3a79cbfb,'TIRALONGO'); INSERT INTO cycling.cyclist_category (category, points, id, lastname) VALUES ('Sprint',0,220844bf-4860-49d6-9a4b-6b5d3a79cbfb,'TIRALONGO'); INSERT INTO cycling.cyclist_category (category, points, id, lastname) VALUES ('GC',1324,6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47,'KRUIJSWIJK'); INSERT INTO cycling.cyclist_category (category, points, id, lastname) VALUES ('One-day-races',198,6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47,'KRUIJSWIJK'); INSERT INTO 
cycling.cyclist_category (category, points, id, lastname) VALUES ('Sprint',39,6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47,'KRUIJSWIJK'); INSERT INTO cycling.cyclist_category (category, points, id, lastname) VALUES ('Time-trial',3,6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47,'KRUIJSWIJK'); SELECT * FROM cycling.cyclist_category; SELECT lastname, points FROM cycling.cyclist_category WHERE category = 'One-day-races'; -- Q3: -- Store race information by year and race name using a COMPOSITE PARTITION KEY CREATE TABLE cycling.rank_by_year_and_name ( race_year int, race_name text, cyclist_name text, rank int, PRIMARY KEY ((race_year, race_name), rank) ); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2015, 'Tour of Japan - Stage 4 - Minami > Shinshu', 'Benjamin PRADES', 1); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2015, 'Tour of Japan - Stage 4 - Minami > Shinshu', 'Adam PHELAN', 2); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2015, 'Tour of Japan - Stage 4 - Minami > Shinshu', 'Thomas LEBAS', 3); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2015, 'Giro d''Italia - Stage 11 - Forli > Imola', 'Ilnur ZAKARIN', 1); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2015, 'Giro d''Italia - Stage 11 - Forli > Imola', 'Carlos BETANCUR', 2); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2014, '4th Tour of Beijing', 'Phillippe GILBERT', 1); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2014, '4th Tour of Beijing', 'Daniel MARTIN', 2); INSERT INTO cycling.rank_by_year_and_name (race_year, race_name, cyclist_name, rank) VALUES (2014, '4th Tour of Beijing', 'Johan Esteban CHAVES', 3); SELECT * FROM cycling.rank_by_year_and_name; SELECT * FROM cycling.rank_by_year_and_name WHERE race_year=2015 AND race_name='Tour of Japan - Stage 4 - Minami > Shinshu'; -- New C* 3.6 -- PER PARTITION LIMIT -- To get the Top Two for each race_year-race_name pair SELECT * FROM cycling.rank_by_year_and_name PER PARTITION LIMIT 2; -- Q4: -- Find a cyclist's id given lastname and firstname -- Another CREATE TABLE using COMPOSITE PARTITION KEY -- 2i INDEX ALSO GOOD FOR THIS TABLE CREATE TABLE cycling.cyclist_id ( lastname text, firstname text, age int, id UUID, PRIMARY KEY ((lastname, firstname), age) ); INSERT INTO cycling.cyclist_id (lastname, firstname, age, id) VALUES ('EENKHOORN','Pascal',18, ffdfa2a7-5fc6-49a7-bfdc-3fcdcfdd7156); INSERT INTO cycling.cyclist_id (lastname, firstname, age, id) VALUES ('WELTEN','Bram',18, 18f471bf-f631-4bc4-a9a2-d6f6cf5ea503); INSERT INTO cycling.cyclist_id (lastname, firstname, age, id) VALUES ('COSTA','Adrien',17, 15a116fc-b833-4da6-ab9a-4a7775752836); SELECT * FROM cycling.cyclist_id WHERE lastname = 'COSTA' AND firstname = 'Adrien'; -- If you want to search by age, an index can be added CREATE INDEX c_age ON cycling.cyclist_id (age); SELECT * FROM cycling.cyclist_id WHERE age = 18; -- Q5: -- Display flag for riders -- CREATE TABLE WITH STATIC COLUMN, example uses an integer to identify flag, but it could be a blob CREATE TABLE cycling.country_flag (country text, cyclist_name text, flag int STATIC, PRIMARY KEY (country, cyclist_name)); INSERT INTO cycling.country_flag (country, cyclist_name, flag) VALUES ('Belgium', 'Jacques', 1); INSERT INTO cycling.country_flag (country, 
cyclist_name) VALUES ('Belgium', 'Andre'); INSERT INTO cycling.country_flag (country, cyclist_name, flag) VALUES ('France', 'Andre', 2); INSERT INTO cycling.country_flag (country, cyclist_name, flag) VALUES ('France', 'George', 3); -- USE SELECT REPEATEDLY TO SHOW CHANGING (OR UNCHANGING) NATURE OF the column 'flag' SELECT * FROM cycling.country_flag; -- Q6: -- Find all teams that a cyclist has been a member of --CREATE TABLE WITH SET CREATE TABLE cycling.cyclist_career_teams ( id UUID PRIMARY KEY, lastname text, teams set ); INSERT INTO cycling.cyclist_career_teams (id,lastname,teams) VALUES (5b6962dd-3f90-4c93-8f61-eabfa4a803e2, 'VOS', { 'Rabobank-Liv Woman Cycling Team','Rabobank-Liv Giant','Rabobank Women Team','Nederland bloeit' } ); INSERT INTO cycling.cyclist_career_teams (id,lastname,teams) VALUES (e7cd5752-bc0d-4157-a80f-7523add8dbcd, 'VAN DER BREGGEN', { 'Rabobank-Liv Woman Cycling Team','Sengers Ladies Cycling Team','Team Flexpoint' } ); INSERT INTO cycling.cyclist_career_teams (id,lastname,teams) VALUES (cb07baad-eac8-4f65-b28a-bddc06a0de23, 'ARMITSTEAD', { 'Boels-Dolmans Cycling Team','AA Drink - Leontien.nl','Team Garmin - Cervelo' } ); INSERT INTO cycling.cyclist_career_teams (id,lastname,teams) VALUES (1c9ebc13-1eab-4ad5-be87-dce433216d40, 'BRAND', { 'Rabobank-Liv Woman Cycling Team','Rabobank-Liv Giant','AA Drink - Leontien.nl','Leontien.nl' } ); SELECT lastname,teams FROM cycling.cyclist_career_teams; SELECT lastname, teams FROM cycling.cyclist_career_teams WHERE id=5b6962dd-3f90-4c93-8f61-eabfa4a803e2; -- NOT A QUERY, JUST A TABLE FOR QUERIES -- CREATE TABLE WITH LIST FOR UPDATE -- The SELECT statements that use this table can be found below CREATE TABLE cycling.calendar (race_id int, race_name text, race_start_date timestamp, race_end_date timestamp, PRIMARY KEY (race_id, race_start_date, race_end_date)); INSERT INTO cycling.calendar (race_id, race_name, race_start_date, race_end_date) VALUES (100, 'Giro d''Italia','2015-05-09','2015-05-31'); INSERT INTO cycling.calendar (race_id, race_name, race_start_date, race_end_date) VALUES (101, 'Criterium du Dauphine','2015-06-07','2015-06-14'); INSERT INTO cycling.calendar (race_id, race_name, race_start_date, race_end_date) VALUES (102, 'Tour de Suisse','2015-06-13','2015-06-21'); INSERT INTO cycling.calendar (race_id, race_name, race_start_date, race_end_date) VALUES (103, 'Tour de France','2015-07-04','2015-07-26'); SELECT * FROM cycling.calendar; -- NEW FOR C*3.6 -- Clustering columns can be used in a WHERE clause with ALLOW FILTERING without secondary indexes -- This query uses the clustering column "race_start_date" without an index and without using the partition key -- but using ALLOW FILTERING SELECT * FROM cycling.calendar WHERE race_start_date='2015-06-13' ALLOW FILTERING; -- Q7: -- Find all calendar events for a particular year and month CREATE TABLE cycling.upcoming_calendar ( year int, month int, events list, PRIMARY KEY ( year, month )); INSERT INTO cycling.upcoming_calendar (year, month, events) VALUES (2015, 06, ['Criterium du Dauphine','Tour de Suisse']); INSERT INTO cycling.upcoming_calendar (year, month, events) VALUES (2015, 07, ['Tour de France']); SELECT * FROM cycling.upcoming_calendar WHERE year=2015 AND month=06; -- Q8: -- SIMPLE USER-DEFINED TYPE CREATE TYPE cycling.fullname ( firstname text, lastname text ); CREATE TABLE cycling.race_winners (race_name text, race_position int, cyclist_name FROZEN, PRIMARY KEY (race_name, race_position)); INSERT INTO cycling.race_winners (race_name, race_position, 
cyclist_name) VALUES ('National Championships South Africa WJ-ITT (CN)', 1, {firstname:'Frances',lastname:'DU TOUT'}); INSERT INTO cycling.race_winners (race_name, race_position, cyclist_name) VALUES ('National Championships South Africa WJ-ITT (CN)', 2, {firstname:'Lynette',lastname:'BENSON'}); INSERT INTO cycling.race_winners (race_name, race_position, cyclist_name) VALUES ('National Championships South Africa WJ-ITT (CN)', 3, {firstname:'Anja',lastname:'GERBER'}); INSERT INTO cycling.race_winners (race_name, race_position, cyclist_name) VALUES ('National Championships South Africa WJ-ITT (CN)', 4, {firstname:'Ame',lastname:'VENTER'}); INSERT INTO cycling.race_winners (race_name, race_position, cyclist_name) VALUES ('National Championships South Africa WJ-ITT (CN)', 5, {firstname:'Danielle',lastname:'VAN NIEKERK'}); SELECT * FROM cycling.race_winners WHERE race_name = 'National Championships South Africa WJ-ITT (CN)'; -- Q9: -- Find all races for a particular cyclist -- CREATE TYPE - User-Defined Type, race -- CREATE TABLE WITH LIST, SIMPLE PRIMARY KEY CREATE TYPE cycling.race (race_title text, race_date timestamp, race_time text); CREATE TABLE cycling.cyclist_races ( id UUID PRIMARY KEY, lastname text, firstname text, races list> ); INSERT INTO cycling.cyclist_races (id, lastname, firstname, races) VALUES (5b6962dd-3f90-4c93-8f61-eabfa4a803e2, 'VOS', 'Marianne', [ {race_title:'Rabobank 7-Dorpenomloop Aalburg',race_date:'2015-05-09',race_time:'02:58:33'},{race_title:'Ronde van Gelderland',race_date:'2015-04-19',race_time:'03:22:23'} ]); INSERT INTO cycling.cyclist_races (id, lastname, firstname, races) VALUES (e7cd5752-bc0d-4157-a80f-7523add8dbcd, 'VAN DER BREGGEN', 'Anna', [ {race_title:'Festival Luxembourgeois du cyclisme feminin Elsy Jacobs - Prologue - Garnich > Garnich',race_date:'2015-05-01',race_time:'08:13:00'},{race_title:'Fest ival Luxembourgeois du cyclisme feminin Elsy Jacobs - Stage 2 - Garnich > Garnich',race_date:'2015-05-02',race_time:'02:41:52'},{race_title:'Festival Luxembourgeois du cyclisme feminin Elsy Jacobs - Stage 3 - Mamer > Mamer',race_date:'2015-05-03',race_time:'02:31:24'} ]); SELECT * FROM cycling.cyclist_races; SELECT lastname, races FROM cycling.cyclist_races WHERE id = e7cd5752-bc0d-4157-a80f-7523add8dbcd; -- Q10: -- Find all teams for a particular cyclist associated with the year of membership -- teams map is map -- CREATE TABLE WITH MAP, SIMPLE PRIMARY KEY CREATE TABLE cycling.cyclist_teams ( id UUID PRIMARY KEY, lastname text, firstname text, teams map ); INSERT INTO cycling.cyclist_teams (id, lastname, firstname, teams) VALUES (5b6962dd-3f90-4c93-8f61-eabfa4a803e2,'VOS', 'Marianne', {2015 : 'Rabobank-Liv Woman Cycling Team', 2014 : 'Rabobank-Liv Woman Cycling Team', 2013 : 'Rabobank-Liv Giant', 2012 : 'Rabobank Women Team', 2011 : 'Nederland bloeit' }); INSERT INTO cycling.cyclist_teams (id, lastname, firstname, teams) VALUES (e7cd5752-bc0d-4157-a80f-7523add8dbcd,'VAN DER BREGGEN', 'Anna', {2015 : 'Rabobank-Liv Woman Cycling Team', 2014 : 'Rabobank-Liv Woman Cycling Team', 2013 : 'Sengers Ladies Cycling Team', 2012 : 'Sengers Ladies Cycling Team', 2009 : 'Team Flexpoint' }); INSERT INTO cycling.cyclist_teams (id, lastname, firstname, teams) VALUES (cb07baad-eac8-4f65-b28a-bddc06a0de23,'ARMITSTEAD', 'Elizabeth', {2015 : 'Boels-Dolmans Cycling Team', 2014 : 'Boels-Dolmans Cycling Team', 2013 : 'Boels-Dolmans Cycling Team', 2012 : 'AA Drink - Leontien.nl', 2011 : 'Team Garmin - Cervelo' }); SELECT lastname, firstname, teams FROM cycling.cyclist_teams; 
SELECT lastname, firstname, teams FROM cycling.cyclist_teams WHERE id=5b6962dd-3f90-4c93-8f61-eabfa4a803e2; -- Q11: -- Find all stats for a particular cyclist -- CREATE TYPE - UDT, basic_info -- CREATE TABLE with UDT, SIMPLE PRIMARY KEY CREATE TYPE cycling.basic_info ( birthday timestamp, nationality text, weight text, height text ); CREATE TABLE cycling.cyclist_stats ( id UUID, lastname text, basics FROZEN , PRIMARY KEY (id) ); INSERT INTO cycling.cyclist_stats (id, lastname, basics) VALUES (e7ae5cf3-d358-4d99-b900-85902fda9bb0, 'FRAME', { birthday:'1993-06-18',nationality:'New Zealand',weight:null,height:null }); INSERT INTO cycling.cyclist_stats (id, lastname, basics) VALUES (6cbc55e9-1943-47dc-91f2-f8f9e95992eb, 'VIGANO', { birthday:'1984-06-12',nationality:'Italy',weight:'67 kg',height:'1.82 m' }); INSERT INTO cycling.cyclist_stats (id, lastname, basics) VALUES (220844bf-4860-49d6-9a4b-6b5d3a79cbfb, 'TIRALONGO', { birthday:'1977-07-08',nationality:'Italy',weight:'63 kg',height:'1.78 m' }); SELECT * FROM cycling.cyclist_stats; SELECT * FROM cycling.cyclist_stats WHERE id = 220844bf-4860-49d6-9a4b-6b5d3a79cbfb; -- NEW IN C* 3.6 -- UPDATE AND DELETE single fields in UDTs with only non-collection fields -- CHANGE "CREATE TABLE IN LAST EXAMPLE TO non-frozen CREATE TABLE cycling.cyclist_stats ( id UUID, lastname text, basics basic_info, PRIMARY KEY (id) ); -- Now birthday can be updated separate from nationality, weight, and height UPDATE cycling.cyclist_stats SET basics.birthday = '2000-12-12' WHERE id = 220844bf-4860-49d6-9a4b-6b5d3a79cbfb; -- Q12: -- Find total number of PCS points for a particular cyclist -- CREATE TABLE WITH PRIMARY KEY: PARTITION KEY + 1 CLUSTERING COLUMN -- USE STANDARD AGGREGATE IN QUERY CREATE TABLE cycling.cyclist_points (id UUID, firstname text, lastname text, race_title text, race_points int, PRIMARY KEY (id, race_points )); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (e3b19ec4-774a-4d1c-9e5a-decec1e30aac, 'Giorgia','BRONZINI', 'Tour of Chongming Island World Cup', 120); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (e3b19ec4-774a-4d1c-9e5a-decec1e30aac, 'Giorgia','BRONZINI', 'Trofeo Alfredo Binda - Comune di Cittiglio', 6); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (e3b19ec4-774a-4d1c-9e5a-decec1e30aac, 'Giorgia','BRONZINI', 'Acht van Westerveld', 75); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (220844bf-4860-49d6-9a4b-6b5d3a79cbfb, 'Paolo','TIRALONGO', '98th Giro d''Italia - Stage 15', 2); SELECT sum(race_points) FROM cycling.cyclist_points WHERE id=e3b19ec4-774a-4d1c-9e5a-decec1e30aac; -- Q13: -- USES TABLE cycling.cyclist_points -- Find total number of PCS points for a particular cyclist using a user-defined function (UDF) created using java function log -- cassandra.yaml must be modified to allow UDFs to work -- enable_user_defined_functions: true (false by default) -- CREATE UDF CREATE TABLE cycling.cyclist_points (id UUID, firstname text, lastname text, race_title text, race_points double, PRIMARY KEY (id, race_points )); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (e3b19ec4-774a-4d1c-9e5a-decec1e30aac, 'Giorgia','BRONZINI', 'Tour of Chongming Island World Cup', 120); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (e3b19ec4-774a-4d1c-9e5a-decec1e30aac, 
'Giorgia','BRONZINI', 'Trofeo Alfredo Binda - Comune di Cittiglio', 6); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (e3b19ec4-774a-4d1c-9e5a-decec1e30aac, 'Giorgia','BRONZINI', 'Acht van Westerveld', 75); INSERT INTO cycling.cyclist_points (id, firstname, lastname, race_title, race_points) VALUES (220844bf-4860-49d6-9a4b-6b5d3a79cbfb, 'Paolo','TIRALONGO', '98th Giro d''Italia - Stage 15', 2); CREATE OR REPLACE FUNCTION cycling.fLog (input double) CALLED ON NULL INPUT RETURNS double LANGUAGE java AS 'return Double.valueOf(Math.log(input.doubleValue()));'; SELECT id, lastname, fLog(race_points) FROM cycling.cyclist_points; -- Q14: --Find the average race_time in seconds for a particular race for a particular team. -- CREATE UDA that computes the average value --CREATE TABLE WITH SIMPLE PRIMARY KEY: PARTITION KEY + 2 CLUSTERING COLUMNS CREATE OR REPLACE FUNCTION cycling.avgState ( state tuple, val int ) CALLED ON NULL INPUT RETURNS tuple LANGUAGE java AS 'if (val !=null) { state.setInt(0, state.getInt(0)+1); state.setLong(1, state.getLong(1)+val.intValue()); } return state;'; CREATE OR REPLACE FUNCTION cycling.avgFinal ( state tuple ) CALLED ON NULL INPUT RETURNS double LANGUAGE java AS 'double r = 0; if (state.getInt(0) == 0) return null; r = state.getLong(1); r/= state.getInt(0); return Double.valueOf(r);'; CREATE AGGREGATE cycling.average ( int ) SFUNC avgState STYPE tuple FINALFUNC avgFinal INITCOND (0,0); CREATE TABLE cycling.team_average (team_name text, cyclist_name text, cyclist_time_sec int, race_title text, PRIMARY KEY (team_name, race_title,cyclist_name)); INSERT INTO cycling.team_average (team_name, cyclist_name, cyclist_time_sec, race_title) VALUES ('UnitedHealthCare Pro Cycling Womens Team','Katie HALL',11449,'Amgen Tour of California Women''s Race presented by SRAM - Stage 1 - Lake Tahoe > Lake Tahoe'); INSERT INTO cycling.team_average (team_name, cyclist_name, cyclist_time_sec, race_title) VALUES ('UnitedHealthCare Pro Cycling Womens Team','Linda VILLUMSEN',11485,'Amgen Tour of California Women''s Race presented by SRAM - Stage 1 - Lake Tahoe > Lake Tahoe'); INSERT INTO cycling.team_average (team_name, cyclist_name, cyclist_time_sec, race_title) VALUES ('UnitedHealthCare Pro Cycling Womens Team','Hannah BARNES',11490,'Amgen Tour of California Women''s Race presented by SRAM - Stage 1 - Lake Tahoe > Lake Tahoe'); INSERT INTO cycling.team_average (team_name, cyclist_name, cyclist_time_sec, race_title) VALUES ('Velocio-SRAM','Alena AMIALIUSIK',11451,'Amgen Tour of California Women''s Race presented by SRAM - Stage 1 - Lake Tahoe > Lake Tahoe'); INSERT INTO cycling.team_average (team_name, cyclist_name, cyclist_time_sec, race_title) VALUES ('Velocio-SRAM','Trixi WORRACK',11453,'Amgen Tour of California Women''s Race presented by SRAM - Stage 1 - Lake Tahoe > Lake Tahoe'); INSERT INTO cycling.team_average (team_name, cyclist_name, cyclist_time_sec, race_title) VALUES ('TWENTY16 presented by Sho-Air','Lauren KOMANSKI',11451,'Amgen Tour of California Women''s Race presented by SRAM - Stage 1 - Lake Tahoe > Lake Tahoe'); SELECT cycling.average(cyclist_time_sec) FROM cycling.team_average WHERE team_name='UnitedHealthCare Pro Cycling Womens Team' AND race_title='Amgen Tour of California Women''s Race presented by SRAM - Stage 1 - Lake Tahoe > Lake Tahoe'; -- Q15: -- CREATE INDEX - PARTITION KEY -- Uses cycling.rank_by_year_and_name -- Find rank for all races for a particular race year CREATE INDEX ryear ON cycling.rank_by_year_and_name 
(race_year); -- This will not work without the index, because the table has a composite partition key SELECT * FROM cycling.rank_by_year_and_name WHERE race_year=2015; -- INDEX on clustering column CREATE INDEX rrank ON cycling.rank_by_year_and_name (rank); SELECT * FROM cycling.rank_by_year_and_name WHERE rank = 1; -- Q16: -- CREATE INDEX - COLLECTION - SET -- Find all the cyclists that have been on a particular team CREATE INDEX team ON cycling.cyclist_career_teams (teams); SELECT * FROM cycling.cyclist_career_teams WHERE teams CONTAINS 'Nederland bloeit'; SELECT * FROM cycling.cyclist_career_teams WHERE teams CONTAINS 'Rabobank-Liv Giant'; -- Q17: -- CREATE INDEX - COLLECTION ON MAP KEYS -- Find all cyclist/team combinations for a particular year -- CREATE TABLE cycling.cyclist_teams ( id UUID PRIMARY KEY, lastname text, firstname text, teams map ); CREATE INDEX team_year ON cycling.cyclist_teams (KEYS(teams)); SELECT * FROM cycling.cyclist_teams WHERE teams CONTAINS KEY 2015; -- Q35: -- CREATE INDEX - ENTRIES ON MAP KEYS -- ONLY VALID FOR MAP TYPE CREATE TABLE cycling.birthday_list (cyclist_name text PRIMARY KEY, blist map); INSERT INTO cycling.birthday_list (cyclist_name, blist) VALUES ('Allan DAVIS', {'age':'35', 'bday':'27/07/1980', 'nation':'AUSTRALIA'}); INSERT INTO cycling.birthday_list (cyclist_name, blist) VALUES ('Claudio VANDELLI', {'age':'54', 'bday':'27/07/1961', 'nation':'ITALY'}); INSERT INTO cycling.birthday_list (cyclist_name, blist) VALUES ('Laurence BOURQUE', {'age':'23', 'bday':'27/07/1992', 'nation':'CANADA'}); INSERT INTO cycling.birthday_list (cyclist_name, blist) VALUES ('Claudio HEINEN', {'age':'23', 'bday':'27/07/1992', 'nation':'GERMANY'}); INSERT INTO cycling.birthday_list (cyclist_name, blist) VALUES ('Luc HAGENAARS', {'age':'28', 'bday':'27/07/1987', 'nation':'NETHERLANDS'}); INSERT INTO cycling.birthday_list (cyclist_name, blist) VALUES ('Toine POELS', {'age':'52', 'bday':'27/07/1963', 'nation':'NETHERLANDS'}); CREATE INDEX blist_idx ON cycling.birthday_list (ENTRIES(blist)); SELECT * FROM cycling.birthday_list WHERE blist['age'] = '23'; SELECT * FROM cycling.birthday_list WHERE blist['nation'] = 'GERMANY'; SELECT * FROM cycling.birthday_list WHERE blist['bday'] = '27/07/1992'; -- Q36: -- CREATE INDEX - FULL ON FROZEN COLLECTION -- ONLY VALID FOR FROZEN COLLECTIONS (SET, LIST, MAP) CREATE TABLE cycling.race_starts (cyclist_name text PRIMARY KEY, rnumbers FROZEN>); CREATE INDEX rnumbers_idx ON cycling.race_starts (FULL(rnumbers)); INSERT INTO cycling.race_starts (cyclist_name,rnumbers) VALUES ('Alexander KRISTOFF',[40,5,14]); INSERT INTO cycling.race_starts (cyclist_name,rnumbers) VALUES ('Alejandro VALVERDE',[67,17,20]); INSERT INTO cycling.race_starts (cyclist_name,rnumbers) VALUES ('Alberto CONTADOR',[61,14,7]); INSERT INTO cycling.race_starts (cyclist_name,rnumbers) VALUES ('Christopher FROOME',[28,10,6]); INSERT INTO cycling.race_starts (cyclist_name,rnumbers) VALUES ('John DEGENKOLB',[39,7,14]); SELECT * FROM cycling.race_starts WHERE rnumbers = [39,7,14]; -- NOT A QUERY, JUST AN EXAMPLE -- INSERT DATA IN JSON FORMAT INSERT INTO cycling.cyclist_category JSON '{ "category" : "GC", "points" : 780, "id" : "829aa84a-4bba-411f-a4fb-38167a987cda", "lastname" : "SUTHERLAND" }'; -- null INSERTION EXAMPLE INSERT INTO cycling.cyclist_category JSON '{ "category" : "Sprint", "points" : 700, "id" : "829aa84a-4bba-411f-a4fb-38167a987cda" }'; -- NOT A QUERY, JUST AN EXAMPLE -- UPDATE SET -- Can only be + -- Add team to a cyclist's list of teams, order doesn't 
matter; this example adds it to the end UPDATE cycling.cyclist_career_teams SET teams = teams + {'Team DSB - Ballast Nedam'} WHERE id=5b6962dd-3f90-4c93-8f61-eabfa4a803e2; -- NOT A QUERY, JUST AN EXAMPLE -- UPDATE LIST -- Add events to the events list with either +/- or a specific place in the list like events[2] UPDATE cycling.upcoming_calendar SET events = ['The Parx Casino Philly Cycling Classic'] + events WHERE year = 2015 AND month = 06; UPDATE cycling.upcoming_calendar SET events[2] = 'Vuelta Ciclista a Venezuela' WHERE year = 2015 AND month = 06; -- NOT A QUERY, JUST AN EXAMPLE -- UPDATE MAP -- Can only be + UPDATE cycling.cyclist_teams SET teams = teams + {2009 : 'DSB Bank - Nederland bloeit'} WHERE id = 5b6962dd-3f90-4c93-8f61-eabfa4a803e2; SELECT teams FROM cycling.cyclist_teams WHERE id = 5b6962dd-3f90-4c93-8f61-eabfa4a803e2; UPDATE cycling.cyclist_teams SET teams[2006] = 'Team DSB - Ballast Nedam' WHERE id = 5b6962dd-3f90-4c93-8f61-eabfa4a803e2; -- Q22: -- UPDATE AND SELECT USING TTL -- QUERY TO FIND TIME-TO-LIVE -- Insert is to put in dummy record, UPDATE gives it a TTL -- Repeated use of the SELECT will show the TTL as it counts down INSERT INTO cycling.calendar (race_id, race_name, race_start_date, race_end_date) VALUES (200, 'placeholder', '2015-05-27', '2015-05-27') USING TTL; UPDATE cycling.calendar USING TTL 300 SET race_name = 'dummy' WHERE race_id = 200 AND race_start_date = '2015-05-27' AND race_end_date = '2015-05-27'; SELECT TTL(race_name) FROM cycling.calendar WHERE race_id=200; -- Q18: -- QUERY WITH ORDER BY -- Find all calendar events for a particular year and order by month SELECT * FROM cycling.upcoming_calendar WHERE year= 2015 ORDER BY month DESC; -- Q19: -- QUERY WITH INEQUALITIES -- Find all calendar events for a particular year between two set months SELECT * FROM cycling.upcoming_calendar WHERE year = 2015 AND month <= 06 AND month >= 07; -- NOT A QUERY, REALLY, JUST AN EXAMPLE -- SELECT and GET RESULTS in JSON FORMAT SELECT JSON month, year, events FROM cycling.upcoming_calendar; -- Q20: -- QUERY - WHERE ... IN SIMPLE -- Notice the difference between using 'ORDER BY points DESC' and not using it - changes the order of reporting -- Find all cyclists for a particular category and order by points PAGING OFF; SELECT * FROM cycling.cyclist_category WHERE category IN ('Time-trial', 'Sprint') ORDER BY id DESC; PAGING OFF; SELECT * FROM cycling.cyclist_category WHERE category IN ('Time-trial', 'Sprint') ORDER BY id ASC; -- Q21: -- QUERY - WHERE ... 
IN COMPLEX -- Find particular races in a range of start and end dates PAGING OFF; SELECT * FROM cycling.calendar WHERE race_id IN (100, 101, 102) AND (race_start_date, race_end_date) IN (('2015-05-09','2015-05-31'),('2015-05-06', '2015-05-31')); PAGING OFF; SELECT * FROM cycling.calendar WHERE race_id IN (100, 101, 102) AND (race_start_date, race_end_date) >= ('2015-05-09','2015-05-24'); -- Q23 and 24: -- Standard Aggregates -- Find sum of cyclist points for a particular cyclist -- Find the number of cyclists from a particular country SELECT sum(race_points) FROM cycling.cyclist_points WHERE id = e3b19ec4-774a-4d1c-9e5a-decec1e30aac; SELECT count(cyclist_name) FROM cycling.country_flag WHERE country='Belgium'; -- Q25 -- QUERY - SCAN A PARTITION -- Find all cyclists that finished a race in a particular window of time CREATE TABLE cycling.race_times (race_name text, cyclist_name text, race_time text, PRIMARY KEY (race_name, race_time)); INSERT INTO cycling.race_times (race_name, cyclist_name, race_time) VALUES ('17th Santos Tour Down Under', 'Rohan DENNIS', '19:15:18'); INSERT INTO cycling.race_times (race_name, cyclist_name, race_time) VALUES ('17th Santos Tour Down Under', 'Richie PORTE', '19:15:20'); INSERT INTO cycling.race_times (race_name, cyclist_name, race_time) VALUES ('17th Santos Tour Down Under', 'Cadel EVANS', '19:15:38'); INSERT INTO cycling.race_times (race_name, cyclist_name, race_time) VALUES ('17th Santos Tour Down Under', 'Tom DUMOULIN', '19:15:40'); SELECT * FROM cycling.race_times WHERE race_name = '17th Santos Tour Down Under' AND race_time >= '19:15:19' AND race_time <= '19:15:39'; -- NOT A QUERY, JUST AN EXAMPLE: -- BATCH statement -- Insert data into multiple tables using a BATCH statement -- Note that what is inserted is data for the SAME cyclist, to two tables BEGIN BATCH INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (c7fceba0-c141-4207-9494-a29f9809de6f, 'PIETERS', 'Amy'); INSERT INTO cycling.cyclist_id (lastname, firstname, age, id) VALUES ('PIETERS', 'Amy', 23, c7fceba0-c141-4207-9494-a29f9809de6f); APPLY BATCH; SELECT * FROM cycling.cyclist_name; SELECT * FROM cycling.cyclist_id; -- NOT A QUERY, JUST AN EXAMPLE: -- BATCH statement MISUSE -- Insert data into same table, but involves multiple nodes due to partition key = id BEGIN BATCH INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (6d5f1663-89c0-45fc-8cfd-60a373b01622,'HOSKINS', 'Melissa'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (38ab64b6-26cc-4de9-ab28-c257cf011659,'FERNANDES', 'Marcia'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (9011d3be-d35c-4a8d-83f7-a3c543789ee7,'NIEWIADOMA', 'Katarzyna'); INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (95addc4c-459e-4ed7-b4b5-472f19a67995,'ADRIAN', 'Vera'); APPLY BATCH; -- NOT A QUERY, JUST AN EXAMPLE: -- BATCH statement WITH CONDITIONAL "IF NOT EXISTS" -- EXAMPLE USES CYCLIST'S EXPENSES CREATE TABLE cycling.cyclist_expenses ( cyclist_name text, balance float STATIC, expense_id int, amount float, description text, paid boolean, PRIMARY KEY (cyclist_name, expense_id) ); BEGIN BATCH INSERT INTO cycling.cyclist_expenses (cyclist_name, balance) VALUES ('Vera ADRIAN', 0) IF NOT EXISTS; INSERT INTO cycling.cyclist_expenses (cyclist_name, expense_id, amount, description, paid) VALUES ('Vera ADRIAN', 1, 7.95, 'Breakfast', false); APPLY BATCH; UPDATE cycling.cyclist_expenses SET balance = -7.95 WHERE cyclist_name = 'Vera ADRIAN' IF balance = 0; -- NOT A QUERY, JUST AN 
EXAMPLE: -- BATCH statement WITH CONDITIONAL "IF" BEGIN BATCH INSERT INTO cycling.cyclist_expenses (cyclist_name, expense_id, amount, description, paid) VALUES ('Vera ADRIAN', 2, 13.44, 'Lunch', true); INSERT INTO cycling.cyclist_expenses (cyclist_name, expense_id, amount, description, paid) VALUES ('Vera ADRIAN', 3, 25.00, 'Dinner', false); UPDATE cycling.cyclist_expenses SET balance = -32.95 WHERE cyclist_name = 'Vera ADRIAN' IF balance = -7.95; APPLY BATCH; -- NOT A QUERY, JUST AN EXAMPLE: -- BATCH statement WITH CONDITIONAL "IF" BEGIN BATCH UPDATE cycling.cyclist_expenses SET balance = 0 WHERE cyclist_name = 'Vera ADRIAN' IF balance = -32.95; UPDATE cycling.cyclist_expenses SET paid = true WHERE cyclist_name = 'Vera ADRIAN' AND expense_id = 1 IF paid = false; UPDATE cycling.cyclist_expenses SET paid = true WHERE cyclist_name = 'Vera ADRIAN' AND expense_id = 3 IF paid = false; APPLY BATCH; -- NOT A QUERY, JUST AN EXAMPLE -- LIGHTWEIGHT TRANSACTION -- Insert or update information using a conditional statement INSERT INTO cycling.cyclist_name (id, lastname, firstname) VALUES (c4b65263-fe58-4846-83e8-f0e1c13d518f, 'RATTO', 'Rissella') IF NOT EXISTS; -- UPDATE USING LIGHTWEIGHT TRANSACTION UPDATE cycling.cyclist_name SET firstname = 'Rossella' WHERE id=c4b65263-fe58-4846-83e8-f0e1c13d518f IF lastname = 'RATTO'; -- Q26 -- QUERY USING MULTIPLE INDEXES -- DISCUSSION OF THE NEED FOR ALLOW FILTERING -- IS THIS BETTER THAN cyclist_stats?? CREATE TABLE cycling.cyclist_alt_stats ( id UUID PRIMARY KEY, lastname text, birthday timestamp, nationality text, weight text, height text ); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (ed584e99-80f7-4b13-9a90-9dc5571e6821,'TSATEVICH', '1989-07-05', 'Russia', '64 kg', '1.69 m'); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (a9e96714-2dd0-41f9-8bd0-557196a44ecf,'ISAYCHEV', '1986-04-21', 'Russia', '80 kg', '1.88 m'); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (823ec386-2a46-45c9-be41-2425a4b7658e,'BELKOV', '1985-01-09', 'Russia', '71 kg', '1.84 m'); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (e0953617-07eb-4c82-8f91-3b2757981625,'BRUTT', '1982-01-29', 'Russia', '68 kg', '1.78 m'); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (078654a6-42fa-4142-ae43-cebdc67bd902,'LAGUTIN', '1981-01-14', 'Russia', '63 kg', '1.82 m'); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (d74d6e70-7484-4df5-8551-f5090c37f617,'GRMAY', '1991-08-25', 'Ethiopia', '63 kg', '1.75 m'); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (c09e9451-50da-483d-8108-e6bea2e827b3,'VEIKKANEN', '1981-03-29', 'Finland', '66 kg', '1.78 m'); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (f1deff54-7d96-4981-b14a-b70be4da82d2,'TLEUBAYEV', '1987-03-07', 'Kazakhstan', null, null); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (1ba0417d-62da-4103-b710-de6fb227db6f,'PAULINHO', '1990-05-27', 'Portugal', null, null); INSERT INTO cycling.cyclist_alt_stats (id, lastname, birthday, nationality, weight, height) VALUES (4ceb495c-55ab-4f71-83b9-81117252bb13,'DUVAL', '1990-05-27','France', null, null); CREATE INDEX birthday_idx ON 
cycling.cyclist_alt_stats (birthday); CREATE INDEX nationality_idx ON cycling.cyclist_alt_stats (nationality); SELECT * FROM cycling.cyclist_alt_stats WHERE birthday = '1982-01-29' AND nationality = 'Russia' ALLOW FILTERING; SELECT * FROM cycling.cyclist_alt_stats WHERE birthday = '1990-05-27' AND nationality = 'Portugal' ALLOW FILTERING; -- Q27 -- USING EXPIRING DATA AND TTL TO DISPLAY THE LAST 3 DAYS race data -- 3 days in seconds is 259,200 -- 2 days in seconds is 172800 -- Data will vanish when its TTL runs out CREATE TABLE cycling.last_3_days (race_name text, year timestamp, rank int, cyclist_name text, PRIMARY KEY (year, rank, cyclist_name)); INSERT INTO cycling.last_3_days (race_name, year, rank, cyclist_name) VALUES ('Giro d''Italia Stage 16','2015-05-26',1,'Mikel Landa') USING TTL 259200; INSERT INTO cycling.last_3_days (race_name, year, rank, cyclist_name) VALUES ('Giro d''Italia Stage 16','2015-05-26',2,'Steven Kruijswijk') USING TTL 259200; INSERT INTO cycling.last_3_days (race_name, year, rank, cyclist_name) VALUES ('Giro d''Italia Stage 16','2015-05-26',3,'Alberto Contador') USING TTL 259200; INSERT INTO cycling.last_3_days (race_name, year, rank, cyclist_name) VALUES ('National Championships United States - Road Race (NC)','2015-05-25',1,'Matthew Busche') USING TTL 172800; INSERT INTO cycling.last_3_days (race_name, year, rank, cyclist_name) VALUES ('National Championships United States - Road Race (NC)','2015-05-25',2,'Joe Dombrowski') USING TTL 172800; INSERT INTO cycling.last_3_days (race_name, year, rank, cyclist_name) VALUES ('National Championships United States - Road Race (NC)','2015-05-25',3,'Kiel Reijnen') USING TTL 172800; SELECT TTL(race_name) FROM cycling.last_3_days; SELECT TTL(race_name) FROM cycling.last_3_days; SELECT * FROM cycling.last_3_days; // WILL ONLY SHOW NON-EXPIRED ROWS -- Q28: -- QUERY USING FUNCTION TOKEN() -- Note how results are not consistent with dates alone; partitioner order is how they are returned -- All 6 entries show SELECT * FROM cycling.last_3_days WHERE token(year) > token ('2015-05-24'); -- No entries show SELECT * FROM cycling.last_3_days WHERE token(year) > token ('2015-05-25'); -- 3 entries for 2015-05-25 show SELECT * FROM cycling.last_3_days WHERE token(year) > token ('2015-05-26'); -- No entries show SELECT * FROM cycling.last_3_days WHERE token(year) > token ('2015-05-27'); SELECT token(year) FROM cycling.last_3_days; //PRINTS partition hash -- MIXED TOKEN AND PARTITION KEY SELECT * FROM cycling.last_3_days WHERE token(year) < token ('2015-05-26') AND year IN ('2015-05-24','2015-05-25'); -- DELETE WHOLE ROW -- Leave column(s) blank DELETE FROM cycling.calendar WHERE race_id = 200; -- DELETE COLUMN VALUE DELETE lastname FROM cycling.cyclist_name WHERE id = c7fceba0-c141-4207-9494-a29f9809de6f; UPDATE cycling.cyclist_name SET lastname = 'PIETERS' WHERE id = c7fceba0-c141-4207-9494-a29f9809de6f; // TO RESTORE THE COLUMN VALUE -- DELETE ITEM FROM LIST DELETE events[2] FROM cycling.upcoming_calendar WHERE year = 2015 AND month = 06; -- DELETE ITEM FROM MAP DELETE teams[2009] FROM cycling.cyclist_teams WHERE id=e7cd5752-bc0d-4157-a80f-7523add8dbcd; UPDATE cycling.cyclist_teams SET teams = teams + {2009 : 'Team Flexpoint' } WHERE id = e7cd5752-bc0d-4157-a80f-7523add8dbcd; // TO RESTORE THE MAP VALUE -- ALTER TABLE -- ADD COLUMN ALTER TABLE cycling.cyclist_alt_stats ADD age int; -- ALTER TABLE WITH COLLECTION ALTER TABLE cycling.upcoming_calendar ADD description map; UPDATE cycling.upcoming_calendar SET description = description + 
{'Criterium du Dauphine' : 'Easy race', 'Tour du Suisse' : 'Hard uphill race'} WHERE year = 2015 AND month = 6; -- ALTER TABLE AND ALTER COLUMN TYPE -- ADDS COLUMN as varchar and then changes it to text ALTER TABLE cycling.cyclist_alt_stats ADD favorite_color varchar; ALTER TABLE cycling.cyclist_alt_stats ALTER favorite_color TYPE text; -- ALTER TYPE ALTER TYPE cycling.fullname ADD middlename text; ALTER TYPE cycling.fullname RENAME middlename TO middleinitial; -- TUPLE WAS USED IN THE UDA TO HOLD 2 values - see example in UDA section -- Q29: -- TUPLE -- Store the latitude/longitude waypoints for the route of a race CREATE TABLE cycling.route (race_id int, race_name text, point_id int, lat_long tuple>, PRIMARY KEY (race_id, point_id)); INSERT INTO cycling.route (race_id, race_name, point_id, lat_long) VALUES (500, '47th Tour du Pays de Vaud', 1, ('Onnens', (46.8444,6.6667))); INSERT INTO cycling.route (race_id, race_name, point_id, lat_long) VALUES (500, '47th Tour du Pays de Vaud', 2, ('Champagne', (46.833, 6.65))); INSERT INTO cycling.route (race_id, race_name, point_id, lat_long) VALUES (500, '47th Tour du Pays de Vaud', 3, ('Novalle', (46.833, 6.6))); INSERT INTO cycling.route (race_id, race_name, point_id, lat_long) VALUES (500, '47th Tour du Pays de Vaud', 4, ('Vuiteboeuf', (46.8, 6.55))); INSERT INTO cycling.route (race_id, race_name, point_id, lat_long) VALUES (500, '47th Tour du Pays de Vaud', 5, ('Baulmes', (46.7833, 6.5333))); INSERT INTO cycling.route (race_id, race_name, point_id, lat_long) VALUES (500, '47th Tour du Pays de Vaud', 6, ('Les Clées', (46.7222, 6.5222))); SELECT race_name, point_id, lat_long AS City_Latitude_Longitude FROM cycling.route; // Showcases 'AS' to rename column header -- Q30: -- QUERY USING DISTINCT -- Find all the distinct race_id values from cycling.route SELECT DISTINCT race_id from cycling.route; -- Q31: -- TUPLE -- Rank nations by points, including top cyclist -- tuple is rank, name, points CREATE TABLE cycling.nation_rank ( nation text PRIMARY KEY, info tuple ); INSERT INTO cycling.nation_rank (nation, info) VALUES ('Spain', (1,'Alejandro VALVERDE' , 9054)); INSERT INTO cycling.nation_rank (nation, info) VALUES ('France', (2,'Sylvain CHAVANEL' , 6339)); INSERT INTO cycling.nation_rank (nation, info) VALUES ('Belgium', (3,'Phillippe GILBERT' , 6222)); INSERT INTO cycling.nation_rank (nation, info) VALUES ('Italy', (4,'Davide REBELLINI' , 6090)); SELECT * FROM cycling.nation_rank; -- Q32: -- TUPLE -- Popular Riders CREATE TABLE cycling.popular (rank int PRIMARY KEY, cinfo tuple ); INSERT INTO cycling.popular (rank, cinfo) VALUES (1, ('Spain', 'Mikel LANDA', 1137)); INSERT INTO cycling.popular (rank, cinfo) VALUES (2, ('Netherlands', 'Steven KRUIJSWIJK', 621)); INSERT INTO cycling.popular (rank, cinfo) VALUES (3, ('USA', 'Matthew BUSCHE', 230)); INSERT INTO cycling.popular (rank, cinfo) VALUES (4, ('Italy', 'Fabio ARU', 163)); INSERT INTO cycling.popular (rank, cinfo) VALUES (5, ('Canada', 'Ryder HESJEDAL', 148)); SELECT * FROM cycling.popular; -- Q33: -- COUNTER TABLE -- Keep the count for popularity, incrementing or decrementing CREATE TABLE cycling.popular_count ( id UUID PRIMARY KEY, popularity counter ); UPDATE cycling.popular_count SET popularity = popularity + 1 WHERE id = 6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47; SELECT * FROM cycling.popular_count; UPDATE cycling.popular_count SET popularity = popularity + 125 WHERE id = 6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47; SELECT * FROM cycling.popular_count; UPDATE cycling.popular_count SET popularity = 
-- Q33: -- COUNTER TABLE -- Keep the count for popularity, incrementing or decrementing CREATE TABLE cycling.popular_count ( id UUID PRIMARY KEY, popularity counter ); UPDATE cycling.popular_count SET popularity = popularity + 1 WHERE id = 6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47; SELECT * FROM cycling.popular_count; UPDATE cycling.popular_count SET popularity = popularity + 125 WHERE id = 6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47; SELECT * FROM cycling.popular_count; UPDATE cycling.popular_count SET popularity = popularity - 64 WHERE id = 6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47; SELECT * FROM cycling.popular_count; -- Q34: -- Find the writetime for a column in a table SELECT WRITETIME (firstname) FROM cycling.cyclist_points WHERE id=220844bf-4860-49d6-9a4b-6b5d3a79cbfb; -- NOT A QUERY -- INSERTING STRING CONSTANT USING DOUBLE DOLLAR SIGNS INSERT INTO cycling.calendar (race_id, race_start_date, race_end_date, race_name) VALUES (201, '2015-02-18', '2015-02-22', $$Women's Tour of New Zealand$$); -- ROLES, USERS, PERMISSIONS -- cassandra.yaml must be changed to allow login with username and password -- authenticator: PasswordAuthenticator (AllowAllAuthenticator by default) -- authorizer: CassandraAuthorizer (AllowAllAuthorizer by default) CREATE USER IF NOT EXISTS sandy WITH PASSWORD 'Ride2Win@' NOSUPERUSER; CREATE USER chuck WITH PASSWORD 'Always1st$' SUPERUSER; ALTER USER sandy SUPERUSER; LIST USERS; -- DROP USER IF EXISTS chuck; CREATE ROLE IF NOT EXISTS team_manager WITH PASSWORD = 'RockIt4Us!'; CREATE ROLE sys_admin WITH PASSWORD = 'IcanDoIt4ll' AND LOGIN = true AND SUPERUSER = true; ALTER ROLE sys_admin WITH PASSWORD = 'All4one1forAll' AND SUPERUSER = false; GRANT sys_admin TO team_manager; GRANT team_manager TO sandy; LIST ROLES; LIST ROLES OF sandy; REVOKE sys_admin FROM team_manager; REVOKE team_manager FROM sandy; DROP ROLE IF EXISTS sys_admin; GRANT MODIFY ON KEYSPACE cycling TO team_manager; GRANT DESCRIBE ON ALL ROLES TO sys_admin; GRANT AUTHORIZE ON ALL KEYSPACES TO sys_admin; REVOKE SELECT ON ALL KEYSPACES FROM team_manager; REVOKE EXECUTE ON FUNCTION cycling.fLog(double) FROM team_manager; LIST ALL PERMISSIONS OF sandy; LIST ALL PERMISSIONS ON cycling.cyclist_name OF chuck; -- Q35: -- MATERIALIZED VIEW CREATE TABLE cycling.cyclist_mv (cid UUID PRIMARY KEY, name text, age int, birthday date, country text); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (e7ae5cf3-d358-4d99-b900-85902fda9bb0,'Alex FRAME', 22, '1993-06-18', 'New Zealand'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (220844bf-4860-49d6-9a4b-6b5d3a79cbfb,'Paolo TIRALONGO', 38, '1977-07-08', 'Italy'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (6ab09bec-e68e-48d9-a5f8-97e6fb4c9b47,'Steven KRUIKSWIJK', 28, '1987-06-07', 'Netherlands'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (ffdfa2a7-5fc6-49a7-bfdc-3fcdcfdd7156,'Pascal EENKHOORN', 18, '1997-02-08', 'Netherlands'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (18f471bf-f631-4bc4-a9a2-d6f6cf5ea503,'Bram WELTEN', 18, '1997-03-29', 'Netherlands'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (15a116fc-b833-4da6-ab9a-4a7775752836,'Adrien COSTA', 18, '1997-08-19', 'United States'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (862cc51f-00a1-4d5a-976b-a359cab7300e,'Joakim BUKDAL', 20, '1994-09-04', 'Denmark'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (c9c9c484-5e4a-4542-8203-8d047a01b8a8,'Cristian EGIDIO', 27, '1987-09-04', 'Brazil'); INSERT INTO cycling.cyclist_mv (cid,name,age,birthday,country) VALUES (d1aad83b-be60-47a4-bd6e-069b8da0d97b,'Johannes HEIDER', 27, '1987-09-04','Germany'); CREATE MATERIALIZED VIEW cycling.cyclist_by_age AS SELECT age, birthday, name, country FROM cyclist_mv WHERE age is NOT NULL AND cid IS NOT NULL PRIMARY KEY (age, cid); CREATE MATERIALIZED VIEW cycling.cyclist_by_country AS SELECT age, birthday, name, country FROM cyclist_mv WHERE country is NOT NULL 
AND cid IS NOT NULL PRIMARY KEY (country, cid); CREATE MATERIALIZED VIEW cycling.cyclist_by_birthday AS SELECT age, birthday, name, country FROM cyclist_mv WHERE birthday is NOT NULL AND cid IS NOT NULL PRIMARY KEY (birthday, cid); --DROP MATERIALIZED VIEW cycling.cyclist_by_age; -- Q36: -- USING TIMESTAMP INSERT INTO cycling.calendar (race_id, race_name, race_start_date, race_end_date) VALUES (200, 'placeholder', '2015-05-27', '2015-05-27') USING TIMESTAMP 123456789; -- exit \q usql-0.19.19/contrib/cassandra/usql-config000066400000000000000000000001771476173253300204500ustar00rootroot00000000000000DB="cassandra://cassandra:cassandra@localhost" VSQL="SELECT release_version AS version FROM system.local WHERE key = 'local';" usql-0.19.19/contrib/charts/000077500000000000000000000000001476173253300156165ustar00rootroot00000000000000usql-0.19.19/contrib/charts/area_density_stacked.vl.json000066400000000000000000000010411476173253300232720ustar00rootroot00000000000000{ "$schema": "https://vega.github.io/schema/vega-lite/v5.json", "title": "{{ header }}", "width": 400, "height": 80, "data": { "url": "data/penguins.json" }, "mark": "area", "transform": [ { "density": "{{ x }}", "groupby": ["Species"], "extent": [2500, 6500] } ], "encoding": { "x": {"field": "value", "type": "quantitative", "title": "{{ title_x }}"}, "y": {"field": "density", "type": "quantitative", "stack": "zero"}, "color": {"field": "{{ field_x }}", "type": "nominal"} } } usql-0.19.19/contrib/charts/area_density_stacked.vl.json.svg000066400000000000000000000560041476173253300241010ustar00rootroot000000000000002,5003,0003,5004,0004,5005,0005,5006,0006,500Body Mass (g)0.0000.0010.002densityAdelieChinstrapGentooSpeciesDistribution of Body Mass of Penguinsusql-0.19.19/contrib/charts/area_density_stacked.vl.json.svg.export.png000066400000000000000000000521701476173253300262040ustar00rootroot00000000000000PNG  IHDRB pHYsodtEXtSoftwarewww.inkscape.org< IDATxwxTI@!t"]@RD"E, *^rAʵAtRBO6m $<(}2Yg}TB$I$I=H]H$I$UI$Itϒ$I$I,I$ItϒtGغu:qVvO>X,2;~ѷo_v;BBBB-Zρط>iӦ2`"l6[޽{r/**|)DGGYgZu'ORjj*Cvp"㕸mFDDDݻo*;I6T_{2oooN+J 22v{ǜ3g...7 !DnG?om66nȧ~,KLL$22;:?u)~myiҤIa2ߍ|a2Dnnnv/?.3..N9rD\|OF2Ha"/+ˎ;&jpssS>Eaޝ)w.5b-֭[dqB=zYf Z-  h4 (+Q0`rg}Ve˖P09"ѳgO!>h !x BCCB*z1e<'NjZB @8995k֔x`0iӦ !صkr _?s !e̙3UPP ^xR j1bS~.]vUO*Pnn*JDGG !}ak4|*Lu&qF!ĵDFW^'xH6m$~~~EwhhX,bJѡCEڵj֬)v)bԩBV9&B'NTZ:Nl6aĉJ<$8P_~YKݻW9k|PJX$PHBy @kIBՊ>Lic=Vw֭'Iw+Y#$-[R^=222(~ӦMر޽{sN 3Cdʕ<~;wdڴi 8IIIdjԨEغu->vXXr%_}UM4_~tR5jlcYp!saҤI[ox 6OtRBBBxYz5AAAҮ]ń x'5k-ٳg:_ӳgO/_yصk-Z`ѢE$$$(upp`!Xjk֬fOPb<;wh4/FRR}-СC7o{_$""ٳg_pU>~w&OL׮]9~8?#͚5c׮],^[&rU>#f͚EJJ &##G}Ν;oZb{av;~~~j[͛#<±cxXt)K,Q`͚5,Y'|T>|ٳgɼyhѢw.5[_o3k,l6SNl6sA֬YC6mصk .d̘1erNI6*;z}[lBڷoxE\\/"1f̘" @zj@[B{ ^GGjVƍ\^,4ptt,:^Ľf!C!56z/RTCҥBwСCB!.]$,⡇غuBg*=BB\v !ѣS#4}t裏aŋ/(űcNJ;woFL2Eo߾B! @ԫWO,^XE'ZjgUnoVz WԊ+B=Zbݺu%`0gggz/X@!D yrJ1w\Ç !҃xb!ĵ[ÀhРB>HbԩBk;zǕ}5j\xQh4ċ/W$^"{2gZVZwؑ '''>BBBXl-K.n䄣#5k 99mf-$''쌫M NCcي/33,o9d2QPP3 _ŸMJJ R`@```c7i҄.]pa:ݻiԨ:taRRRhР 6,qg}K.SPPo 2+)aPKfgg+_n.\@LΟ?_d&++K9OaWN"f\\\puud2)nt[V[\_.]*E;>_SO=Uj,GR1e۟[x1;=z>z'''q9OTT`?Kx-^Ro?ʕ+7oݺuS&Y?#&IyY7zO^˗/+ˮ^Zl_Ţ{yaÆqE,XZwީrHRy2m6Fʼn'8y$FE)֬Y֭[ر#...jF#2T{j\n!C_裏P~}9ɓy衇7o^}u:2e Rqq1{lL޽Yt)@_QT}o`<<<"**6'<BCx\*n+(o庙L&z=ev;4gAAf'''<<tt:$%!InIvv6I9$''5jȢeI2$I$%$I$%!I$IY2$I$Ukҿ&MDzzzqqq< :۷+wc=3<< 55iӦ1`LBvvvC$IJK={#GaYjUmz+B֭۷/ׯh4ҩS'6l@\\SN%,,ףRҥ ~)fYfUR$I$IjԳvɨQe[+Z,ׯÇgڵӧO FlzJi$I$IUS5<%Oyr6@|||)_-]#Fc$I$U7UGHRaەl"ωRTl"FcM6ƍӳgOeٞ={ػwoЭ[l$It[nep݋!!!JRPݺuxʿ### "00peyTTAAA̜9u֬X۷͔)Sشi ڵk2e VBV3dȐn$I$h޽у^zqA<<r:ビ&LnK͍ݻWBR̚5C(BZd;َڑ,ZF111B! 
Ŷmرc 6m$;&T*xWE\\ܹh֬B ^/BCCEXXB1d!C ԩS⧟~Xb?Ē%KÇoV&mjԭ1I$Iy{{sa|M~֭[Ǟ={o߾;#B=/dٳ#'NdL6.CtWBOFvf FVـÓ$I*U͚5o7nݻw^j(#*\ڶmKzիN9===z!>7oΩS5k_5۷o… T2kkU !Zɷp<2II%&1m:x8ț6 iZ}wBKT}WDFFҾ}{Ο?Py[˗ЦMQDGGOELL :6mڰuV[<==1L>7RPPLr|TXveDZq*4JdT+wՁn-./rZ$t,Zٳgh:t('N$'' 2vX׿E=Xp!o2Bzĉ BxoKY;$ItRv4oZbӡHN^JHr$IRuW>/;Vs~:V$P\r-ʚsgI2%l -ʐi2m5`+/Nvv67\ߛ7 !! ZTR3h;[DUv(f`7ίp$I!t~X}sWR1[l-6]IՇt~D)мyxGn]JJ 7o1BCC8s 1ՀL u!īp>ݎ 4g3#+;I[\b+7\tBVZ<ݻ,_f _5iiiEٳgsȑ#ŎLf/̝;=NIЏ?ŋȨxxLxg.m>.Itg6.fڵ-ZOtRe/ƍquueo?:uꫯ}"۶m?r={PfM @\\6)5:s >,~~~ 6uaVm9rTz[%`L]w$Iˉw|8AAAoߞ_6[W:j7ޠe˖^׳j*3f < TE뎕zOnpr ċKڕ$Iw)lٲ3gIhh(O?4FQyOrr2t:_333ZlݺzѪUjR)5rppGaڵ̜9>>,X3K޽QO.ڵ#99D IDATrV>V:[NUv.j:d2bI$j]Q8v[աW:%;|pe-Zo߾XVFaÆ:tt0 ʨ_ʶ}ᡇ"==7774?ibRj"ԧOFI e111xxxp}I͚55Hzmtè2,V;]{<.w$鞥RxǦCDŧP7ۅЭeFբՖ~KPxzzR+ƌC׮]y[n(U/;]uG+;*)|/_e#IRR]ުNer׻i"vZÉeѢEsNE Y*~>9? SV}3uտ[Y$:i"~z6n܈ng„ rNǠA=8kbJ'ĵ)]+aWe$ItϺ騱%K~z:t#Gו+W֭[(UuI9.쮝,%g1e6L %Izb#`r? &~XOJvZ:ucǎ{mӧ8? ;;PmZ###TD,R5oa7ae!.X N1yH;֖Ctv ~-bMpq.0`Jػw/Ǐe˖888ڵkYz5jcpmT 60;ƌs{mn=ԬY:u0wb_z%&O\IU.8tWna:xb2ym6uq07$Iw)Œ"` ꄶbrss>|8ׯ/711UVM~Pt ///f3888o>ZhAN8}4[nݝ~HO[&??ooov؁'ݺuc֭\tVZѱcGvI͚5پ};͛7WW $ ֭[QqJU8vjeqBnlۏFWv8$Ux]vѪUh5k|g̙3sᾺ^`SOl&!! zO?СCpDEEA`` &M"<<#G2w\xX _E៝~JIp"PaܕLYy| [/1_+omnI䳥or7oʔ)ԯ_ >bu:gFRqe>LHH8::2h T* 6Ņ\ٷ]vʣ=|2ڵĉnݚ\>F#|>\{~iaw%JM6o˗IJJb8::rQr hڵߟI&^l}\\O?4CU ؽ{7=< r´i8p@{ILaV&_QvQgtXɓ',&22VݸqSJٙٳg/G}4,={3fa}ժUE3f [o߾_Ȅ ذaqqq105j4o޼䛭n9J*hn)q)8X3YjӸ$élo&Q`sW =p"Z3S]*;,Iʁ(7BiȚ5k?~<:WWW7n̜9sqC gϞ,`ڴipeV\*+*!n>7|øq6l+V{tؑ?LYp!yyy+4nܘcǎ)hVZqԵy~'8;;_|@=Xd \x~֭[3tЛ?fʔ)eڦU>feߩ7@N Bs)MG6Gi y>6xcZT*p6 @.d֭[b oߎ7SLaӦMc0/5Yff.C^A |Mɲed{Xc[P%9Yt)H]t)8]0+t  tP&++yڳ֕m!HN%9=oըqu4dɀ 5#2ps2\Ksw6bĨڽeTA]l݊etAA8]oiϓAڵ]*uةSӧ>͛7W*$Ȋ"Ge/l8PuA}V\ kmVތz-;&o7G\OW4\}7݊:vԘ4o.y%Ա8tI7LƍGpp0~~~̊+XbN⩧^#""o PJM:wLN|2rXTB:,߇^j3W*.}j u{"LޕVgڈO#2>-\=ӪWӓFiC`/MI',11o[nEjb{Aff&cƌaԨQ1yd6mNPP 6TJCvSx衇pttT:n8}]}Q F˖-eѢEL2aÆgyXUVj"G~ؾ};ұcG{=y䑊Q鹼r!'\`Lw,b kiB.%%ޮ%3S8s95agQUԫAz~G J92w 7& t7۷oɓ"`֭jՊƍT^b;vRRRk___L&ŦIMM- UDȋ/Ӽ,]ZM͚5ٱcLYfzW>Yo͐pBWBr_.48M#)Aؔۆ⼰cL1ri:ZM#4ݹ'zU śij*qy.]r vk׮E0~x<=={Vv;6 FC9p[&77(BBBhѢk׮f"""MoB"t9СCYtG`2|wӇ{_aÆe^̙3qrr⥗^bذa۷dL?k|%(5jذ!.:|Mƺuꫯ*">Yvf.Ù)r|'`Zԉ[ zg~tTwdŕژ f]iO& 7--%9 fkǒ: 3sDܰ*+uk ׏Z Y\ Zgi VΗaf#a+J7gR;rz%Tʄ~VZzdф%Trc%;yv(;P /6GZff;+"xwJAՂ^ڠA/lk:K'TIHp%"}nvZK:>q"RգR]`eGr׻'ƍPٳ={&Mдir N*{Y{9t&̏mApQǾ s6[zk5C3fwg>l{MR'mŘ} UNdrU `+pǚfÚhž[U* +*ܲ Q%^MV3 d8 z"{ ?I_z%.ޤXk2f \e?wL.Ep̏-†!'jl޵rKʟ'fgFR d5hOgOo}L<ՙSp.H@} 2!kX,W,i2 +/r,WgjTHFq\ ;8r *//Tިh:+Rus^0^gN͆dӵ?" \>* z= tj0QT zT? |V PrtDѠrt@`Det@ϗ;jGl$SJ-2pvvf lْv{RqLqY#!1_Hfh(ڵqh֬YIRP5t!IJ'9vjcL8e,@d̊T:bI2Rp2DKw˕+X\!s*t8tx-) $LDƏ"8t6̇ҫq+I ' X0'1,zn_auʕ肂ptD[÷CL&BD~}˯ ĵw|V^*Mu?D- d#X._&CB%E:$DX턝/{_gz289! > J5B~ֆXⓀ$92sd$?|~}:tġC{4^$J&BULRzQddVىor?T CG,9cK@e]Kڷǡ}{4>$H&BUfȹ~?I*U5N4ͱ0_Cݥ*clm3JP%ζ2Sw/S ;t ma^rT9* sT?.GM[:]eG'd"Ts} \nHc|݇oHɨʀݱ56k,v1`+@Hw kB~%{ݯ1lU+-Zvu{L*@rz.NŰd bRmt`~A\>-2 [aa1i$# >=!7}۷a04o}6De0Tv]N&B@ơ<Gd|Z'?A. 
y E:t^-Y ̱H$r %:Kt4ٿJEWM7jATtP5s<2r|yz~7kh|p: A*N)4@5 KR.֤Vd$ݜZ1=k jtj_}z DTnRVkײtRԩÌ3pww/>..wyL^xzݻdԪU qF~yteTϙ+)K\JH/^N8Q˘IM oU\d\tkMC vm]rXS$$ەG~lJCNj ݣ*={3faYjUmƌ+B֭۷/ׯh42a6l@\\#F ,,j6IDATuֱe-[W_}ό3n;|҉LHb1&rJ֠,-xh pZqИqPYqŎ*56 "5E@oE5uA&YX]K:/66v+<=t3֔ty@Nb(IGX,ʈGZԞIRTDhΝ5 ___[oE~~>FBbb"`]`Cpp0t:ضm=KmPUS1\5{wu>p; ۰Ee(q Ѥ@\R#AAkMHNID[cshhJTXHKmPTEYf_1>tIi]m[݆W NNuGgx_HƯ^⎠tAn0JY0v/.osp髗[[-;^Ѡs@7h ZttsE Z\wMBuuuX9}4w}7x{{[}}}?^gtRWWG]]`0pҍ?4 }ϣi?]WmP\MhKlVhF}#hSL|N@EG)P:I:MN/:6h B057aj)t:NNh h Nh::qtDd@CLml*h4_?k2,W.כL&qWW7+uW\ 裏㏭1W| ʕqgfhzmBoC ޷Çqĭ@h8qr\__ۛ3g~ˑ#G, w7ꊈ ""WXANNZqmqؖ8ĝɦxLBxx8 ,, VKQQ$"""Xj&LcϞ=u]PZZJ}}=0k,,X77o&11(B\i;޽NpvvkעxwF׳}v<ȩS,4 Wń؄Ba{lرcY|U٣>j= ,s~~~,YGn !B/^;aK mqmqQ{;d!BeS !B& rQ͛G\\oV6-~(//{kZdddĞ={zmcsm-s/Z %''.!..78kF#gU) @֣7UWW[n)++F577ˌ3XjU7x2)))rԩ%[XڐUqzjuQu9kU_[[NjjjTMM:{RJ+Vlب233֭[RJ=>P*44T}=$響 ڪrss… { ڷo|Xeffh;m4U[[RO?Uqqqh4cZڬXB[vQPPf 6//O*TJ)辶&Reee*==6L͝;W͚5G $ѻ6mڤ^{zܺGFF?j㘘USS,XvޭRҥKj̘1~E[cLyy9'O*7}׿X5Zzo6hÆ ̚5"~_Zյiӈ[^^F<<<6lXۛu-[ʕ+R\&ѻF#6m"::Djkke=nÇb̙DGG[=pl&11p/^RjnX fsق6RRRxW0`U݃>*++ƒMc6qppiQTTAAAf.]j3 {<ϗ~?7gaٲeJ&ѻΝܹs(**b޼yڵR/q뚛1 tuu1~xRSS4hG}tO<۷ofKz$L]aqa20L}ݖ>>A!Chnnf[{{{ér~+˛puu\RJkk+EEE 2p222xשJJJ CE͒%K={6̍_0j(gʔ),_G}ggg9ѣ` @zz:s۶m#66 rss3g .Q8v8--lBCCe=Ȏ;4hYd@}|㉉!""sH@@Řfٰaaaa9s[}v%e? *^d2kU6zhONll,| 555;466Bzz:PVVFXXO<}uoߎh$22x4Ƿ?gÆ  ~ӟIZZYYYqF0`O>$AAA]iiia}~f͚c !ߒBoI $B~K!!B[ !ߒ}AgΜVː!Cxn+yyylݺJ 7m۶bҤIZwIGGwesoٳgٻw/3f̸nM&;v`ҤI 6CȷƄA~!SN*{8x`/7iiin*s=ǫ t2rHyꩧqn?Nkk+7lǏ~4m+BBذ7|d-[ƪU㏉7Ç777uV,WL& .dȐ!deeeqӓK T\\' 6N'NAFF#FwI޻w/:cӦM|fuE;v;o޼JfΜIuu5...sbI,o !n3SJ)5qDP*??_){ut<==ŋUIIԩSUllzGRJ=Zyyyt5rHuA}嗕RJ)j*1cƨ@쬒,O?}[_Bذ7L6;wD`` EQVVƯk>SJKK)**-[pf̘a9ל9s蠤rя~t=z4Æ Z9B\\U3fP^^γ>KJJ @w_r^O||UUUl޼477[8q&6Ξ=kվ =Bq arss#%%4i,YH}Y&LiHJJɉT"##ٵk222׽-vYLL ...;vG 憇IKKRkƋ/Hmm-8::Ojj*;vuYpp0}QWWODDV=JHHMͧB\Mx} !5ٌ` 44 &F~~>h4RRRΎ֬YL>NGxx8K,Օ)SpssFڵkj{_z#G2x`>RSSQJh! 
ɓ'oN#&&@`ԩ^777\]]ikkۛ^xd/`{$&&RQQʕ+Yr^GBqd!!cǎgz)-Z-(##cǎ~0 w^RSSyW/!Y OL>lPaa7i7]xCʕ+Yt)flق=#44Aqw: 'Ν;lf}ݕoҥK444ャc_wG$B!-֘B!- BoNr|}XIENDB`usql-0.19.19/contrib/charts/penguins.json000066400000000000000000002030571476173253300203500ustar00rootroot00000000000000[ { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 39.1, "Beak Depth (mm)": 18.7, "Flipper Length (mm)": 181, "Body Mass (g)": 3750, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 39.5, "Beak Depth (mm)": 17.4, "Flipper Length (mm)": 186, "Body Mass (g)": 3800, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 40.3, "Beak Depth (mm)": 18, "Flipper Length (mm)": 195, "Body Mass (g)": 3250, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": null, "Beak Depth (mm)": null, "Flipper Length (mm)": null, "Body Mass (g)": null, "Sex": null }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 36.7, "Beak Depth (mm)": 19.3, "Flipper Length (mm)": 193, "Body Mass (g)": 3450, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 39.3, "Beak Depth (mm)": 20.6, "Flipper Length (mm)": 190, "Body Mass (g)": 3650, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 38.9, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 181, "Body Mass (g)": 3625, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 39.2, "Beak Depth (mm)": 19.6, "Flipper Length (mm)": 195, "Body Mass (g)": 4675, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 34.1, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 193, "Body Mass (g)": 3475, "Sex": null }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 42, "Beak Depth (mm)": 20.2, "Flipper Length (mm)": 190, "Body Mass (g)": 4250, "Sex": null }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 37.8, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 186, "Body Mass (g)": 3300, "Sex": null }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 37.8, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 180, "Body Mass (g)": 3700, "Sex": null }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 41.1, "Beak Depth (mm)": 17.6, "Flipper Length (mm)": 182, "Body Mass (g)": 3200, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 38.6, "Beak Depth (mm)": 21.2, "Flipper Length (mm)": 191, "Body Mass (g)": 3800, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 34.6, "Beak Depth (mm)": 21.1, "Flipper Length (mm)": 198, "Body Mass (g)": 4400, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 36.6, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 185, "Body Mass (g)": 3700, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 38.7, "Beak Depth (mm)": 19, "Flipper Length (mm)": 195, "Body Mass (g)": 3450, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 42.5, "Beak Depth (mm)": 20.7, "Flipper Length (mm)": 197, "Body Mass (g)": 4500, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 34.4, "Beak Depth (mm)": 18.4, "Flipper Length (mm)": 184, "Body Mass (g)": 3325, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 46, "Beak Depth (mm)": 21.5, "Flipper Length (mm)": 194, "Body Mass (g)": 4200, "Sex": 
"MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.8, "Beak Depth (mm)": 18.3, "Flipper Length (mm)": 174, "Body Mass (g)": 3400, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.7, "Beak Depth (mm)": 18.7, "Flipper Length (mm)": 180, "Body Mass (g)": 3600, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 35.9, "Beak Depth (mm)": 19.2, "Flipper Length (mm)": 189, "Body Mass (g)": 3800, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 38.2, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 185, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 38.8, "Beak Depth (mm)": 17.2, "Flipper Length (mm)": 180, "Body Mass (g)": 3800, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 35.3, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 187, "Body Mass (g)": 3800, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 40.6, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 183, "Body Mass (g)": 3550, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 40.5, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 187, "Body Mass (g)": 3200, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.9, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 172, "Body Mass (g)": 3150, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 40.5, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 180, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.5, "Beak Depth (mm)": 16.7, "Flipper Length (mm)": 178, "Body Mass (g)": 3250, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37.2, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 178, "Body Mass (g)": 3900, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.5, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 188, "Body Mass (g)": 3300, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.9, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 184, "Body Mass (g)": 3900, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36.4, "Beak Depth (mm)": 17, "Flipper Length (mm)": 195, "Body Mass (g)": 3325, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.2, "Beak Depth (mm)": 21.1, "Flipper Length (mm)": 196, "Body Mass (g)": 4150, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 38.8, "Beak Depth (mm)": 20, "Flipper Length (mm)": 190, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 42.2, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 180, "Body Mass (g)": 3550, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37.6, "Beak Depth (mm)": 19.3, "Flipper Length (mm)": 181, "Body Mass (g)": 3300, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.8, "Beak Depth (mm)": 19.1, "Flipper Length (mm)": 184, "Body Mass (g)": 4650, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36.5, "Beak Depth (mm)": 18, "Flipper Length (mm)": 182, "Body Mass (g)": 3150, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.8, "Beak Depth (mm)": 18.4, "Flipper Length (mm)": 195, "Body Mass (g)": 
3900, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 186, "Body Mass (g)": 3100, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 44.1, "Beak Depth (mm)": 19.7, "Flipper Length (mm)": 196, "Body Mass (g)": 4400, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37, "Beak Depth (mm)": 16.9, "Flipper Length (mm)": 185, "Body Mass (g)": 3000, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.6, "Beak Depth (mm)": 18.8, "Flipper Length (mm)": 190, "Body Mass (g)": 4600, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 41.1, "Beak Depth (mm)": 19, "Flipper Length (mm)": 182, "Body Mass (g)": 3425, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37.5, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 179, "Body Mass (g)": 2975, "Sex": null }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 190, "Body Mass (g)": 3450, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 42.3, "Beak Depth (mm)": 21.2, "Flipper Length (mm)": 191, "Body Mass (g)": 4150, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 39.6, "Beak Depth (mm)": 17.7, "Flipper Length (mm)": 186, "Body Mass (g)": 3500, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 40.1, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 188, "Body Mass (g)": 4300, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 35, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 190, "Body Mass (g)": 3450, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 42, "Beak Depth (mm)": 19.5, "Flipper Length (mm)": 200, "Body Mass (g)": 4050, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 34.5, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 187, "Body Mass (g)": 2900, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 41.4, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 191, "Body Mass (g)": 3700, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 39, "Beak Depth (mm)": 17.5, "Flipper Length (mm)": 186, "Body Mass (g)": 3550, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 40.6, "Beak Depth (mm)": 18.8, "Flipper Length (mm)": 193, "Body Mass (g)": 3800, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 36.5, "Beak Depth (mm)": 16.6, "Flipper Length (mm)": 181, "Body Mass (g)": 2850, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.6, "Beak Depth (mm)": 19.1, "Flipper Length (mm)": 194, "Body Mass (g)": 3750, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 35.7, "Beak Depth (mm)": 16.9, "Flipper Length (mm)": 185, "Body Mass (g)": 3150, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 41.3, "Beak Depth (mm)": 21.1, "Flipper Length (mm)": 195, "Body Mass (g)": 4400, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.6, "Beak Depth (mm)": 17, "Flipper Length (mm)": 185, "Body Mass (g)": 3600, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 41.1, "Beak Depth (mm)": 18.2, "Flipper Length (mm)": 192, "Body Mass 
(g)": 4050, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 36.4, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 184, "Body Mass (g)": 2850, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 41.6, "Beak Depth (mm)": 18, "Flipper Length (mm)": 192, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 35.5, "Beak Depth (mm)": 16.2, "Flipper Length (mm)": 195, "Body Mass (g)": 3350, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 41.1, "Beak Depth (mm)": 19.1, "Flipper Length (mm)": 188, "Body Mass (g)": 4100, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 35.9, "Beak Depth (mm)": 16.6, "Flipper Length (mm)": 190, "Body Mass (g)": 3050, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 41.8, "Beak Depth (mm)": 19.4, "Flipper Length (mm)": 198, "Body Mass (g)": 4450, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 33.5, "Beak Depth (mm)": 19, "Flipper Length (mm)": 190, "Body Mass (g)": 3600, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 39.7, "Beak Depth (mm)": 18.4, "Flipper Length (mm)": 190, "Body Mass (g)": 3900, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 39.6, "Beak Depth (mm)": 17.2, "Flipper Length (mm)": 196, "Body Mass (g)": 3550, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 45.8, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 197, "Body Mass (g)": 4150, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 35.5, "Beak Depth (mm)": 17.5, "Flipper Length (mm)": 190, "Body Mass (g)": 3700, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 42.8, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 195, "Body Mass (g)": 4250, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 40.9, "Beak Depth (mm)": 16.8, "Flipper Length (mm)": 191, "Body Mass (g)": 3700, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 37.2, "Beak Depth (mm)": 19.4, "Flipper Length (mm)": 184, "Body Mass (g)": 3900, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 36.2, "Beak Depth (mm)": 16.1, "Flipper Length (mm)": 187, "Body Mass (g)": 3550, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 42.1, "Beak Depth (mm)": 19.1, "Flipper Length (mm)": 195, "Body Mass (g)": 4000, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 34.6, "Beak Depth (mm)": 17.2, "Flipper Length (mm)": 189, "Body Mass (g)": 3200, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 42.9, "Beak Depth (mm)": 17.6, "Flipper Length (mm)": 196, "Body Mass (g)": 4700, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 36.7, "Beak Depth (mm)": 18.8, "Flipper Length (mm)": 187, "Body Mass (g)": 3800, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 35.1, "Beak Depth (mm)": 19.4, "Flipper Length (mm)": 193, "Body Mass (g)": 4200, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37.3, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 191, "Body Mass (g)": 3350, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length 
(mm)": 41.3, "Beak Depth (mm)": 20.3, "Flipper Length (mm)": 194, "Body Mass (g)": 3550, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36.3, "Beak Depth (mm)": 19.5, "Flipper Length (mm)": 190, "Body Mass (g)": 3800, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36.9, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 189, "Body Mass (g)": 3500, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 38.3, "Beak Depth (mm)": 19.2, "Flipper Length (mm)": 189, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 38.9, "Beak Depth (mm)": 18.8, "Flipper Length (mm)": 190, "Body Mass (g)": 3600, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 35.7, "Beak Depth (mm)": 18, "Flipper Length (mm)": 202, "Body Mass (g)": 3550, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 41.1, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 205, "Body Mass (g)": 4300, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 34, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 185, "Body Mass (g)": 3400, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.6, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 186, "Body Mass (g)": 4450, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36.2, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 187, "Body Mass (g)": 3300, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.8, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 208, "Body Mass (g)": 4300, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 38.1, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 190, "Body Mass (g)": 3700, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.3, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 196, "Body Mass (g)": 4350, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 33.1, "Beak Depth (mm)": 16.1, "Flipper Length (mm)": 178, "Body Mass (g)": 2900, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 43.2, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 192, "Body Mass (g)": 4100, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 35, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 192, "Body Mass (g)": 3725, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 41, "Beak Depth (mm)": 20, "Flipper Length (mm)": 203, "Body Mass (g)": 4725, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.7, "Beak Depth (mm)": 16, "Flipper Length (mm)": 183, "Body Mass (g)": 3075, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.8, "Beak Depth (mm)": 20, "Flipper Length (mm)": 190, "Body Mass (g)": 4250, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 37.9, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 193, "Body Mass (g)": 2925, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 39.7, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 184, "Body Mass (g)": 3550, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 38.6, "Beak Depth (mm)": 17.2, "Flipper Length (mm)": 199, "Body Mass (g)": 3750, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak 
Length (mm)": 38.2, "Beak Depth (mm)": 20, "Flipper Length (mm)": 190, "Body Mass (g)": 3900, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 38.1, "Beak Depth (mm)": 17, "Flipper Length (mm)": 181, "Body Mass (g)": 3175, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 43.2, "Beak Depth (mm)": 19, "Flipper Length (mm)": 197, "Body Mass (g)": 4775, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 38.1, "Beak Depth (mm)": 16.5, "Flipper Length (mm)": 198, "Body Mass (g)": 3825, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 45.6, "Beak Depth (mm)": 20.3, "Flipper Length (mm)": 191, "Body Mass (g)": 4600, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 39.7, "Beak Depth (mm)": 17.7, "Flipper Length (mm)": 193, "Body Mass (g)": 3200, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 42.2, "Beak Depth (mm)": 19.5, "Flipper Length (mm)": 197, "Body Mass (g)": 4275, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 39.6, "Beak Depth (mm)": 20.7, "Flipper Length (mm)": 191, "Body Mass (g)": 3900, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Biscoe", "Beak Length (mm)": 42.7, "Beak Depth (mm)": 18.3, "Flipper Length (mm)": 196, "Body Mass (g)": 4075, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 38.6, "Beak Depth (mm)": 17, "Flipper Length (mm)": 188, "Body Mass (g)": 2900, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 37.3, "Beak Depth (mm)": 20.5, "Flipper Length (mm)": 199, "Body Mass (g)": 3775, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 35.7, "Beak Depth (mm)": 17, "Flipper Length (mm)": 189, "Body Mass (g)": 3350, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 41.1, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 189, "Body Mass (g)": 3325, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 36.2, "Beak Depth (mm)": 17.2, "Flipper Length (mm)": 187, "Body Mass (g)": 3150, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 37.7, "Beak Depth (mm)": 19.8, "Flipper Length (mm)": 198, "Body Mass (g)": 3500, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 40.2, "Beak Depth (mm)": 17, "Flipper Length (mm)": 176, "Body Mass (g)": 3450, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 41.4, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 202, "Body Mass (g)": 3875, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 35.2, "Beak Depth (mm)": 15.9, "Flipper Length (mm)": 186, "Body Mass (g)": 3050, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 40.6, "Beak Depth (mm)": 19, "Flipper Length (mm)": 199, "Body Mass (g)": 4000, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 38.8, "Beak Depth (mm)": 17.6, "Flipper Length (mm)": 191, "Body Mass (g)": 3275, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 41.5, "Beak Depth (mm)": 18.3, "Flipper Length (mm)": 195, "Body Mass (g)": 4300, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 39, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 191, "Body Mass (g)": 3050, "Sex": 
"FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 44.1, "Beak Depth (mm)": 18, "Flipper Length (mm)": 210, "Body Mass (g)": 4000, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 38.5, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 190, "Body Mass (g)": 3325, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Torgersen", "Beak Length (mm)": 43.1, "Beak Depth (mm)": 19.2, "Flipper Length (mm)": 197, "Body Mass (g)": 3500, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36.8, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 193, "Body Mass (g)": 3500, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37.5, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 199, "Body Mass (g)": 4475, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 38.1, "Beak Depth (mm)": 17.6, "Flipper Length (mm)": 187, "Body Mass (g)": 3425, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 41.1, "Beak Depth (mm)": 17.5, "Flipper Length (mm)": 190, "Body Mass (g)": 3900, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 35.6, "Beak Depth (mm)": 17.5, "Flipper Length (mm)": 191, "Body Mass (g)": 3175, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.2, "Beak Depth (mm)": 20.1, "Flipper Length (mm)": 200, "Body Mass (g)": 3975, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37, "Beak Depth (mm)": 16.5, "Flipper Length (mm)": 185, "Body Mass (g)": 3400, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.7, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 193, "Body Mass (g)": 4250, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.2, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 193, "Body Mass (g)": 3400, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.6, "Beak Depth (mm)": 17.2, "Flipper Length (mm)": 187, "Body Mass (g)": 3475, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 32.1, "Beak Depth (mm)": 15.5, "Flipper Length (mm)": 188, "Body Mass (g)": 3050, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 40.7, "Beak Depth (mm)": 17, "Flipper Length (mm)": 190, "Body Mass (g)": 3725, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37.3, "Beak Depth (mm)": 16.8, "Flipper Length (mm)": 192, "Body Mass (g)": 3000, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39, "Beak Depth (mm)": 18.7, "Flipper Length (mm)": 185, "Body Mass (g)": 3650, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 39.2, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 190, "Body Mass (g)": 4250, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36.6, "Beak Depth (mm)": 18.4, "Flipper Length (mm)": 184, "Body Mass (g)": 3475, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 195, "Body Mass (g)": 3450, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 37.8, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 193, "Body Mass (g)": 3750, "Sex": "MALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 36, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 187, "Body Mass (g)": 
3700, "Sex": "FEMALE" }, { "Species": "Adelie", "Island": "Dream", "Beak Length (mm)": 41.5, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 201, "Body Mass (g)": 4000, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.5, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 192, "Body Mass (g)": 3500, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50, "Beak Depth (mm)": 19.5, "Flipper Length (mm)": 196, "Body Mass (g)": 3900, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51.3, "Beak Depth (mm)": 19.2, "Flipper Length (mm)": 193, "Body Mass (g)": 3650, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.4, "Beak Depth (mm)": 18.7, "Flipper Length (mm)": 188, "Body Mass (g)": 3525, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 52.7, "Beak Depth (mm)": 19.8, "Flipper Length (mm)": 197, "Body Mass (g)": 3725, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.2, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 198, "Body Mass (g)": 3950, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.1, "Beak Depth (mm)": 18.2, "Flipper Length (mm)": 178, "Body Mass (g)": 3250, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51.3, "Beak Depth (mm)": 18.2, "Flipper Length (mm)": 197, "Body Mass (g)": 3750, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46, "Beak Depth (mm)": 18.9, "Flipper Length (mm)": 195, "Body Mass (g)": 4150, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51.3, "Beak Depth (mm)": 19.9, "Flipper Length (mm)": 198, "Body Mass (g)": 3700, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.6, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 193, "Body Mass (g)": 3800, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51.7, "Beak Depth (mm)": 20.3, "Flipper Length (mm)": 194, "Body Mass (g)": 3775, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 47, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 185, "Body Mass (g)": 3700, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 52, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 201, "Body Mass (g)": 4050, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.9, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 190, "Body Mass (g)": 3575, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.5, "Beak Depth (mm)": 19.6, "Flipper Length (mm)": 201, "Body Mass (g)": 4050, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.3, "Beak Depth (mm)": 20, "Flipper Length (mm)": 197, "Body Mass (g)": 3300, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 58, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 181, "Body Mass (g)": 3700, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.4, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 190, "Body Mass (g)": 3450, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49.2, "Beak Depth (mm)": 18.2, "Flipper Length (mm)": 195, "Body Mass (g)": 4400, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 42.4, "Beak 
Depth (mm)": 17.3, "Flipper Length (mm)": 181, "Body Mass (g)": 3600, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 48.5, "Beak Depth (mm)": 17.5, "Flipper Length (mm)": 191, "Body Mass (g)": 3400, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 43.2, "Beak Depth (mm)": 16.6, "Flipper Length (mm)": 187, "Body Mass (g)": 2900, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.6, "Beak Depth (mm)": 19.4, "Flipper Length (mm)": 193, "Body Mass (g)": 3800, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.7, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 195, "Body Mass (g)": 3300, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 52, "Beak Depth (mm)": 19, "Flipper Length (mm)": 197, "Body Mass (g)": 4150, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.5, "Beak Depth (mm)": 18.4, "Flipper Length (mm)": 200, "Body Mass (g)": 3400, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49.5, "Beak Depth (mm)": 19, "Flipper Length (mm)": 200, "Body Mass (g)": 3800, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.4, "Beak Depth (mm)": 17.8, "Flipper Length (mm)": 191, "Body Mass (g)": 3700, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 52.8, "Beak Depth (mm)": 20, "Flipper Length (mm)": 205, "Body Mass (g)": 4550, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 40.9, "Beak Depth (mm)": 16.6, "Flipper Length (mm)": 187, "Body Mass (g)": 3200, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 54.2, "Beak Depth (mm)": 20.8, "Flipper Length (mm)": 201, "Body Mass (g)": 4300, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 42.5, "Beak Depth (mm)": 16.7, "Flipper Length (mm)": 187, "Body Mass (g)": 3350, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51, "Beak Depth (mm)": 18.8, "Flipper Length (mm)": 203, "Body Mass (g)": 4100, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49.7, "Beak Depth (mm)": 18.6, "Flipper Length (mm)": 195, "Body Mass (g)": 3600, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 47.5, "Beak Depth (mm)": 16.8, "Flipper Length (mm)": 199, "Body Mass (g)": 3900, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 47.6, "Beak Depth (mm)": 18.3, "Flipper Length (mm)": 195, "Body Mass (g)": 3850, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 52, "Beak Depth (mm)": 20.7, "Flipper Length (mm)": 210, "Body Mass (g)": 4800, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.9, "Beak Depth (mm)": 16.6, "Flipper Length (mm)": 192, "Body Mass (g)": 2700, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 53.5, "Beak Depth (mm)": 19.9, "Flipper Length (mm)": 205, "Body Mass (g)": 4500, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49, "Beak Depth (mm)": 19.5, "Flipper Length (mm)": 210, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.2, "Beak Depth (mm)": 17.5, "Flipper Length (mm)": 187, "Body Mass (g)": 3650, "Sex": "FEMALE" }, { "Species": 
"Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.9, "Beak Depth (mm)": 19.1, "Flipper Length (mm)": 196, "Body Mass (g)": 3550, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.5, "Beak Depth (mm)": 17, "Flipper Length (mm)": 196, "Body Mass (g)": 3500, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.9, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 196, "Body Mass (g)": 3675, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.8, "Beak Depth (mm)": 18.5, "Flipper Length (mm)": 201, "Body Mass (g)": 4450, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.1, "Beak Depth (mm)": 17.9, "Flipper Length (mm)": 190, "Body Mass (g)": 3400, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49, "Beak Depth (mm)": 19.6, "Flipper Length (mm)": 212, "Body Mass (g)": 4300, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51.5, "Beak Depth (mm)": 18.7, "Flipper Length (mm)": 187, "Body Mass (g)": 3250, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49.8, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 198, "Body Mass (g)": 3675, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 48.1, "Beak Depth (mm)": 16.4, "Flipper Length (mm)": 199, "Body Mass (g)": 3325, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51.4, "Beak Depth (mm)": 19, "Flipper Length (mm)": 201, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.7, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 193, "Body Mass (g)": 3600, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.7, "Beak Depth (mm)": 19.7, "Flipper Length (mm)": 203, "Body Mass (g)": 4050, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 42.5, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 187, "Body Mass (g)": 3350, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 52.2, "Beak Depth (mm)": 18.8, "Flipper Length (mm)": 197, "Body Mass (g)": 3450, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.2, "Beak Depth (mm)": 16.6, "Flipper Length (mm)": 191, "Body Mass (g)": 3250, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49.3, "Beak Depth (mm)": 19.9, "Flipper Length (mm)": 203, "Body Mass (g)": 4050, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.2, "Beak Depth (mm)": 18.8, "Flipper Length (mm)": 202, "Body Mass (g)": 3800, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.6, "Beak Depth (mm)": 19.4, "Flipper Length (mm)": 194, "Body Mass (g)": 3525, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 51.9, "Beak Depth (mm)": 19.5, "Flipper Length (mm)": 206, "Body Mass (g)": 3950, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 46.8, "Beak Depth (mm)": 16.5, "Flipper Length (mm)": 189, "Body Mass (g)": 3650, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 45.7, "Beak Depth (mm)": 17, "Flipper Length (mm)": 195, "Body Mass (g)": 3650, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 55.8, "Beak Depth (mm)": 19.8, "Flipper 
Length (mm)": 207, "Body Mass (g)": 4000, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 43.5, "Beak Depth (mm)": 18.1, "Flipper Length (mm)": 202, "Body Mass (g)": 3400, "Sex": "FEMALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 49.6, "Beak Depth (mm)": 18.2, "Flipper Length (mm)": 193, "Body Mass (g)": 3775, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.8, "Beak Depth (mm)": 19, "Flipper Length (mm)": 210, "Body Mass (g)": 4100, "Sex": "MALE" }, { "Species": "Chinstrap", "Island": "Dream", "Beak Length (mm)": 50.2, "Beak Depth (mm)": 18.7, "Flipper Length (mm)": 198, "Body Mass (g)": 3775, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.1, "Beak Depth (mm)": 13.2, "Flipper Length (mm)": 211, "Body Mass (g)": 4500, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50, "Beak Depth (mm)": 16.3, "Flipper Length (mm)": 230, "Body Mass (g)": 5700, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.7, "Beak Depth (mm)": 14.1, "Flipper Length (mm)": 210, "Body Mass (g)": 4450, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50, "Beak Depth (mm)": 15.2, "Flipper Length (mm)": 218, "Body Mass (g)": 5700, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.6, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 215, "Body Mass (g)": 5400, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.5, "Beak Depth (mm)": 13.5, "Flipper Length (mm)": 210, "Body Mass (g)": 4550, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.4, "Beak Depth (mm)": 14.6, "Flipper Length (mm)": 211, "Body Mass (g)": 4800, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.7, "Beak Depth (mm)": 15.3, "Flipper Length (mm)": 219, "Body Mass (g)": 5200, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.3, "Beak Depth (mm)": 13.4, "Flipper Length (mm)": 209, "Body Mass (g)": 4400, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.8, "Beak Depth (mm)": 15.4, "Flipper Length (mm)": 215, "Body Mass (g)": 5150, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 40.9, "Beak Depth (mm)": 13.7, "Flipper Length (mm)": 214, "Body Mass (g)": 4650, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49, "Beak Depth (mm)": 16.1, "Flipper Length (mm)": 216, "Body Mass (g)": 5550, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.5, "Beak Depth (mm)": 13.7, "Flipper Length (mm)": 214, "Body Mass (g)": 4650, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.4, "Beak Depth (mm)": 14.6, "Flipper Length (mm)": 213, "Body Mass (g)": 5850, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.8, "Beak Depth (mm)": 14.6, "Flipper Length (mm)": 210, "Body Mass (g)": 4200, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.3, "Beak Depth (mm)": 15.7, "Flipper Length (mm)": 217, "Body Mass (g)": 5850, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 42, "Beak Depth (mm)": 13.5, "Flipper Length (mm)": 210, "Body Mass (g)": 4150, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.2, 
"Beak Depth (mm)": 15.2, "Flipper Length (mm)": 221, "Body Mass (g)": 6300, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.2, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 209, "Body Mass (g)": 4800, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.7, "Beak Depth (mm)": 15.1, "Flipper Length (mm)": 222, "Body Mass (g)": 5350, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.2, "Beak Depth (mm)": 14.3, "Flipper Length (mm)": 218, "Body Mass (g)": 5700, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.1, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 215, "Body Mass (g)": 5000, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.5, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 213, "Body Mass (g)": 4400, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.3, "Beak Depth (mm)": 15.8, "Flipper Length (mm)": 215, "Body Mass (g)": 5050, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 42.9, "Beak Depth (mm)": 13.1, "Flipper Length (mm)": 215, "Body Mass (g)": 5000, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.1, "Beak Depth (mm)": 15.1, "Flipper Length (mm)": 215, "Body Mass (g)": 5100, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 44.5, "Beak Depth (mm)": 14.3, "Flipper Length (mm)": 216, "Body Mass (g)": 4100, "Sex": null }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.8, "Beak Depth (mm)": 15, "Flipper Length (mm)": 215, "Body Mass (g)": 5650, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.2, "Beak Depth (mm)": 14.3, "Flipper Length (mm)": 210, "Body Mass (g)": 4600, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50, "Beak Depth (mm)": 15.3, "Flipper Length (mm)": 220, "Body Mass (g)": 5550, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.3, "Beak Depth (mm)": 15.3, "Flipper Length (mm)": 222, "Body Mass (g)": 5250, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 42.8, "Beak Depth (mm)": 14.2, "Flipper Length (mm)": 209, "Body Mass (g)": 4700, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.1, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 207, "Body Mass (g)": 5050, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 59.6, "Beak Depth (mm)": 17, "Flipper Length (mm)": 230, "Body Mass (g)": 6050, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.1, "Beak Depth (mm)": 14.8, "Flipper Length (mm)": 220, "Body Mass (g)": 5150, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.4, "Beak Depth (mm)": 16.3, "Flipper Length (mm)": 220, "Body Mass (g)": 5400, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 42.6, "Beak Depth (mm)": 13.7, "Flipper Length (mm)": 213, "Body Mass (g)": 4950, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 44.4, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 219, "Body Mass (g)": 5250, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 44, "Beak Depth (mm)": 13.6, "Flipper Length (mm)": 208, "Body Mass (g)": 4350, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak 
Length (mm)": 48.7, "Beak Depth (mm)": 15.7, "Flipper Length (mm)": 208, "Body Mass (g)": 5350, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 42.7, "Beak Depth (mm)": 13.7, "Flipper Length (mm)": 208, "Body Mass (g)": 3950, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.6, "Beak Depth (mm)": 16, "Flipper Length (mm)": 225, "Body Mass (g)": 5700, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.3, "Beak Depth (mm)": 13.7, "Flipper Length (mm)": 210, "Body Mass (g)": 4300, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.6, "Beak Depth (mm)": 15, "Flipper Length (mm)": 216, "Body Mass (g)": 4750, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.5, "Beak Depth (mm)": 15.9, "Flipper Length (mm)": 222, "Body Mass (g)": 5550, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.6, "Beak Depth (mm)": 13.9, "Flipper Length (mm)": 217, "Body Mass (g)": 4900, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.5, "Beak Depth (mm)": 13.9, "Flipper Length (mm)": 210, "Body Mass (g)": 4200, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.5, "Beak Depth (mm)": 15.9, "Flipper Length (mm)": 225, "Body Mass (g)": 5400, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 44.9, "Beak Depth (mm)": 13.3, "Flipper Length (mm)": 213, "Body Mass (g)": 5100, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.2, "Beak Depth (mm)": 15.8, "Flipper Length (mm)": 215, "Body Mass (g)": 5300, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.6, "Beak Depth (mm)": 14.2, "Flipper Length (mm)": 210, "Body Mass (g)": 4850, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.5, "Beak Depth (mm)": 14.1, "Flipper Length (mm)": 220, "Body Mass (g)": 5300, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.1, "Beak Depth (mm)": 14.4, "Flipper Length (mm)": 210, "Body Mass (g)": 4400, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.1, "Beak Depth (mm)": 15, "Flipper Length (mm)": 225, "Body Mass (g)": 5000, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.5, "Beak Depth (mm)": 14.4, "Flipper Length (mm)": 217, "Body Mass (g)": 4900, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45, "Beak Depth (mm)": 15.4, "Flipper Length (mm)": 220, "Body Mass (g)": 5050, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.8, "Beak Depth (mm)": 13.9, "Flipper Length (mm)": 208, "Body Mass (g)": 4300, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.5, "Beak Depth (mm)": 15, "Flipper Length (mm)": 220, "Body Mass (g)": 5000, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.2, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 208, "Body Mass (g)": 4450, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.4, "Beak Depth (mm)": 15.3, "Flipper Length (mm)": 224, "Body Mass (g)": 5550, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.3, "Beak Depth (mm)": 13.8, "Flipper Length (mm)": 208, "Body Mass (g)": 4200, "Sex": "FEMALE" }, { "Species": "Gentoo", 
"Island": "Biscoe", "Beak Length (mm)": 46.2, "Beak Depth (mm)": 14.9, "Flipper Length (mm)": 221, "Body Mass (g)": 5300, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.7, "Beak Depth (mm)": 13.9, "Flipper Length (mm)": 214, "Body Mass (g)": 4400, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 54.3, "Beak Depth (mm)": 15.7, "Flipper Length (mm)": 231, "Body Mass (g)": 5650, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.8, "Beak Depth (mm)": 14.2, "Flipper Length (mm)": 219, "Body Mass (g)": 4700, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.8, "Beak Depth (mm)": 16.8, "Flipper Length (mm)": 230, "Body Mass (g)": 5700, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.2, "Beak Depth (mm)": 14.4, "Flipper Length (mm)": 214, "Body Mass (g)": 4650, "Sex": null }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.5, "Beak Depth (mm)": 16.2, "Flipper Length (mm)": 229, "Body Mass (g)": 5800, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.5, "Beak Depth (mm)": 14.2, "Flipper Length (mm)": 220, "Body Mass (g)": 4700, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.7, "Beak Depth (mm)": 15, "Flipper Length (mm)": 223, "Body Mass (g)": 5550, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.7, "Beak Depth (mm)": 15, "Flipper Length (mm)": 216, "Body Mass (g)": 4750, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.4, "Beak Depth (mm)": 15.6, "Flipper Length (mm)": 221, "Body Mass (g)": 5000, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.2, "Beak Depth (mm)": 15.6, "Flipper Length (mm)": 221, "Body Mass (g)": 5100, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.5, "Beak Depth (mm)": 14.8, "Flipper Length (mm)": 217, "Body Mass (g)": 5200, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.4, "Beak Depth (mm)": 15, "Flipper Length (mm)": 216, "Body Mass (g)": 4700, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.6, "Beak Depth (mm)": 16, "Flipper Length (mm)": 230, "Body Mass (g)": 5800, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.5, "Beak Depth (mm)": 14.2, "Flipper Length (mm)": 209, "Body Mass (g)": 4600, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 51.1, "Beak Depth (mm)": 16.3, "Flipper Length (mm)": 220, "Body Mass (g)": 6000, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.2, "Beak Depth (mm)": 13.8, "Flipper Length (mm)": 215, "Body Mass (g)": 4750, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.2, "Beak Depth (mm)": 16.4, "Flipper Length (mm)": 223, "Body Mass (g)": 5950, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.1, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 212, "Body Mass (g)": 4625, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 52.5, "Beak Depth (mm)": 15.6, "Flipper Length (mm)": 221, "Body Mass (g)": 5450, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.4, "Beak Depth (mm)": 14.6, "Flipper Length (mm)": 212, "Body Mass (g)": 4725, "Sex": "FEMALE" }, { 
"Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50, "Beak Depth (mm)": 15.9, "Flipper Length (mm)": 224, "Body Mass (g)": 5350, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 44.9, "Beak Depth (mm)": 13.8, "Flipper Length (mm)": 212, "Body Mass (g)": 4750, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.8, "Beak Depth (mm)": 17.3, "Flipper Length (mm)": 228, "Body Mass (g)": 5600, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.4, "Beak Depth (mm)": 14.4, "Flipper Length (mm)": 218, "Body Mass (g)": 4600, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 51.3, "Beak Depth (mm)": 14.2, "Flipper Length (mm)": 218, "Body Mass (g)": 5300, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.5, "Beak Depth (mm)": 14, "Flipper Length (mm)": 212, "Body Mass (g)": 4875, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 52.1, "Beak Depth (mm)": 17, "Flipper Length (mm)": 230, "Body Mass (g)": 5550, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.5, "Beak Depth (mm)": 15, "Flipper Length (mm)": 218, "Body Mass (g)": 4950, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 52.2, "Beak Depth (mm)": 17.1, "Flipper Length (mm)": 228, "Body Mass (g)": 5400, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.5, "Beak Depth (mm)": 14.5, "Flipper Length (mm)": 212, "Body Mass (g)": 4750, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.5, "Beak Depth (mm)": 16.1, "Flipper Length (mm)": 224, "Body Mass (g)": 5650, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 44.5, "Beak Depth (mm)": 14.7, "Flipper Length (mm)": 214, "Body Mass (g)": 4850, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.8, "Beak Depth (mm)": 15.7, "Flipper Length (mm)": 226, "Body Mass (g)": 5200, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.4, "Beak Depth (mm)": 15.8, "Flipper Length (mm)": 216, "Body Mass (g)": 4925, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.9, "Beak Depth (mm)": 14.6, "Flipper Length (mm)": 222, "Body Mass (g)": 4875, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.4, "Beak Depth (mm)": 14.4, "Flipper Length (mm)": 203, "Body Mass (g)": 4625, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 51.1, "Beak Depth (mm)": 16.5, "Flipper Length (mm)": 225, "Body Mass (g)": 5250, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.5, "Beak Depth (mm)": 15, "Flipper Length (mm)": 219, "Body Mass (g)": 4850, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 55.9, "Beak Depth (mm)": 17, "Flipper Length (mm)": 228, "Body Mass (g)": 5600, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.2, "Beak Depth (mm)": 15.5, "Flipper Length (mm)": 215, "Body Mass (g)": 4975, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.1, "Beak Depth (mm)": 15, "Flipper Length (mm)": 228, "Body Mass (g)": 5500, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.3, "Beak Depth (mm)": 13.8, "Flipper Length (mm)": 216, "Body Mass (g)": 4725, 
"Sex": null }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.8, "Beak Depth (mm)": 16.1, "Flipper Length (mm)": 215, "Body Mass (g)": 5500, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 41.7, "Beak Depth (mm)": 14.7, "Flipper Length (mm)": 210, "Body Mass (g)": 4700, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 53.4, "Beak Depth (mm)": 15.8, "Flipper Length (mm)": 219, "Body Mass (g)": 5500, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.3, "Beak Depth (mm)": 14, "Flipper Length (mm)": 208, "Body Mass (g)": 4575, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.1, "Beak Depth (mm)": 15.1, "Flipper Length (mm)": 209, "Body Mass (g)": 5500, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.5, "Beak Depth (mm)": 15.2, "Flipper Length (mm)": 216, "Body Mass (g)": 5000, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.8, "Beak Depth (mm)": 15.9, "Flipper Length (mm)": 229, "Body Mass (g)": 5950, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 43.5, "Beak Depth (mm)": 15.2, "Flipper Length (mm)": 213, "Body Mass (g)": 4650, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 51.5, "Beak Depth (mm)": 16.3, "Flipper Length (mm)": 230, "Body Mass (g)": 5500, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.2, "Beak Depth (mm)": 14.1, "Flipper Length (mm)": 217, "Body Mass (g)": 4375, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 55.1, "Beak Depth (mm)": 16, "Flipper Length (mm)": 230, "Body Mass (g)": 5850, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 44.5, "Beak Depth (mm)": 15.7, "Flipper Length (mm)": 217, "Body Mass (g)": 4875, "Sex": "." 
}, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 48.8, "Beak Depth (mm)": 16.2, "Flipper Length (mm)": 222, "Body Mass (g)": 6000, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 47.2, "Beak Depth (mm)": 13.7, "Flipper Length (mm)": 214, "Body Mass (g)": 4925, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": null, "Beak Depth (mm)": null, "Flipper Length (mm)": null, "Body Mass (g)": null, "Sex": null }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 46.8, "Beak Depth (mm)": 14.3, "Flipper Length (mm)": 215, "Body Mass (g)": 4850, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 50.4, "Beak Depth (mm)": 15.7, "Flipper Length (mm)": 222, "Body Mass (g)": 5750, "Sex": "MALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 45.2, "Beak Depth (mm)": 14.8, "Flipper Length (mm)": 212, "Body Mass (g)": 5200, "Sex": "FEMALE" }, { "Species": "Gentoo", "Island": "Biscoe", "Beak Length (mm)": 49.9, "Beak Depth (mm)": 16.1, "Flipper Length (mm)": 213, "Body Mass (g)": 5400, "Sex": "MALE" } ]usql-0.19.19/contrib/clickhouse/000077500000000000000000000000001476173253300164635ustar00rootroot00000000000000usql-0.19.19/contrib/clickhouse/podman-config000066400000000000000000000002641476173253300211310ustar00rootroot00000000000000NAME=clickhouse IMAGE=docker.io/clickhouse/clickhouse-server PUBLISH=9000:9000 ENV="CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 CLICKHOUSE_USER=clickhouse CLICKHOUSE_PASSWORD=P4ssw0rd" usql-0.19.19/contrib/clickhouse/usql-config000066400000000000000000000001241476173253300206320ustar00rootroot00000000000000DB="clickhouse://clickhouse:P4ssw0rd@localhost" VSQL="select version() as version;" usql-0.19.19/contrib/cockroach/000077500000000000000000000000001476173253300162665ustar00rootroot00000000000000usql-0.19.19/contrib/cockroach/podman-config000066400000000000000000000002761476173253300207370ustar00rootroot00000000000000NAME=cockroach IMAGE=docker.io/cockroachdb/cockroach:latest PUBLISH=26257:26257 ENV="COCKROACH_DATABASE=cockroach COCKROACH_USER=cockroach COCKROACH_PASSWORD=P4ssw0rd" CMD=start-single-node usql-0.19.19/contrib/config.yaml000066400000000000000000000025311476173253300164640ustar00rootroot00000000000000--- # named connections connections: my_couchbase_conn: couchbase://Administrator:P4ssw0rd@localhost my_clickhouse_conn: clickhouse://clickhouse:P4ssw0rd@localhost css: cassandra://cassandra:cassandra@localhost fsl: flightsql://flight_username:P4ssw0rd@localhost gdr: protocol: godror username: system password: P4ssw0rd hostname: localhost port: 1521 database: free ign: ignite://ignite:ignite@localhost mss: sqlserver://sa:Adm1nP@ssw0rd@localhost mym: mysql://root:P4ssw0rd@localhost myz: mymysql://root:P4ssw0rd@localhost ora: oracle://system:P4ssw0rd@localhost/free ore: oracle://system:P4ssw0rd@localhost:1522/db1 pgs: postgres://postgres:P4ssw0rd@localhost pgx: pgx://postgres:P4ssw0rd@localhost vrt: proto: vertica user: vertica pass: vertica host: localhost sll: file: /path/to/mydb.sqlite3 mdc: modernsqlite:test.db dkd: test.duckdb zzz: ["databricks", "token:dapi*****@adb-*************.azuredatabricks.net:443/sql/protocolv1/o/*********/*******"] zz2: proto: mysql user: 'my username' pass: 'my password!' 
host: localhost opts: opt1: "😀" # startup script init: | \echo welcome to the jungle `date` \set SYNTAX_HL_STYLE paraiso-dark \set PROMPT1 '\033[32m%S%M%/%R%#\033[0m ' # charts path charts_path: charts # defined queries queries: q1: usql-0.19.19/contrib/couchbase/000077500000000000000000000000001476173253300162665ustar00rootroot00000000000000usql-0.19.19/contrib/couchbase/README.md000066400000000000000000000002631476173253300175460ustar00rootroot00000000000000# Couchbase Notes ```sh $ podman volume create couchbase-data ``` After running the docker image, browse to http://127.0.0.1:8091/ui/index.html and manually configure database. usql-0.19.19/contrib/couchbase/podman-config000066400000000000000000000001661476173253300207350ustar00rootroot00000000000000NAME=couchbase IMAGE=docker.io/library/couchbase PUBLISH=8091-8094:8091-8094 VOLUME=couchbase-data:/opt/couchbase/var usql-0.19.19/contrib/couchbase/usql-config000066400000000000000000000002571476173253300204440ustar00rootroot00000000000000# NOTE: this will only work after setting up a database on http://localhost:8091/ DB="couchbase://Administrator:P4ssw0rd@localhost" VSQL="select raw ds_version() as version;" usql-0.19.19/contrib/db2/000077500000000000000000000000001476173253300150015ustar00rootroot00000000000000usql-0.19.19/contrib/db2/README.md000066400000000000000000000011311476173253300162540ustar00rootroot00000000000000# db2 Notes 1. Install unixodbc: ```sh $ sudo aptitude install unixodbc unixodbc-bin unixodbc-dev ``` 2. Download `dsdriver` and install: ```sh $ ls ~/Downloads/ibm_data_server_driver_package_linuxx64_v11.5.tar.gz /home/ken/Downloads/ibm_data_server_driver_package_linuxx64_v11.5.tar.gz $ sudo ./install-dsdriver.sh ``` 3. Copy ODBC and CLI configs: ```sh $ cat odbcinst.ini | sudo tee -a /etc/odbcinst.ini $ sudo cp {db2cli.ini,db2dsdriver.cfg} /opt/db2/clidriver/cfg/ ``` 4. Run DB2 docker image: ```sh $ ../docker-run.sh db2 -u ``` 5. Verify DB2 working: ```sh $ ./db2cli-validate.sh ``` usql-0.19.19/contrib/db2/db2cli-validate.sh000077500000000000000000000010771476173253300202730ustar00rootroot00000000000000#!/bin/bash # see https://www.ibm.com/developerworks/community/blogs/ff78a96f-bf23-457e-befa-77f266844cbb/entry/db2cli_validate_command_line_tool_for_validating_and_testing_cli_environment_and_configuration?lang=en # see https://blogs.sas.com/content/sgf/2017/11/16/connecting-sas-db2-database-via-odbc-without-tears/ CLIDRIVER=${1:-/opt/db2/clidriver} export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CLIDRIVER/lib export DB2CLIINIPATH=$CLIDRIVER/cfg export DB2DSDRIVER_CFG_PATH=$CLIDRIVER/cfg $CLIDRIVER/bin/db2cli validate -dsn SAMPLE -connect -user db2inst1 -passwd P4ssw0rd usql-0.19.19/contrib/db2/db2cli.ini000066400000000000000000000001561476173253300166430ustar00rootroot00000000000000[sample] Database = testdb Protocol = TCPIP Hostname = localhost ServiceName = 50000 usql-0.19.19/contrib/db2/db2dsdriver.cfg000066400000000000000000000003661476173253300177010ustar00rootroot00000000000000 usql-0.19.19/contrib/db2/install-dsdriver.sh000077500000000000000000000013431476173253300206270ustar00rootroot00000000000000#!/bin/bash DEST=${1:-/opt/db2} FILE=$2 if [ ! -w $DEST ]; then echo "ERROR: not able to write to $DEST" exit 1 fi echo "DEST: $DEST" if [ ! 
-e "$DEST" ]; then echo "$DEST does not exist" exit 1 fi if [ -z "$FILE" ]; then FILE=$(ls $HOME/Downloads/ibm_data_server_driver_package_linuxx64_*.tar.gz||:) fi if [ -z "$FILE" ]; then echo "cannot find driver package to extract" exit 1 fi set -e USER=$(whoami) # extract pushd $DEST &> /dev/null echo "EXTRACTING: $FILE" # extract tar -zxf $FILE tar -zxf dsdriver/odbc_cli_driver/linuxamd64/ibm_data_server_driver_for_odbc_cli.tar.gz # fix permissions chown $USER:$USER -R . find ./ -type d -exec chmod 0755 {} \; find ./ -type d -exec chmod -s {} \; popd &> /dev/null usql-0.19.19/contrib/db2/odbcinst.ini000066400000000000000000000001431476173253300173050ustar00rootroot00000000000000[DB2] Description=DB2 driver Driver=/opt/db2/clidriver/lib/libdb2.so FileUsage = 1 DontDLClose = 1 usql-0.19.19/contrib/db2/podman-config000066400000000000000000000002631476173253300174460ustar00rootroot00000000000000NAME=db2 PUBLISH="50000:50000 55000:55000" IMAGE=docker.io/ibmcom/db2 ENV="LICENSE=accept DB2INSTANCE=db2inst1 DB2INST1_PASSWORD=P4ssw0rd DBNAME=testdb" VOLUME=db2-data:/database usql-0.19.19/contrib/db2/test.sql000066400000000000000000000007301476173253300165010ustar00rootroot00000000000000\connect odbc+db2://db2inst1:P4ssw0rd@localhost/testdb create schema test; create table test.mytable ( COL1 INTEGER NOT NULL, COL2 CHAR(25), COL3 VARCHAR(25) NOT NULL, COL4 DATE, COL5 DECIMAL(10,2), PRIMARY KEY (COL1), UNIQUE (COL3) ); insert into test.mytable (col1, col2, col3, col4, col5) values (1, 'a', 'first', current date, 15.0), (2, 'b', 'second', current date, 16.0), (3, 'c', 'third', current date, 17.0) ; select * from test.mytable; usql-0.19.19/contrib/db2/usql-config000066400000000000000000000001701476173253300171510ustar00rootroot00000000000000DB="odbc+db2://db2inst1:P4ssw0rd@localhost/testdb" VSQL="SELECT service_level AS version FROM sysibmadm.env_inst_info;" usql-0.19.19/contrib/duckdb/000077500000000000000000000000001476173253300155665ustar00rootroot00000000000000usql-0.19.19/contrib/duckdb/usql-config000066400000000000000000000001301476173253300177320ustar00rootroot00000000000000DB="duckdb:test.duckdb" VSQL="SELECT library_version AS version FROM pragma_version();" usql-0.19.19/contrib/exasol/000077500000000000000000000000001476173253300156255ustar00rootroot00000000000000usql-0.19.19/contrib/exasol/podman-config000066400000000000000000000000771476173253300202750ustar00rootroot00000000000000NAME=exasol IMAGE=docker.io/exasol/docker-db PUBLISH=8563:8563 usql-0.19.19/contrib/exasol/usql-config000066400000000000000000000002251476173253300177760ustar00rootroot00000000000000DB="exasol://sys:exasol@localhost/?encryption=0" VSQL="SELECT param_value AS version FROM exa_metadata WHERE param_name = 'databaseProductVersion';" usql-0.19.19/contrib/firebird/000077500000000000000000000000001476173253300161205ustar00rootroot00000000000000usql-0.19.19/contrib/firebird/podman-config000066400000000000000000000002311476173253300205600ustar00rootroot00000000000000NAME=firebird IMAGE=docker.io/jacobalberty/firebird PUBLISH=3050:3050 ENV="FIREBIRD_DATABASE=booktest FIREBIRD_USER=booktest FIREBIRD_PASSWORD=booktest" usql-0.19.19/contrib/firebird/usql-config000066400000000000000000000002171476173253300202720ustar00rootroot00000000000000DB="firebird://booktest:booktest@localhost/booktest" VSQL="SELECT rdb\$get_context('SYSTEM', 'ENGINE_VERSION') AS version FROM rdb\$database;" 
usql-0.19.19/contrib/flightsql/000077500000000000000000000000001476173253300163275ustar00rootroot00000000000000usql-0.19.19/contrib/flightsql/podman-config000066400000000000000000000001511476173253300207700ustar00rootroot00000000000000NAME=flightsql IMAGE=docker.io/voltrondata/flight-sql PUBLISH=31337:31337 ENV="FLIGHT_PASSWORD=P4ssw0rd" usql-0.19.19/contrib/flightsql/usql-config000066400000000000000000000001561476173253300205030ustar00rootroot00000000000000DB="flightsql://flight_username:P4ssw0rd@localhost:31337?tls=skip-verify" VSQL="SELECT version() AS version;" usql-0.19.19/contrib/go-setup.sh000077500000000000000000000033651476173253300164430ustar00rootroot00000000000000#!/bin/bash # trimmed down version of: # https://github.com/kenshaw/shell-config/blob/master/scripts/go-setup.sh ARCH=$(uname -m) PLATFORM=linux case $ARCH in aarch64) ARCH=arm64 ;; x86_64) ARCH=amd64 ;; esac REPO=https://go.googlesource.com/go DL=https://go.dev/dl/ EXT=tar.gz DEST=/usr/local set -e LATEST=$(curl -4 -s "$DL"|sed -E -n "/go1\.[0-9]+(\.[0-9]+)?\.$PLATFORM-$ARCH\.$EXT(.+?)<\/a.*/\1/' <<< "$LATEST") STABLE=$(sed -E -e 's/^go//' -e "s/\.$PLATFORM-$ARCH\.$EXT$//" <<< "$ARCHIVE") if ! [[ "$STABLE" =~ ^1\.[0-9\.]+$ ]]; then echo "ERROR: unable to retrieve latest Go version for $PLATFORM/$ARCH ($STABLE)" exit 1 fi REMOTE=$(sed -E -e 's/.* $2" curl -4 -L -# -o $2 $1 } # extract WORKDIR=$(mktemp -d /tmp/go-setup.XXXX) grab $REMOTE $WORKDIR/$ARCHIVE echo "USING: $WORKDIR/$ARCHIVE" pushd $WORKDIR &> /dev/null case $EXT in tar.gz) tar -zxf $ARCHIVE ;; zip) unzip -q $ARCHIVE ;; *) echo "ERROR: unknown extension $EXT" exit ;; esac echo "MOVING: $WORKDIR/go -> $DEST/go" mv go $DEST/go chown -R root:root $DEST/go echo "INSTALLED: $($DEST/go/bin/go version)" usql-0.19.19/contrib/godror/000077500000000000000000000000001476173253300156265ustar00rootroot00000000000000usql-0.19.19/contrib/godror/fix-oob-config.sh000077500000000000000000000003741476173253300207770ustar00rootroot00000000000000#!/bin/bash # adds DISABLE_OOB=on to user's .sqlnet.ora config # # See: # https://github.com/oracle/docker-images/issues/1352 # https://franckpachot.medium.com/19c-instant-client-and-docker-1566630ab20e echo "DISABLE_OOB=ON" >> $HOME/.sqlnet.ora usql-0.19.19/contrib/godror/grab-instantclient.sh000077500000000000000000000047511476173253300217640ustar00rootroot00000000000000#!/bin/bash DEST=${1:-/opt/oracle} # available versions: # 21.7.0.0.0 # 21.6.0.0.0 # 21.1.0.0.0 # 19.9.0.0.0 # 18.5.0.0.0 # 12.2.0.1.0 VERSION= OPTIND=1 while getopts "v:" opt; do case "$opt" in v) VERSION=$OPTARG ;; esac done if [ -z "$VERSION" ]; then VERSION=$( wget --quiet -O- https://www.oracle.com/database/technologies/instant-client/linux-x86-64-downloads.html| \ sed -n -e 's/.*\/instantclient-basic-linux\.x64-\([^d]\+\)dbru\.zip.*/\1/p' | \ head -1 ) fi if [[ ! "$VERSION" =~ ^[0-9\.]+$ ]]; then echo "error: invalid VERSION" exit 1 fi BASE=https://download.oracle.com/otn_software/linux/instantclient/$(sed -e 's/[^0-9]//g' <<< "$VERSION") # build list of archives to retrieve declare -a ARCHIVES for i in basic sdk sqlplus; do ARCHIVES+=("$BASE/instantclient-$i-linux.x64-${VERSION}dbru.zip") done grab() { echo -n "RETRIEVING: $1 -> $2 " wget --progress=dot -O $2 $1 2>&1 |\ grep --line-buffered "%" | \ sed -u -e "s,\.,,g" | \ awk '{printf("\b\b\b\b%4s", $2)}' echo -ne "\b\b\b\b" echo " DONE." } cache() { FILE=$(basename $2) if [ ! -f $1/$FILE ]; then grab $2 $1/$FILE fi } set -e echo "DEST: $DEST" if [ ! -w $DEST ]; then echo "$DEST is not writable!" exit 1 fi if [ ! 
-e "$DEST" ]; then echo "$DEST does not exist!" exit 1 fi # retrieve archives for i in ${ARCHIVES[@]}; do cache $DEST $i done # remove existing directory, if any DVER=$(awk -F. '{print $1 "_" $2}' <<< "$VERSION") if [ -e $DEST/instantclient_$DVER ]; then echo "REMOVING: $DEST/instantclient_$DVER" rm -rf $DEST/instantclient_$DVER fi # extract pushd $DEST &> /dev/null for i in ${ARCHIVES[@]}; do unzip -qq $(basename $i) done popd &> /dev/null # write pkg-config file DATA=$(cat < $DEST/oci8.pc rm -f /etc/ld.so.conf.d/oracle-instantclient.conf echo "$DEST/instantclient_$DVER" | tee -a /etc/ld.so.conf.d/oracle-instantclient.conf ldconfig -v # write sqlnet.ora DATA=$(cat < $DEST/instantclient_${DVER}/network/admin/sqlnet.ora usql-0.19.19/contrib/godror/usql-config000066400000000000000000000001261476173253300177770ustar00rootroot00000000000000DB="godror://system:P4ssw0rd@localhost/orasid" VSQL="SELECT version FROM v\$instance" usql-0.19.19/contrib/h2/000077500000000000000000000000001476173253300146435ustar00rootroot00000000000000usql-0.19.19/contrib/h2/podman-config000066400000000000000000000001101476173253300172770ustar00rootroot00000000000000NAME=h2 IMAGE=docker.io/buildo/h2database PUBLISH="8082:8082 9092:9092" usql-0.19.19/contrib/hive/000077500000000000000000000000001476173253300152655ustar00rootroot00000000000000usql-0.19.19/contrib/hive/podman-config000066400000000000000000000001641476173253300177320ustar00rootroot00000000000000NAME=hive IMAGE=docker.io/apache/hive:4.0.0-beta-1 PUBLISH="10000:10000 10002:10002" ENV="SERVICE_NAME=hiveserver2" usql-0.19.19/contrib/hive/usql-config000066400000000000000000000001041476173253300174320ustar00rootroot00000000000000DB="hive://user:pass@localhost" VSQL="SELECT version() AS version;" usql-0.19.19/contrib/ignite/000077500000000000000000000000001476173253300156115ustar00rootroot00000000000000usql-0.19.19/contrib/ignite/README.md000066400000000000000000000001331476173253300170650ustar00rootroot00000000000000# Ignite Notes After starting the database, run `activate.sh`: ```sh $ ./activate.sh ``` usql-0.19.19/contrib/ignite/activate.sh000077500000000000000000000002101476173253300177410ustar00rootroot00000000000000#!/bin/bash docker exec -it ignite \ /opt/ignite/apache-ignite/bin/control.sh \ --activate \ --user ignite \ --password ignite usql-0.19.19/contrib/ignite/podman-config000066400000000000000000000001111476173253300202460ustar00rootroot00000000000000NAME=ignite IMAGE=docker.io/usql/ignite PUBLISH=10800:10800 NETWORK=host usql-0.19.19/contrib/ignite/usql-config000066400000000000000000000000601476173253300177570ustar00rootroot00000000000000DB="ignite://ignite:ignite@localhost/ExampleDB" usql-0.19.19/contrib/mymysql/000077500000000000000000000000001476173253300160455ustar00rootroot00000000000000usql-0.19.19/contrib/mymysql/usql-config000066400000000000000000000001141476173253300202130ustar00rootroot00000000000000DB="mymysql://root:P4ssw0rd@localhost/" VSQL="SELECT version() AS version;" usql-0.19.19/contrib/mysql/000077500000000000000000000000001476173253300154775ustar00rootroot00000000000000usql-0.19.19/contrib/mysql/podman-config000066400000000000000000000001401476173253300201360ustar00rootroot00000000000000NAME=mysql IMAGE=docker.io/library/mariadb PUBLISH=3306:3306 ENV="MYSQL_ROOT_PASSWORD=P4ssw0rd" usql-0.19.19/contrib/mysql/test.sql000066400000000000000000000036501476173253300172030ustar00rootroot00000000000000-- mysql test script \set \set SYNTAX_HL_FORMAT terminal16m \set SYNTAX_HL true \? 
\copyright \set SYNTAX_HL_STYLE dracula select 'test'' ' \g \set NAME myname drop database if exists testdb; create database testdb; use testdb; SET FOREIGN_KEY_CHECKS=0; DROP TABLE IF EXISTS authors; DROP TABLE IF EXISTS books; DROP FUNCTION IF EXISTS say_hello; SET FOREIGN_KEY_CHECKS=1; CREATE TABLE authors ( author_id integer NOT NULL AUTO_INCREMENT PRIMARY KEY, name text NOT NULL DEFAULT '' ) ENGINE=InnoDB; CREATE INDEX authors_name_idx ON authors(name(255)); \set SYNTAX_HL_STYLE paraiso-dark CREATE TABLE books ( /* this is a multiline comment */ book_id integer NOT NULL AUTO_INCREMENT PRIMARY KEY, author_id integer NOT NULL, isbn varchar(255) NOT NULL DEFAULT '' UNIQUE, book_type ENUM('FICTION', 'NONFICTION') NOT NULL DEFAULT 'FICTION', title text NOT NULL DEFAULT '', year integer NOT NULL DEFAULT 2000, available datetime NOT NULL DEFAULT NOW(), tags text NOT NULL DEFAULT '', CONSTRAINT FOREIGN KEY (author_id) REFERENCES authors(author_id) ) ENGINE=InnoDB; CREATE INDEX books_title_idx ON books(title, year); insert into authors (name) values ('jk rowling'), ('author amazing') \g select * from authors; \set COLNAME name \set NAME amaz \echo `echo hello` select :"COLNAME" from authors where :COLNAME like '%' || :'NAME' || '%' \print \raw \g \gset AUTHOR_ select :'AUTHOR_name'; \begin insert into authors (name) values ('test'); \rollback insert into authors (name) values ('hello'); select * from authors; insert into books (author_id, isbn, title, year, available) values (1, '1', 'one', 2018, '2018-06-01 00:00:00'), (2, '2', 'two', 2019, '2019-06-01 00:00:00') ; select * from books b inner join authors a on a.author_id = b.author_id; CREATE FUNCTION say_hello(s text) RETURNS text DETERMINISTIC RETURN CONCAT('hello ', s); select say_hello('a name!') \G /* exiting! 
*/ \q usql-0.19.19/contrib/mysql/usql-config000066400000000000000000000001121476173253300176430ustar00rootroot00000000000000DB="mysql://root:P4ssw0rd@localhost/" VSQL="SELECT version() AS version;" usql-0.19.19/contrib/oracle-enterprise/000077500000000000000000000000001476173253300177555ustar00rootroot00000000000000usql-0.19.19/contrib/oracle-enterprise/podman-config000066400000000000000000000003551476173253300224240ustar00rootroot00000000000000NAME=oracle-enterprise IMAGE=container-registry.oracle.com/database/enterprise:21.3.0.0 PUBLISH=1522:1521 ENV="ORACLE_PDB=db1 ORACLE_PWD=P4ssw0rd" NETWORK="slirp4netns:enable_ipv6=false" VOLUME=oracle-enterprise-data:/opt/oracle/oradata usql-0.19.19/contrib/oracle-enterprise/usql-config000066400000000000000000000001301476173253300221210ustar00rootroot00000000000000DB="oracle://system:P4ssw0rd@localhost:1522/db1" VSQL="SELECT version FROM v\$instance" usql-0.19.19/contrib/oracle/000077500000000000000000000000001476173253300155775ustar00rootroot00000000000000usql-0.19.19/contrib/oracle/init.sql000066400000000000000000000012541476173253300172650ustar00rootroot00000000000000\set ORACLE_USER system \set ORACLE_PASS oracle \set ORACLE_SVC xe \set ORACLE_HOST `docker port oracle 1521` \prompt NAME 'Create database user: ' \prompt -password PASS 'Password for "':NAME'": ' \connect 'oracle://':ORACLE_USER':':ORACLE_PASS'@':ORACLE_HOST'/':ORACLE_SVC \set DATNAME :NAME.dat CREATE TABLESPACE :NAME NOLOGGING DATAFILE :'DATNAME' SIZE 100m AUTOEXTEND ON; CREATE USER :NAME IDENTIFIED BY :NAME DEFAULT TABLESPACE :NAME; GRANT CREATE SESSION, CREATE TABLE, CREATE VIEW, CREATE SEQUENCE, CREATE PROCEDURE, CREATE TRIGGER, UNLIMITED TABLESPACE, SELECT ANY DICTIONARY TO :NAME; ALTER SYSTEM SET OPEN_CURSORS=400 SCOPE=both; usql-0.19.19/contrib/oracle/podman-config000066400000000000000000000003151476173253300202420ustar00rootroot00000000000000NAME=oracle IMAGE=container-registry.oracle.com/database/free PUBLISH=1521:1521 ENV="ORACLE_PDB=db1 ORACLE_PWD=P4ssw0rd" NETWORK="slirp4netns:enable_ipv6=false" VOLUME=oracle-free-data:/opt/oracle/oradata usql-0.19.19/contrib/oracle/usql-config000066400000000000000000000001241476173253300177460ustar00rootroot00000000000000DB="oracle://system:P4ssw0rd@localhost/free" VSQL="SELECT version FROM v\$instance" usql-0.19.19/contrib/pgx/000077500000000000000000000000001476173253300151305ustar00rootroot00000000000000usql-0.19.19/contrib/pgx/usql-config000066400000000000000000000001661476173253300173050ustar00rootroot00000000000000DB="pgx://postgres:P4ssw0rd@localhost" VSQL="SELECT setting AS version FROM pg_settings WHERE name='server_version';" usql-0.19.19/contrib/podman-run.sh000077500000000000000000000061361476173253300167570ustar00rootroot00000000000000#!/bin/bash # podman-run.sh: starts or restarts podman containers. # # Usage: podman-run.sh [-u] # # Where is a name of a subdirectory containing podman-config, # 'all', or 'test'. # # all -- starts all available database images. # test -- starts the primary testing images. The testing images are cassandra, mysql, postgres, sqlserver, and oracle # -u -- perform podman pull for images prior to start. # # Will stop any running podman container prior to starting. DIR=$1 SRC=$(realpath $(cd -P "$(dirname "${BASH_SOURCE[0]}" )" && pwd)) if [ -z "$DIR" ]; then echo "usage: $0 [-u]" exit 1 fi shift UPDATE=0 OPTIND=1 while getopts "u" opt; do case "$opt" in u) UPDATE=1 ;; esac done podman_run() { TARGET=$1 BASE=$SRC/$TARGET if [ ! 
-e $BASE/podman-config ]; then echo "error: $BASE/podman-config doesn't exist" exit 1 fi # load parameters from podman-config unset IMAGE NAME PUBLISH ENV VOLUME NETWORK PRIVILEGED HOSTNAME PARAMS CMD source $BASE/podman-config if [[ "$TARGET" != "$NAME" ]]; then echo "error: $BASE/podman-config is invalid" exit 1 fi # setup params PARAMS=() for k in NAME PUBLISH ENV VOLUME NETWORK PRIVILEGED HOSTNAME; do n=$(tr 'A-Z' 'a-z' <<< "$k") v=$(eval echo "\$$k") if [ ! -z "$v" ]; then for p in $v; do PARAMS+=("--$n=$p") done fi done # determine if image exists EXISTS=$(podman image ls -q $IMAGE) if [[ "$UPDATE" == "0" && -z "$EXISTS" ]]; then UPDATE=1 fi # show parameters echo "-------------------------------------------" echo "NAME: $NAME $HOSTNAME" echo "IMAGE: $IMAGE (update: $UPDATE)" echo "PUBLISH: $PUBLISH" echo "ENV: $ENV" echo "VOLUME: $VOLUME" echo "NETWORK: $NETWORK" echo "PRIVILEGED: $PRIVILEGED" echo "CMD: $CMD" echo # update if [[ "$UPDATE" == "1" ]]; then if [ ! -f $BASE/Dockerfile ]; then (set -ex; podman pull $IMAGE ) else pushd $BASE &> /dev/null (set -ex; podman build --pull -t $IMAGE:latest . ) popd &> /dev/null fi REF=$(awk -F: '{print $1}' <<< "$IMAGE") REMOVE=$(podman image list --filter=dangling=true --filter=reference=$IMAGE -q) if [ ! -z "$REMOVE" ]; then (set -ex; podman image rm -f $REMOVE ) fi fi # stop and remove if [ ! -z "$(podman ps -q --filter "name=$NAME")" ]; then (set -x; podman stop $NAME ) fi if [ ! -z "$(podman ps -q -a --filter "name=$NAME")" ]; then (set -x; podman rm -f $NAME ) fi # start (set -ex; podman run --detach --rm ${PARAMS[@]} $IMAGE $CMD ) echo } pushd $SRC &> /dev/null TARGETS=() case $DIR in all) TARGETS+=($(find . -type f -name podman-config|awk -F'/' '{print $2}'|grep -v db2)) if [[ "$(podman image ls -q --filter 'reference=docker.io/ibmcom/db2')" != "" ]]; then TARGETS+=(db2) fi ;; test) TARGETS+=(mysql postgres sqlserver oracle clickhouse cassandra) ;; *) TARGETS+=($DIR) ;; esac for TARGET in ${TARGETS[@]}; do podman_run $TARGET done popd &> /dev/null usql-0.19.19/contrib/podman-stop.sh000077500000000000000000000005761476173253300171420ustar00rootroot00000000000000#!/bin/bash SRC=$(realpath $(cd -P "$(dirname "${BASH_SOURCE[0]}" )" && pwd)) for TARGET in $SRC/*/podman-config; do NAME=$(basename $(dirname $TARGET)) if [ ! -z "$(podman ps -q --filter "name=$NAME")" ]; then (set -x; podman stop $NAME ) fi if [ ! 
-z "$(podman ps -q -a --filter "name=$NAME")" ]; then (set -x; podman rm -f $NAME ) fi done usql-0.19.19/contrib/postgres/000077500000000000000000000000001476173253300162005ustar00rootroot00000000000000usql-0.19.19/contrib/postgres/init.sql000066400000000000000000000007251476173253300176700ustar00rootroot00000000000000\set POSTGRES_USER postgres \set POSTGRES_PASS P4ssw0rd \set POSTGRES_DB postgres \set POSTGRES_HOST `docker port postgres 5432 | head -n1` \prompt NAME 'Create database user: ' \prompt -password PASS 'Password for "':NAME'": ' \connect 'postgres://':POSTGRES_USER':':POSTGRES_PASS'@':POSTGRES_HOST'/':POSTGRES_DB'?sslmode=disable' DROP USER IF EXISTS :NAME; CREATE USER :NAME PASSWORD :'PASS'; DROP DATABASE IF EXISTS :NAME; CREATE DATABASE :NAME OWNER :NAME; usql-0.19.19/contrib/postgres/odbcinst.ini000066400000000000000000000004411476173253300205050ustar00rootroot00000000000000[PostgreSQL ANSI] Description=PostgreSQL ODBC driver (ANSI version) Driver=psqlodbca.so Setup=libodbcpsqlS.so Debug=0 CommLog=1 UsageCount=1 [PostgreSQL Unicode] Description=PostgreSQL ODBC driver (Unicode version) Driver=psqlodbcw.so Setup=libodbcpsqlS.so Debug=0 CommLog=1 UsageCount=1 usql-0.19.19/contrib/postgres/podman-config000066400000000000000000000001371476173253300206450ustar00rootroot00000000000000NAME=postgres IMAGE=docker.io/usql/postgres PUBLISH=5432:5432 ENV="POSTGRES_PASSWORD=P4ssw0rd" usql-0.19.19/contrib/postgres/schema.sql000066400000000000000000000017671476173253300201740ustar00rootroot00000000000000\connect postgres://booktest:booktest@localhost/ DROP TABLE IF EXISTS books CASCADE; DROP TYPE IF EXISTS book_type CASCADE; DROP TABLE IF EXISTS authors CASCADE; DROP FUNCTION IF EXISTS say_hello(text) CASCADE; CREATE TABLE authors ( author_id SERIAL PRIMARY KEY, name text NOT NULL DEFAULT '' ); CREATE INDEX authors_name_idx ON authors(name); CREATE TYPE book_type AS ENUM ( 'FICTION', 'NONFICTION' ); CREATE TABLE books ( book_id SERIAL PRIMARY KEY, author_id integer NOT NULL REFERENCES authors(author_id), isbn text NOT NULL DEFAULT '' UNIQUE, booktype book_type NOT NULL DEFAULT 'FICTION', title text NOT NULL DEFAULT '', year integer NOT NULL DEFAULT 2000, available timestamp with time zone NOT NULL DEFAULT 'NOW()', tags varchar[] NOT NULL DEFAULT '{}' ); CREATE INDEX books_title_idx ON books(title, year); CREATE FUNCTION say_hello(text) RETURNS text AS $$ BEGIN RETURN CONCAT('hello ', $1); END; $$ LANGUAGE plpgsql; CREATE INDEX books_title_lower_idx ON books(title); usql-0.19.19/contrib/postgres/test.sql000066400000000000000000000036711476173253300177070ustar00rootroot00000000000000-- postgres test script \set \set SYNTAX_HL_FORMAT terminal16m \set SYNTAX_HL true \? 
\copyright \set SYNTAX_HL_STYLE dracula select 'test'' ' \g \set NAME myname DROP TABLE IF EXISTS books; DROP TABLE IF EXISTS authors; DROP TABLE IF EXISTS books CASCADE; DROP TYPE IF EXISTS book_type CASCADE; DROP TABLE IF EXISTS authors CASCADE; DROP FUNCTION IF EXISTS say_hello(text) CASCADE; CREATE TABLE authors ( author_id SERIAL PRIMARY KEY, name text NOT NULL DEFAULT '' ); CREATE INDEX authors_name_idx ON authors(name); CREATE TYPE book_type AS ENUM ( 'FICTION', 'NONFICTION' ); CREATE INDEX authors_name_idx ON authors(name); \set SYNTAX_HL_STYLE paraiso-dark CREATE TABLE books ( /* this is a multiline comment */ book_id SERIAL PRIMARY KEY, author_id integer NOT NULL REFERENCES authors(author_id), isbn text NOT NULL DEFAULT '' UNIQUE, booktype book_type NOT NULL DEFAULT 'FICTION', title text NOT NULL DEFAULT '', year integer NOT NULL DEFAULT 2000, available timestamp with time zone NOT NULL DEFAULT 'NOW()', tags varchar[] NOT NULL DEFAULT '{}' ); CREATE INDEX books_title_idx ON books(title, year); insert into authors (name) values ('jk rowling'), ('author amazing') \g select * from authors; \set COLNAME name \set NAME amaz \echo `echo hello` select :"COLNAME" from authors where :COLNAME like '%' || :'NAME' || '%' \print \raw \g \gset AUTHOR_ select :'AUTHOR_name'; \begin insert into authors (name) values ('test'); \rollback insert into authors (name) values ('hello'); select * from authors; insert into books (author_id, isbn, title, year, available) values (1, '1', 'one', 2018, '2018-06-01 00:00:00'), (2, '2', 'two', 2019, '2019-06-01 00:00:00') ; select * from books b inner join authors a on a.author_id = b.author_id; CREATE FUNCTION say_hello(text) RETURNS text AS $$ BEGIN RETURN CONCAT('hello ', $1); END; $$ LANGUAGE plpgsql; select say_hello('a name!') \G /* exiting! */ \q usql-0.19.19/contrib/postgres/usql-config000066400000000000000000000001731476173253300203530ustar00rootroot00000000000000DB="postgres://postgres:P4ssw0rd@localhost" VSQL="SELECT setting AS version FROM pg_settings WHERE name='server_version';" usql-0.19.19/contrib/presto/000077500000000000000000000000001476173253300156465ustar00rootroot00000000000000usql-0.19.19/contrib/presto/podman-config000066400000000000000000000001071476173253300203100ustar00rootroot00000000000000NAME=presto IMAGE=docker.io/ahanaio/prestodb-sandbox PUBLISH=8080:8080 usql-0.19.19/contrib/presto/usql-config000066400000000000000000000001411476173253300200140ustar00rootroot00000000000000DB="presto://localhost" VSQL="SELECT node_version AS version FROM system.runtime.nodes LIMIT 1;" usql-0.19.19/contrib/sqlite3/000077500000000000000000000000001476173253300157165ustar00rootroot00000000000000usql-0.19.19/contrib/sqlite3/build-windows-icu.sh000077500000000000000000000002451476173253300216230ustar00rootroot00000000000000#!/bin/bash ../source/runConfigureICU \ MinGW \ --host=x86_64-w64-mingw32 \ --disable-release \ --disable-debug \ --enable-static \ --prefix=/opt/local usql-0.19.19/contrib/sqlite3/icu-i18n-mingw64.pc000066400000000000000000000003171476173253300210710ustar00rootroot00000000000000mingw64_prefix=C:\msys64\opt\local includedir="${mingw64_prefix}\include" libdir="${mingw64_prefix}\lib" Name: icu-i18n-mingw64 Version: dev Description: icu-i18n Cflags: -I${includedir} Libs: -L${libdir} usql-0.19.19/contrib/sqlite3/test.sql000066400000000000000000000031011476173253300174110ustar00rootroot00000000000000-- sqlite3 test script \set \set SYNTAX_HL_FORMAT terminal16m \set SYNTAX_HL true help \? 
\copyright \set SYNTAX_HL_STYLE dracula select 'test'' ' \g \set NAME myname PRAGMA foreign_keys = 1; DROP TABLE IF EXISTS books; DROP TABLE IF EXISTS authors; CREATE TABLE authors ( author_id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name text NOT NULL DEFAULT '' ); CREATE INDEX authors_name_idx ON authors(name); \set SYNTAX_HL_STYLE paraiso-dark CREATE TABLE books ( /* this is a multiline comment */ book_id integer NOT NULL PRIMARY KEY AUTOINCREMENT, -- the id of the author author_id integer NOT NULL REFERENCES authors(author_id), isbn text NOT NULL DEFAULT '' UNIQUE, title text NOT NULL DEFAULT '', year integer NOT NULL DEFAULT 2000, available timestamp with time zone NOT NULL DEFAULT '', tags text NOT NULL DEFAULT '{}' ); CREATE INDEX books_title_idx ON books(title, year); insert into authors (name) values ("jk rowling"), ("author amazing") \g select * from authors; \set COLNAME name \set NAME amaz \echo `echo hello` select :"COLNAME" from authors where :COLNAME like '%' || :'NAME' || '%' \print \raw \g \gset AUTHOR_ select :'AUTHOR_name'; \begin insert into authors (name) values ('test'); \rollback insert into authors (name) values ('hello'); select * from authors; insert into books (author_id, isbn, title, year, available) values (1, '1', 'one', 2018, '2018-06-01 00:00:00'), (2, '2', 'two', 2019, '2019-06-01 00:00:00') ; select * from books b inner join authors a on a.author_id = b.author_id; /* exiting! */ \q usql-0.19.19/contrib/sqlite3/usql-config000066400000000000000000000001051476173253300200640ustar00rootroot00000000000000DB="sqlite3:test.sqlite3" VSQL="SELECT sqlite_version() AS version;" usql-0.19.19/contrib/sqlserver/000077500000000000000000000000001476173253300163605ustar00rootroot00000000000000usql-0.19.19/contrib/sqlserver/init.sql000066400000000000000000000017601476173253300200500ustar00rootroot00000000000000EXEC sp_configure 'contained database authentication', 1; RECONFIGURE; DROP LOGIN :NAME; DROP DATABASE :NAME; CREATE DATABASE :NAME CONTAINMENT=PARTIAL; \set QNAME "''":NAME"''" \set SQL 'CREATE LOGIN ':NAME' WITH PASSWORD=':QNAME', CHECK_POLICY=OFF, DEFAULT_DATABASE=':NAME';' EXEC [:NAME].[dbo].[sp_executesql] N:'SQL' \set SQL 'CREATE USER ':NAME' FOR LOGIN ':NAME' WITH DEFAULT_SCHEMA=':NAME';' EXEC [:NAME].[dbo].[sp_executesql] N:'SQL'; \set SQL 'CREATE SCHEMA ':NAME' AUTHORIZATION ':NAME';' EXEC [:NAME].[dbo].[sp_executesql] N:'SQL'; \set SQL 'EXEC sp_addrolemember db_owner, ':QNAME';' EXEC [:NAME].[dbo].[sp_executesql] N:'SQL'; -- original reconnect version: -- --\connect 'sqlserver://localhost/':NAME -- --CREATE LOGIN :NAME -- WITH -- PASSWORD=:'PASS', -- CHECK_POLICY=OFF, -- DEFAULT_DATABASE=:NAME; -- --CREATE USER :NAME -- FOR LOGIN :NAME -- WITH DEFAULT_SCHEMA=:NAME; -- --CREATE SCHEMA :NAME AUTHORIZATION :NAME; -- --EXEC sp_addrolemember 'db_owner', :'NAME'; usql-0.19.19/contrib/sqlserver/podman-config000066400000000000000000000002221476173253300210200ustar00rootroot00000000000000NAME=sqlserver IMAGE=mcr.microsoft.com/mssql/server:2022-latest PUBLISH=1433:1433 ENV="ACCEPT_EULA=Y MSSQL_PID=Express SA_PASSWORD=Adm1nP@ssw0rd" usql-0.19.19/contrib/sqlserver/test.sql000066400000000000000000000030111476173253300200530ustar00rootroot00000000000000-- sqlserver test script \set \set SYNTAX_HL_FORMAT terminal16m \set SYNTAX_HL true \? 
\copyright \set SYNTAX_HL_STYLE dracula select 'test'' ' \g \set NAME myname DROP TABLE IF EXISTS books; DROP TABLE IF EXISTS authors; CREATE TABLE authors ( author_id integer NOT NULL IDENTITY(1,1) PRIMARY KEY, name varchar(255) NOT NULL DEFAULT '' ); CREATE INDEX authors_name_idx ON authors(name); CREATE TABLE books ( book_id integer NOT NULL IDENTITY(1,1) PRIMARY KEY, author_id integer NOT NULL FOREIGN KEY REFERENCES authors(author_id), isbn varchar(255) NOT NULL DEFAULT '' UNIQUE, title varchar(255) NOT NULL DEFAULT '', year integer NOT NULL DEFAULT 2000, available datetime2 NOT NULL DEFAULT CURRENT_TIMESTAMP, tags varchar(255) NOT NULL DEFAULT '' ); CREATE INDEX books_title_idx ON books(title, year); \set SYNTAX_HL_STYLE paraiso-dark insert into authors (name) values ('jk rowling'), ('author amazing') \g select * from authors; \set COLNAME name \set NAME amaz \echo `echo hello` select :"COLNAME" from authors where :COLNAME like '%' || :'NAME' || '%' \print \raw \g \gset AUTHOR_ select :'AUTHOR_name'; \begin insert into authors (name) values ('test'); \rollback insert into authors (name) values ('hello'); select * from authors; insert into books (author_id, isbn, title, year, available) values (1, '1', 'one', 2018, '2018-06-01 00:00:00'), (2, '2', 'two', 2019, '2019-06-01 00:00:00') ; select * from books b inner join authors a on a.author_id = b.author_id; /* exiting! */ \q usql-0.19.19/contrib/sqlserver/usql-config000066400000000000000000000001501476173253300205260ustar00rootroot00000000000000DB="sqlserver://sa:Adm1nP@ssw0rd@localhost/" VSQL="SELECT SERVERPROPERTY('productversion') AS version;" usql-0.19.19/contrib/trino/000077500000000000000000000000001476173253300154655ustar00rootroot00000000000000usql-0.19.19/contrib/trino/podman-config000066400000000000000000000000731476173253300201310ustar00rootroot00000000000000NAME=trino IMAGE=docker.io/trinodb/trino PUBLISH=8080:8080 usql-0.19.19/contrib/trino/usql-config000066400000000000000000000001401476173253300176320ustar00rootroot00000000000000DB="trino://localhost" VSQL="SELECT node_version AS version FROM system.runtime.nodes LIMIT 1;" usql-0.19.19/contrib/usql-test.sh000077500000000000000000000011551476173253300166340ustar00rootroot00000000000000#!/bin/bash SRC=$(realpath $(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)) USQL=$(which usql) if [ -f $SRC/../usql ]; then USQL=$(realpath $SRC/../usql) fi export USQL_SHOW_HOST_INFORMATION=false for TARGET in $SRC/*/usql-config; do NAME=$(basename $(dirname $TARGET)) if [[ ! 
-z "$(podman ps -q --filter "name=$NAME")" || "$NAME" == "duckdb" || "$NAME" == "sqlite3" ]]; then unset DB VSQL source $TARGET if [[ -z "$DB" || -z "$VSQL" ]]; then echo -e "ERROR: DB or VSQL not defined in $TARGET!\n" continue fi (set -x; $USQL "$DB" -X -J -c "$VSQL" ) echo fi done usql-0.19.19/contrib/usqlpass000066400000000000000000000007541476173253300161360ustar00rootroot00000000000000# sample ~/.usqlpass file # # format is: # protocol:host:port:dbname:user:pass postgres:*:*:*:postgres:P4ssw0rd cql:*:*:*:cassandra:cassandra clickhouse:*:*:*:clickhouse:P4ssw0rd couchbase:*:*:*:Administrator:P4ssw0rd godror:*:*:*:system:P4ssw0rd ignite:*:*:*:ignite:ignite mymysql:*:*:*:root:P4ssw0rd mysql:*:*:*:root:P4ssw0rd oracle:*:*:*:system:P4ssw0rd pgx:*:*:*:postgres:P4ssw0rd sqlserver:*:*:*:sa:Adm1nP@ssw0rd vertica:*:*:*:vertica:P4ssw0rd flightsql:*:*:*:flight_username:P4ssw0rd usql-0.19.19/contrib/usqlrc000066400000000000000000000001751476173253300155710ustar00rootroot00000000000000-- example usqlrc file -- put in $HOME/.usqlrc \echo welcome `echo $USER`, today is:`date` \set SYNTAX_HL_STYLE paraiso-dark usql-0.19.19/contrib/vertica/000077500000000000000000000000001476173253300157675ustar00rootroot00000000000000usql-0.19.19/contrib/vertica/podman-config000066400000000000000000000001741476173253300204350ustar00rootroot00000000000000NAME=vertica IMAGE=docker.io/vertica/vertica-ce:latest PUBLISH=5433:5433 ENV="APP_DB_USER=vertica APP_DB_PASSWORD=P4ssw0rd" usql-0.19.19/contrib/vertica/usql-config000066400000000000000000000001261476173253300201400ustar00rootroot00000000000000DB="vertica://vertica:P4ssw0rd@localhost/vertica" VSQL="SELECT version() AS version;" usql-0.19.19/contrib/ydb/000077500000000000000000000000001476173253300151105ustar00rootroot00000000000000usql-0.19.19/contrib/ydb/podman-config000066400000000000000000000003631476173253300175560ustar00rootroot00000000000000NAME=ydb HOSTNAME=localhost IMAGE=cr.yandex/yc/yandex-docker-local-ydb PUBLISH="2135:2135 2136:2136 8765:8765" ENV="YDB_DEFAULT_LOG_LEVEL=NOTICE GRPC_TLS_PORT=2135 GRPC_PORT=2136 MON_PORT=8765" VOLUME="ydb-certs:/ydb_certs ydb-data:/ydb_data" usql-0.19.19/drivers/000077500000000000000000000000001476173253300143505ustar00rootroot00000000000000usql-0.19.19/drivers/adodb/000077500000000000000000000000001476173253300154215ustar00rootroot00000000000000usql-0.19.19/drivers/adodb/adodb.go000066400000000000000000000020621476173253300170210ustar00rootroot00000000000000// Package adodb defines and registers usql's Microsoft ADODB driver. Requires // CGO. Windows only. 
// // Alias: oleodbc, OLE ODBC // // See: https://github.com/mattn/go-adodb package adodb import ( "database/sql" "regexp" "strings" _ "github.com/mattn/go-adodb" // DRIVER "github.com/xo/dburl" "github.com/xo/usql/drivers" ) func init() { endRE := regexp.MustCompile(`;?\s*$`) endAnchorRE := regexp.MustCompile(`(?i)\send\s*;\s*$`) drivers.Register("adodb", drivers.Driver{ AllowMultilineComments: true, AllowCComments: true, Process: func(u *dburl.URL, prefix string, sqlstr string) (string, string, bool, error) { // trim last ; but only when not END; if s := strings.ToLower(u.Query().Get("usql_trim")); s != "" && s != "off" && s != "0" && s != "false" { if !endAnchorRE.MatchString(sqlstr) { sqlstr = endRE.ReplaceAllString(sqlstr, "") } } typ, q := drivers.QueryExecType(prefix, sqlstr) return typ, sqlstr, q, nil }, RowsAffected: func(res sql.Result) (int64, error) { return 0, nil }, }, "oleodbc") } usql-0.19.19/drivers/athena/000077500000000000000000000000001476173253300156105ustar00rootroot00000000000000usql-0.19.19/drivers/athena/athena.go000066400000000000000000000012521476173253300173770ustar00rootroot00000000000000// Package athena defines and registers usql's AWS Athena driver. // // See: https://github.com/uber/athenadriver package athena import ( "context" _ "github.com/uber/athenadriver/go" // DRIVER: awsathena "github.com/xo/usql/drivers" ) func init() { drivers.Register("awsathena", drivers.Driver{ AllowMultilineComments: true, Process: drivers.StripTrailingSemicolon, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext( ctx, `SELECT node_version FROM system.runtime.nodes LIMIT 1`, ).Scan(&ver) if err != nil { return "", err } return "Athena " + ver, nil }, }) } usql-0.19.19/drivers/avatica/000077500000000000000000000000001476173253300157605ustar00rootroot00000000000000usql-0.19.19/drivers/avatica/avatica.go000066400000000000000000000011751476173253300177230ustar00rootroot00000000000000// Package avatica defines and registers usql's Apache Avatica driver. // // See: https://github.com/apache/calcite-avatica-go package avatica import ( "strconv" _ "github.com/apache/calcite-avatica-go/v5" // DRIVER avaticaerrors "github.com/apache/calcite-avatica-go/v5/errors" "github.com/xo/usql/drivers" ) func init() { drivers.Register("avatica", drivers.Driver{ AllowMultilineComments: true, AllowCComments: true, Err: func(err error) (string, string) { if e, ok := err.(avaticaerrors.ResponseError); ok { return strconv.Itoa(int(e.ErrorCode)), e.ErrorMessage } return "", err.Error() }, }) } usql-0.19.19/drivers/bigquery/000077500000000000000000000000001476173253300161775ustar00rootroot00000000000000usql-0.19.19/drivers/bigquery/bigquery.go000066400000000000000000000004421476173253300203550ustar00rootroot00000000000000// Package bigquery defines and registers usql's Google BigQuery driver. // // See: https://github.com/go-gorm/bigquery package bigquery import ( "github.com/xo/usql/drivers" _ "gorm.io/driver/bigquery/driver" // DRIVER ) func init() { drivers.Register("bigquery", drivers.Driver{}) } usql-0.19.19/drivers/cassandra/000077500000000000000000000000001476173253300163075ustar00rootroot00000000000000usql-0.19.19/drivers/cassandra/cassandra.go000066400000000000000000000064671476173253300206120ustar00rootroot00000000000000// Package cassandra defines and registers usql's Cassandra driver. 
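//
// The driver installs its own logger for both gocql and the cql driver so
// that the last logged message can be inspected to surface authentication
// and password errors, which the cql package otherwise reports only as
// sql.ErrBadConn.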
// // See: https://github.com/MichaelS11/go-cql-driver package cassandra import ( "context" "database/sql" "encoding/json" "fmt" "io" "log" "os" "regexp" "strings" cql "github.com/MichaelS11/go-cql-driver" // DRIVER: cql "github.com/gocql/gocql" "github.com/xo/dburl" "github.com/xo/usql/drivers" ) func init() { var debug bool if s := os.Getenv("CQL_DEBUG"); s != "" { log.Printf("ENABLING DEBUGGING FOR CQL") debug = true } // error regexp's authReqRE := regexp.MustCompile(`authentication required`) passwordErrRE := regexp.MustCompile(`Provided username (.*)and/or password are incorrect`) var l *logger drivers.Register("cql", drivers.Driver{ AllowDollar: true, AllowMultilineComments: true, AllowCComments: true, LexerName: "cql", ForceParams: func(u *dburl.URL) { if q := u.Query(); q.Get("timeout") == "" { q.Set("timeout", "300s") u.RawQuery = q.Encode() } }, Open: func(_ context.Context, u *dburl.URL, stdout, stderr func() io.Writer) (func(string, string) (*sql.DB, error), error) { // override cql and gocql loggers l = &logger{debug: debug} gocql.Logger, cql.CqlDriver.Logger = l, log.New(l, "", 0) return sql.Open, nil }, Version: func(ctx context.Context, db drivers.DB) (string, error) { var release, protocol, cql string err := db.QueryRowContext( ctx, `SELECT release_version, cql_version, native_protocol_version FROM system.local WHERE key = 'local'`, ).Scan(&release, &cql, &protocol) if err != nil { return "", err } return "Cassandra " + release + ", CQL " + cql + ", Protocol v" + protocol, nil }, ChangePassword: func(db drivers.DB, user, newpw, _ string) error { _, err := db.Exec(`ALTER ROLE ` + user + ` WITH PASSWORD = '` + newpw + `'`) return err }, IsPasswordErr: func(err error) bool { return passwordErrRE.MatchString(l.last) }, Err: func(err error) (string, string) { if authReqRE.MatchString(l.last) { return "", "authentication required" } if m := passwordErrRE.FindStringSubmatch(l.last); m != nil { return "", fmt.Sprintf("invalid username %sor password", m[1]) } return "", strings.TrimPrefix(strings.TrimPrefix(err.Error(), "driver: "), "gocql: ") }, RowsAffected: func(sql.Result) (int64, error) { return 0, nil }, ConvertDefault: func(v interface{}) (string, error) { buf, err := json.Marshal(v) if err != nil { return "", err } return string(buf), nil }, BatchQueryPrefixes: map[string]string{ "BEGIN BATCH": "APPLY BATCH", }, }) } // logger is a null logger that satisfies the gocql.StdLogger and the io.Writer // interfaces in order to capture the last error issued by the cql/gocql // packages, since the cql package does not (at this time) return any error // other than sql.ErrBadConn. type logger struct { debug bool last string } func (l *logger) Print(v ...interface{}) { if l.debug { log.Print(v...) } } func (l *logger) Printf(s string, v ...interface{}) { if l.debug { log.Printf(s, v...) } } func (l *logger) Println(v ...interface{}) { if l.debug { log.Println(v...) } } func (l *logger) Write(buf []byte) (int, error) { if l.debug { log.Printf("WRITE: %s", string(buf)) } l.last = string(buf) return len(buf), nil } usql-0.19.19/drivers/chai/000077500000000000000000000000001476173253300152545ustar00rootroot00000000000000usql-0.19.19/drivers/chai/chai.go000066400000000000000000000004121476173253300165040ustar00rootroot00000000000000// Package chai defines and registers usql's ChaiSQL driver. 
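//
// The driver is registered with a zero-value drivers.Driver and relies
// entirely on usql's default behavior.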
// // See: https://github.com/chaisql/chai package chai import ( _ "github.com/chaisql/chai/driver" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("chai", drivers.Driver{}) } usql-0.19.19/drivers/clickhouse/000077500000000000000000000000001476173253300165015ustar00rootroot00000000000000usql-0.19.19/drivers/clickhouse/clickhouse.go000066400000000000000000000021201476173253300211540ustar00rootroot00000000000000// Package clickhouse defines and registers usql's ClickHouse driver. // // Group: base // See: https://github.com/ClickHouse/clickhouse-go package clickhouse import ( "database/sql" "strconv" "strings" "github.com/ClickHouse/clickhouse-go/v2" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("clickhouse", drivers.Driver{ AllowMultilineComments: true, RowsAffected: func(sql.Result) (int64, error) { return 0, nil }, ChangePassword: func(db drivers.DB, user, newpw, oldpw string) error { _, err := db.Exec(`ALTER USER ` + user + ` IDENTIFIED BY '` + newpw + `'`) return err }, Err: func(err error) (string, string) { if e, ok := err.(*clickhouse.Exception); ok { return strconv.Itoa(int(e.Code)), strings.TrimPrefix(e.Message, "clickhouse: ") } return "", err.Error() }, IsPasswordErr: func(err error) bool { if e, ok := err.(*clickhouse.Exception); ok { return e.Code == 516 } return false }, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), NewMetadataReader: NewMetadataReader, }) } usql-0.19.19/drivers/clickhouse/clickhouse_test.go000066400000000000000000001013701476173253300222220ustar00rootroot00000000000000package clickhouse_test import ( "context" "database/sql" "flag" "fmt" "github.com/xo/dburl" "github.com/xo/usql/drivers" "log" "os" "path/filepath" "testing" "time" dt "github.com/ory/dockertest/v3" "github.com/xo/usql/drivers/clickhouse" "github.com/xo/usql/drivers/metadata" "github.com/yookoala/realpath" _ "github.com/xo/usql/drivers/csvq" _ "github.com/xo/usql/drivers/moderncsqlite" ) // db is the database connection. 
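// It is populated in TestMain/doMain from a dockertest-managed ClickHouse
// container and shared by all tests in this package.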
var db struct { db *sql.DB res *dt.Resource r metadata.BasicReader } func TestMain(m *testing.M) { cleanup := flag.Bool("cleanup", true, "cleanup when finished") flag.Parse() code, err := doMain(m, *cleanup) if err != nil { fmt.Fprintf(os.Stderr, "error: %v\n", err) if code == 0 { code = 1 } } os.Exit(code) } func doMain(m *testing.M, cleanup bool) (int, error) { dir, err := os.Getwd() if err != nil { return 0, err } dir, err = realpath.Realpath(dir) if err != nil { return 0, err } pool, err := dt.NewPool("") if err != nil { return 0, fmt.Errorf("could not connect to docker: %w", err) } db.res, err = pool.RunWithOptions(&dt.RunOptions{ Repository: "clickhouse/clickhouse-server", Tag: "22.7", Mounts: []string{filepath.Join(dir, "testdata") + ":/docker-entrypoint-initdb.d"}, }) if err != nil { return 0, fmt.Errorf("unable to run: %w", err) } if cleanup { defer func() { if err := pool.Purge(db.res); err != nil { fmt.Fprintf(os.Stderr, "error: could not purge resource: %v\n", err) } }() } // exponential backoff-retry, because the application in the container // might not be ready to accept connections yet if err := pool.Retry(func() error { port := db.res.GetPort("9000/tcp") var err error if db.db, err = sql.Open("clickhouse", fmt.Sprintf("clickhouse://127.0.0.1:%s", port)); err != nil { return err } return db.db.Ping() }); err != nil { return 0, fmt.Errorf("unable to open database: %w", err) } db.r = clickhouse.NewMetadataReader(db.db).(metadata.BasicReader) code := m.Run() return code, nil } func TestSchemas(t *testing.T) { res, err := db.r.Schemas(metadata.Filter{WithSystem: true}) if err != nil { t.Fatalf("could not read schemas: %v", err) } checkNames(t, "schema", res, "default", "system", "tutorial", "tutorial_unexpected", "INFORMATION_SCHEMA", "information_schema", "copy_test") } func TestTables(t *testing.T) { res, err := db.r.Tables(metadata.Filter{ Schema: "tutorial", Types: []string{"BASE TABLE", "TABLE", "VIEW"}, }) if err != nil { t.Fatalf("could not read tables: %v", err) } checkNames(t, "table", res, "hits_v1", "visits_v1") } func TestFunctions(t *testing.T) { r := clickhouse.NewMetadataReader(db.db).(metadata.FunctionReader) res, err := r.Functions(metadata.Filter{Schema: "tutorial"}) if err != nil { t.Fatalf("could not read functions: %v", err) } checkNames(t, "function", res, funcNames()...) } func TestColumns(t *testing.T) { res, err := db.r.Columns(metadata.Filter{ Schema: "tutorial", Parent: "hits_v1", }) if err != nil { log.Fatalf("could not read columns: %v", err) } checkNames(t, "column", res, colNames()...) } func TestCopy(t *testing.T) { // Tests with csvq source DB. 
That driver doesn't support ScanType() for _, destTableSpec := range []string{ "copy_test.dest", "copy_test.dest(StringCol, NumCol)", "insert into copy_test.dest values(?, ?)", } { t.Run("csvq_"+destTableSpec, func(t *testing.T) { testCopy(t, destTableSpec, "csvq:.") }) } // Test with a driver that supports ScanType() t.Run("sqlite", func(t *testing.T) { testCopy(t, "copy_test.dest", "moderncsqlite://:memory:") }) } func testCopy(t *testing.T, destTableSpec string, sourceDbUrlStr string) { ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() _, err := db.db.ExecContext(ctx, "truncate table copy_test.dest") if err != nil { t.Fatalf("could not truncate copy_test table: %v", err) } // Prepare copy destination URL port := db.res.GetPort("9000/tcp") dbUrlStr := fmt.Sprintf("clickhouse://127.0.0.1:%s", port) dbUrl, err := dburl.Parse(dbUrlStr) if err != nil { t.Fatalf("could not parse clickhouse url %s: %v", dbUrlStr, err) } // Prepare source data sourceDbUrl, err := dburl.Parse(sourceDbUrlStr) if err != nil { t.Fatalf("could not parse source DB url %s: %v", sourceDbUrlStr, err) } sourceDb, err := drivers.Open(ctx, sourceDbUrl, nil, nil) if err != nil { t.Fatalf("could not open sourceDb: %v", err) } defer sourceDb.Close() rows, err := sourceDb.QueryContext(ctx, "select 'string', 1") if err != nil { t.Fatalf("could not retrieve source rows: %v", err) } // Do Copy, ignoring copied rows count because clickhouse driver doesn't report RowsAffected _, err = drivers.Copy(ctx, dbUrl, nil, nil, rows, destTableSpec) if err != nil { t.Fatalf("copy failed: %v", err) } rows, err = db.db.QueryContext(ctx, "select StringCol, NumCol from copy_test.dest") if err != nil { t.Fatalf("failed to query: %v", err) } defer rows.Close() var copiedString string var copiedNum int if !rows.Next() { t.Fatalf("nothing copied") } err = rows.Scan(&copiedString, &copiedNum) if err != nil { t.Fatalf("could not read copied data: %v", err) } if copiedString != "string" || copiedNum != 1 { t.Fatalf("copied data differs: %s != string, %d != 1", copiedString, copiedNum) } } func checkNames(t *testing.T, typ string, res interface{ Next() bool }, exp ...string) { n := make(map[string]bool) for _, s := range exp { n[s] = true } names := make(map[string]bool) for res.Next() { name := getName(res) if _, ok := names[name]; ok { t.Errorf("already declared %s %q", typ, name) } names[name] = true } for name := range n { if _, ok := names[name]; !ok { t.Errorf("missing %s %q", typ, name) } } for name := range names { if _, ok := n[name]; !ok { t.Errorf("unexpected %s %q", typ, name) } } } func getName(res interface{}) string { switch x := res.(type) { case *metadata.SchemaSet: return x.Get().Schema case *metadata.TableSet: return x.Get().Name case *metadata.FunctionSet: return x.Get().Name case *metadata.ColumnSet: return x.Get().Name } panic(fmt.Sprintf("unknown type %T", res)) } func funcNames() []string { return []string{ "BIT_AND", "BIT_OR", "BIT_XOR", "CAST", "CHARACTER_LENGTH", "CHAR_LENGTH", "COVAR_POP", "COVAR_SAMP", "CRC32", "CRC32IEEE", "CRC64", "DATABASE", "DATE", "DAY", "DAYOFMONTH", "DAYOFWEEK", "DAYOFYEAR", "FQDN", "FROM_BASE64", "FROM_UNIXTIME", "HOUR", "INET6_ATON", "INET6_NTOA", "INET_ATON", "INET_NTOA", "IPv4CIDRToRange", "IPv4NumToString", "IPv4NumToStringClassC", "IPv4StringToNum", "IPv4StringToNumOrDefault", "IPv4StringToNumOrNull", "IPv4ToIPv6", "IPv6CIDRToRange", "IPv6NumToString", "IPv6StringToNum", "IPv6StringToNumOrDefault", "IPv6StringToNumOrNull", "JSONExtract", 
"JSONExtractArrayRaw", "JSONExtractBool", "JSONExtractFloat", "JSONExtractInt", "JSONExtractKeys", "JSONExtractKeysAndValues", "JSONExtractKeysAndValuesRaw", "JSONExtractRaw", "JSONExtractString", "JSONExtractUInt", "JSONHas", "JSONKey", "JSONLength", "JSONType", "JSON_EXISTS", "JSON_QUERY", "JSON_VALUE", "L1Distance", "L1Norm", "L1Normalize", "L2Distance", "L2Norm", "L2Normalize", "L2SquaredDistance", "L2SquaredNorm", "LAST_DAY", "LinfDistance", "LinfNorm", "LinfNormalize", "LpDistance", "LpNorm", "LpNormalize", "MACNumToString", "MACStringToNum", "MACStringToOUI", "MD4", "MD5", "MINUTE", "MONTH", "QUARTER", "REGEXP_MATCHES", "REGEXP_REPLACE", "SECOND", "SHA1", "SHA224", "SHA256", "SHA384", "SHA512", "STDDEV_POP", "STDDEV_SAMP", "SVG", "TO_BASE64", "URLHash", "URLHierarchy", "URLPathHierarchy", "UUIDNumToString", "UUIDStringToNum", "VAR_POP", "VAR_SAMP", "YEAR", "_CAST", "__bitBoolMaskAnd", "__bitBoolMaskOr", "__bitSwapLastTwo", "__bitWrapperFunc", "__getScalar", "abs", "accurateCast", "accurateCastOrDefault", "accurateCastOrNull", "accurate_Cast", "accurate_CastOrNull", "acos", "acosh", "addDays", "addHours", "addMicroseconds", "addMilliseconds", "addMinutes", "addMonths", "addNanoseconds", "addQuarters", "addSeconds", "addWeeks", "addYears", "addressToLine", "addressToLineWithInlines", "addressToSymbol", "aes_decrypt_mysql", "aes_encrypt_mysql", "aggThrow", "alphaTokens", "and", "any", "anyHeavy", "anyLast", "appendTrailingCharIfAbsent", "argMax", "argMin", "array", "arrayAUC", "arrayAll", "arrayAvg", "arrayCompact", "arrayConcat", "arrayCount", "arrayCumSum", "arrayCumSumNonNegative", "arrayDifference", "arrayDistinct", "arrayElement", "arrayEnumerate", "arrayEnumerateDense", "arrayEnumerateDenseRanked", "arrayEnumerateUniq", "arrayEnumerateUniqRanked", "arrayExists", "arrayFill", "arrayFilter", "arrayFirst", "arrayFirstIndex", "arrayFirstOrNull", "arrayFlatten", "arrayIntersect", "arrayJoin", "arrayLast", "arrayLastIndex", "arrayLastOrNull", "arrayMap", "arrayMax", "arrayMin", "arrayPopBack", "arrayPopFront", "arrayProduct", "arrayPushBack", "arrayPushFront", "arrayReduce", "arrayReduceInRanges", "arrayResize", "arrayReverse", "arrayReverseFill", "arrayReverseSort", "arrayReverseSplit", "arraySlice", "arraySort", "arraySplit", "arrayStringConcat", "arraySum", "arrayUniq", "arrayWithConstant", "arrayZip", "asin", "asinh", "assumeNotNull", "atan", "atan2", "atanh", "avg", "avgWeighted", "bar", "base58Decode", "base58Encode", "base64Decode", "base64Encode", "basename", "bin", "bitAnd", "bitCount", "bitHammingDistance", "bitNot", "bitOr", "bitPositionsToArray", "bitRotateLeft", "bitRotateRight", "bitShiftLeft", "bitShiftRight", "bitSlice", "bitTest", "bitTestAll", "bitTestAny", "bitXor", "bitmapAnd", "bitmapAndCardinality", "bitmapAndnot", "bitmapAndnotCardinality", "bitmapBuild", "bitmapCardinality", "bitmapContains", "bitmapHasAll", "bitmapHasAny", "bitmapMax", "bitmapMin", "bitmapOr", "bitmapOrCardinality", "bitmapSubsetInRange", "bitmapSubsetLimit", "bitmapToArray", "bitmapTransform", "bitmapXor", "bitmapXorCardinality", "bitmaskToArray", "bitmaskToList", "blockNumber", "blockSerializedSize", "blockSize", "boundingRatio", "buildId", "byteSize", "caseWithExpr", "caseWithExpression", "caseWithoutExpr", "caseWithoutExpression", "categoricalInformationValue", "cbrt", "ceil", "ceiling", "char", "cityHash64", "coalesce", "concat", "concatAssumeInjective", "connectionId", "connection_id", "contingency", "convertCharset", "corr", "corrStable", "cos", "cosh", "cosineDistance", "count", 
"countDigits", "countEqual", "countMatches", "countMatchesCaseInsensitive", "countSubstrings", "countSubstringsCaseInsensitive", "countSubstringsCaseInsensitiveUTF8", "covarPop", "covarPopStable", "covarSamp", "covarSampStable", "cramersV", "cramersVBiasCorrected", "currentDatabase", "currentProfiles", "currentRoles", "currentUser", "cutFragment", "cutIPv6", "cutQueryString", "cutQueryStringAndFragment", "cutToFirstSignificantSubdomain", "cutToFirstSignificantSubdomainCustom", "cutToFirstSignificantSubdomainCustomWithWWW", "cutToFirstSignificantSubdomainWithWWW", "cutURLParameter", "cutWWW", "dateDiff", "dateName", "dateTime64ToSnowflake", "dateTimeToSnowflake", "dateTrunc", "date_trunc", "decodeURLComponent", "decodeURLFormComponent", "decodeXMLComponent", "decrypt", "defaultProfiles", "defaultRoles", "defaultValueOfArgumentType", "defaultValueOfTypeName", "degrees", "deltaSum", "deltaSumTimestamp", "demangle", "dense_rank", "detectCharset", "detectLanguage", "detectLanguageMixed", "detectLanguageUnknown", "detectProgrammingLanguage", "detectTonality", "dictGet", "dictGetChildren", "dictGetDate", "dictGetDateOrDefault", "dictGetDateTime", "dictGetDateTimeOrDefault", "dictGetDescendants", "dictGetFloat32", "dictGetFloat32OrDefault", "dictGetFloat64", "dictGetFloat64OrDefault", "dictGetHierarchy", "dictGetInt16", "dictGetInt16OrDefault", "dictGetInt32", "dictGetInt32OrDefault", "dictGetInt64", "dictGetInt64OrDefault", "dictGetInt8", "dictGetInt8OrDefault", "dictGetOrDefault", "dictGetOrNull", "dictGetString", "dictGetStringOrDefault", "dictGetUInt16", "dictGetUInt16OrDefault", "dictGetUInt32", "dictGetUInt32OrDefault", "dictGetUInt64", "dictGetUInt64OrDefault", "dictGetUInt8", "dictGetUInt8OrDefault", "dictGetUUID", "dictGetUUIDOrDefault", "dictHas", "dictIsIn", "distanceL1", "distanceL2", "distanceL2Squared", "distanceLinf", "distanceLp", "divide", "domain", "domainWithoutWWW", "dotProduct", "dumpColumnStructure", "e", "empty", "emptyArrayDate", "emptyArrayDateTime", "emptyArrayFloat32", "emptyArrayFloat64", "emptyArrayInt16", "emptyArrayInt32", "emptyArrayInt64", "emptyArrayInt8", "emptyArrayString", "emptyArrayToSingle", "emptyArrayUInt16", "emptyArrayUInt32", "emptyArrayUInt64", "emptyArrayUInt8", "enabledProfiles", "enabledRoles", "encodeURLComponent", "encodeURLFormComponent", "encodeXMLComponent", "encrypt", "endsWith", "entropy", "equals", "erf", "erfc", "errorCodeToName", "evalMLMethod", "exp", "exp10", "exp2", "exponentialMovingAverage", "exponentialTimeDecayedAvg", "exponentialTimeDecayedCount", "exponentialTimeDecayedMax", "exponentialTimeDecayedSum", "extract", "extractAll", "extractAllGroups", "extractAllGroupsHorizontal", "extractAllGroupsVertical", "extractGroups", "extractTextFromHTML", "extractURLParameter", "extractURLParameterNames", "extractURLParameters", "farmFingerprint64", "farmHash64", "file", "filesystemAvailable", "filesystemCapacity", "filesystemFree", "finalizeAggregation", "firstSignificantSubdomain", "firstSignificantSubdomainCustom", "first_value", "flatten", "flattenTuple", "floor", "format", "formatDateTime", "formatReadableQuantity", "formatReadableSize", "formatReadableTimeDelta", "formatRow", "formatRowNoNewline", "fragment", "fromModifiedJulianDay", "fromModifiedJulianDayOrNull", "fromUnixTimestamp", "fromUnixTimestamp64Micro", "fromUnixTimestamp64Milli", "fromUnixTimestamp64Nano", "fullHostName", "fuzzBits", "gccMurmurHash", "gcd", "generateUUIDv4", "geoDistance", "geoToH3", "geoToS2", "geohashDecode", "geohashEncode", "geohashesInBox", "getMacro", 
"getOSKernelVersion", "getServerPort", "getSetting", "getSizeOfEnumType", "getTypeSerializationStreams", "globalIn", "globalInIgnoreSet", "globalNotIn", "globalNotInIgnoreSet", "globalNotNullIn", "globalNotNullInIgnoreSet", "globalNullIn", "globalNullInIgnoreSet", "globalVariable", "greatCircleAngle", "greatCircleDistance", "greater", "greaterOrEquals", "greatest", "groupArray", "groupArrayInsertAt", "groupArrayMovingAvg", "groupArrayMovingSum", "groupArraySample", "groupBitAnd", "groupBitOr", "groupBitXor", "groupBitmap", "groupBitmapAnd", "groupBitmapOr", "groupBitmapXor", "groupUniqArray", "h3CellAreaM2", "h3CellAreaRads2", "h3Distance", "h3EdgeAngle", "h3EdgeLengthKm", "h3EdgeLengthM", "h3ExactEdgeLengthKm", "h3ExactEdgeLengthM", "h3ExactEdgeLengthRads", "h3GetBaseCell", "h3GetDestinationIndexFromUnidirectionalEdge", "h3GetFaces", "h3GetIndexesFromUnidirectionalEdge", "h3GetOriginIndexFromUnidirectionalEdge", "h3GetPentagonIndexes", "h3GetRes0Indexes", "h3GetResolution", "h3GetUnidirectionalEdge", "h3GetUnidirectionalEdgeBoundary", "h3GetUnidirectionalEdgesFromHexagon", "h3HexAreaKm2", "h3HexAreaM2", "h3HexRing", "h3IndexesAreNeighbors", "h3IsPentagon", "h3IsResClassIII", "h3IsValid", "h3Line", "h3NumHexagons", "h3PointDistKm", "h3PointDistM", "h3PointDistRads", "h3ToCenterChild", "h3ToChildren", "h3ToGeo", "h3ToGeoBoundary", "h3ToParent", "h3ToString", "h3UnidirectionalEdgeIsValid", "h3kRing", "halfMD5", "has", "hasAll", "hasAny", "hasColumnInTable", "hasSubstr", "hasThreadFuzzer", "hasToken", "hasTokenCaseInsensitive", "hashid", "hex", "histogram", "hiveHash", "hop", "hopEnd", "hopStart", "hostName", "hostname", "hypot", "identity", "if", "ifNotFinite", "ifNull", "ignore", "ilike", "in", "inIgnoreSet", "indexHint", "indexOf", "initialQueryID", "initial_query_id", "initializeAggregation", "intDiv", "intDivOrZero", "intExp10", "intExp2", "intHash32", "intHash64", "intervalLengthSum", "isConstant", "isDecimalOverflow", "isFinite", "isIPAddressInRange", "isIPv4String", "isIPv6String", "isInfinite", "isNaN", "isNotNull", "isNull", "isNullable", "isValidJSON", "isValidUTF8", "isZeroOrNull", "javaHash", "javaHashUTF16LE", "joinGet", "joinGetOrNull", "jumpConsistentHash", "kostikConsistentHash", "kurtPop", "kurtSamp", "lagInFrame", "last_value", "lcase", "lcm", "leadInFrame", "least", "left", "leftPad", "leftPadUTF8", "leftUTF8", "lemmatize", "length", "lengthUTF8", "less", "lessOrEquals", "lgamma", "like", "ln", "locate", "log", "log10", "log1p", "log2", "logTrace", "lowCardinalityIndices", "lowCardinalityKeys", "lower", "lowerUTF8", "lpad", "makeDate", "makeDate32", "makeDateTime", "makeDateTime64", "mannWhitneyUTest", "map", "mapAdd", "mapApply", "mapContains", "mapContainsKeyLike", "mapExtractKeyLike", "mapFilter", "mapKeys", "mapPopulateSeries", "mapSubtract", "mapUpdate", "mapValues", "match", "materialize", "max", "max2", "maxIntersections", "maxIntersectionsPosition", "maxMappedArrays", "meanZTest", "median", "medianBFloat16", "medianBFloat16Weighted", "medianDeterministic", "medianExact", "medianExactHigh", "medianExactLow", "medianExactWeighted", "medianTDigest", "medianTDigestWeighted", "medianTiming", "medianTimingWeighted", "meiliMatch", "metroHash64", "mid", "min", "min2", "minMappedArrays", "minSampleSizeContinous", "minSampleSizeConversion", "minus", "mod", "modelEvaluate", "modulo", "moduloLegacy", "moduloOrZero", "monthName", "multiFuzzyMatchAllIndices", "multiFuzzyMatchAny", "multiFuzzyMatchAnyIndex", "multiIf", "multiMatchAllIndices", "multiMatchAny", 
"multiMatchAnyIndex", "multiSearchAllPositions", "multiSearchAllPositionsCaseInsensitive", "multiSearchAllPositionsCaseInsensitiveUTF8", "multiSearchAllPositionsUTF8", "multiSearchAny", "multiSearchAnyCaseInsensitive", "multiSearchAnyCaseInsensitiveUTF8", "multiSearchAnyUTF8", "multiSearchFirstIndex", "multiSearchFirstIndexCaseInsensitive", "multiSearchFirstIndexCaseInsensitiveUTF8", "multiSearchFirstIndexUTF8", "multiSearchFirstPosition", "multiSearchFirstPositionCaseInsensitive", "multiSearchFirstPositionCaseInsensitiveUTF8", "multiSearchFirstPositionUTF8", "multiply", "murmurHash2_32", "murmurHash2_64", "murmurHash3_128", "murmurHash3_32", "murmurHash3_64", "negate", "neighbor", "netloc", "ngramDistance", "ngramDistanceCaseInsensitive", "ngramDistanceCaseInsensitiveUTF8", "ngramDistanceUTF8", "ngramMinHash", "ngramMinHashArg", "ngramMinHashArgCaseInsensitive", "ngramMinHashArgCaseInsensitiveUTF8", "ngramMinHashArgUTF8", "ngramMinHashCaseInsensitive", "ngramMinHashCaseInsensitiveUTF8", "ngramMinHashUTF8", "ngramSearch", "ngramSearchCaseInsensitive", "ngramSearchCaseInsensitiveUTF8", "ngramSearchUTF8", "ngramSimHash", "ngramSimHashCaseInsensitive", "ngramSimHashCaseInsensitiveUTF8", "ngramSimHashUTF8", "ngrams", "nonNegativeDerivative", "normL1", "normL2", "normL2Squared", "normLinf", "normLp", "normalizeL1", "normalizeL2", "normalizeLinf", "normalizeLp", "normalizeQuery", "normalizeQueryKeepNames", "normalizeUTF8NFC", "normalizeUTF8NFD", "normalizeUTF8NFKC", "normalizeUTF8NFKD", "normalizedQueryHash", "normalizedQueryHashKeepNames", "not", "notEmpty", "notEquals", "notILike", "notIn", "notInIgnoreSet", "notLike", "notNullIn", "notNullInIgnoreSet", "nothing", "now", "now64", "nth_value", "nullIf", "nullIn", "nullInIgnoreSet", "or", "parseDateTime32BestEffort", "parseDateTime32BestEffortOrNull", "parseDateTime32BestEffortOrZero", "parseDateTime64BestEffort", "parseDateTime64BestEffortOrNull", "parseDateTime64BestEffortOrZero", "parseDateTimeBestEffort", "parseDateTimeBestEffortOrNull", "parseDateTimeBestEffortOrZero", "parseDateTimeBestEffortUS", "parseDateTimeBestEffortUSOrNull", "parseDateTimeBestEffortUSOrZero", "parseTimeDelta", "partitionId", "path", "pathFull", "pi", "plus", "pointInEllipses", "pointInPolygon", "polygonAreaCartesian", "polygonAreaSpherical", "polygonConvexHullCartesian", "polygonPerimeterCartesian", "polygonPerimeterSpherical", "polygonsDistanceCartesian", "polygonsDistanceSpherical", "polygonsEqualsCartesian", "polygonsIntersectionCartesian", "polygonsIntersectionSpherical", "polygonsSymDifferenceCartesian", "polygonsSymDifferenceSpherical", "polygonsUnionCartesian", "polygonsUnionSpherical", "polygonsWithinCartesian", "polygonsWithinSpherical", "port", "position", "positionCaseInsensitive", "positionCaseInsensitiveUTF8", "positionUTF8", "pow", "power", "proportionsZTest", "protocol", "quantile", "quantileBFloat16", "quantileBFloat16Weighted", "quantileDeterministic", "quantileExact", "quantileExactExclusive", "quantileExactHigh", "quantileExactInclusive", "quantileExactLow", "quantileExactWeighted", "quantileTDigest", "quantileTDigestWeighted", "quantileTiming", "quantileTimingWeighted", "quantiles", "quantilesBFloat16", "quantilesBFloat16Weighted", "quantilesDeterministic", "quantilesExact", "quantilesExactExclusive", "quantilesExactHigh", "quantilesExactInclusive", "quantilesExactLow", "quantilesExactWeighted", "quantilesTDigest", "quantilesTDigestWeighted", "quantilesTiming", "quantilesTimingWeighted", "queryID", "queryString", "queryStringAndFragment", 
"query_id", "radians", "rand", "rand32", "rand64", "randConstant", "randomFixedString", "randomPrintableASCII", "randomString", "randomStringUTF8", "range", "rank", "rankCorr", "readWKTMultiPolygon", "readWKTPoint", "readWKTPolygon", "readWKTRing", "regexpQuoteMeta", "regionHierarchy", "regionIn", "regionToArea", "regionToCity", "regionToContinent", "regionToCountry", "regionToDistrict", "regionToName", "regionToPopulation", "regionToTopContinent", "reinterpret", "reinterpretAsDate", "reinterpretAsDateTime", "reinterpretAsFixedString", "reinterpretAsFloat32", "reinterpretAsFloat64", "reinterpretAsInt128", "reinterpretAsInt16", "reinterpretAsInt256", "reinterpretAsInt32", "reinterpretAsInt64", "reinterpretAsInt8", "reinterpretAsString", "reinterpretAsUInt128", "reinterpretAsUInt16", "reinterpretAsUInt256", "reinterpretAsUInt32", "reinterpretAsUInt64", "reinterpretAsUInt8", "reinterpretAsUUID", "repeat", "replace", "replaceAll", "replaceOne", "replaceRegexpAll", "replaceRegexpOne", "replicate", "retention", "reverse", "reverseUTF8", "revision", "right", "rightPad", "rightPadUTF8", "rightUTF8", "round", "roundAge", "roundBankers", "roundDown", "roundDuration", "roundToExp2", "rowNumberInAllBlocks", "rowNumberInBlock", "row_number", "rpad", "runningAccumulate", "runningConcurrency", "runningDifference", "runningDifferenceStartingWithFirstValue", "s2CapContains", "s2CapUnion", "s2CellsIntersect", "s2GetNeighbors", "s2RectAdd", "s2RectContains", "s2RectIntersection", "s2RectUnion", "s2ToGeo", "scalarProduct", "sequenceCount", "sequenceMatch", "sequenceNextNode", "serverUUID", "shardCount", "shardNum", "showCertificate", "sigmoid", "sign", "simpleJSONExtractBool", "simpleJSONExtractFloat", "simpleJSONExtractInt", "simpleJSONExtractRaw", "simpleJSONExtractString", "simpleJSONExtractUInt", "simpleJSONHas", "simpleLinearRegression", "sin", "singleValueOrNull", "sinh", "sipHash128", "sipHash64", "skewPop", "skewSamp", "sleep", "sleepEachRow", "snowflakeToDateTime", "snowflakeToDateTime64", "sparkbar", "splitByChar", "splitByNonAlpha", "splitByRegexp", "splitByString", "splitByWhitespace", "sqrt", "startsWith", "stddevPop", "stddevPopStable", "stddevSamp", "stddevSampStable", "stem", "stochasticLinearRegression", "stochasticLogisticRegression", "stringToH3", "studentTTest", "subBitmap", "substr", "substring", "substringUTF8", "subtractDays", "subtractHours", "subtractMicroseconds", "subtractMilliseconds", "subtractMinutes", "subtractMonths", "subtractNanoseconds", "subtractQuarters", "subtractSeconds", "subtractWeeks", "subtractYears", "sum", "sumCount", "sumKahan", "sumMapFiltered", "sumMapFilteredWithOverflow", "sumMapWithOverflow", "sumMappedArrays", "sumWithOverflow", "svg", "synonyms", "tan", "tanh", "tcpPort", "tgamma", "theilsU", "throwIf", "tid", "timeSlot", "timeSlots", "timeZone", "timeZoneOf", "timeZoneOffset", "timezone", "timezoneOf", "timezoneOffset", "toBool", "toColumnTypeName", "toDate", "toDate32", "toDate32OrDefault", "toDate32OrNull", "toDate32OrZero", "toDateOrDefault", "toDateOrNull", "toDateOrZero", "toDateTime", "toDateTime32", "toDateTime64", "toDateTime64OrDefault", "toDateTime64OrNull", "toDateTime64OrZero", "toDateTimeOrDefault", "toDateTimeOrNull", "toDateTimeOrZero", "toDayOfMonth", "toDayOfWeek", "toDayOfYear", "toDecimal128", "toDecimal128OrDefault", "toDecimal128OrNull", "toDecimal128OrZero", "toDecimal256", "toDecimal256OrDefault", "toDecimal256OrNull", "toDecimal256OrZero", "toDecimal32", "toDecimal32OrDefault", "toDecimal32OrNull", "toDecimal32OrZero", 
"toDecimal64", "toDecimal64OrDefault", "toDecimal64OrNull", "toDecimal64OrZero", "toFixedString", "toFloat32", "toFloat32OrDefault", "toFloat32OrNull", "toFloat32OrZero", "toFloat64", "toFloat64OrDefault", "toFloat64OrNull", "toFloat64OrZero", "toHour", "toIPv4", "toIPv4OrDefault", "toIPv4OrNull", "toIPv6", "toIPv6OrDefault", "toIPv6OrNull", "toISOWeek", "toISOYear", "toInt128", "toInt128OrDefault", "toInt128OrNull", "toInt128OrZero", "toInt16", "toInt16OrDefault", "toInt16OrNull", "toInt16OrZero", "toInt256", "toInt256OrDefault", "toInt256OrNull", "toInt256OrZero", "toInt32", "toInt32OrDefault", "toInt32OrNull", "toInt32OrZero", "toInt64", "toInt64OrDefault", "toInt64OrNull", "toInt64OrZero", "toInt8", "toInt8OrDefault", "toInt8OrNull", "toInt8OrZero", "toIntervalDay", "toIntervalHour", "toIntervalMicrosecond", "toIntervalMillisecond", "toIntervalMinute", "toIntervalMonth", "toIntervalNanosecond", "toIntervalQuarter", "toIntervalSecond", "toIntervalWeek", "toIntervalYear", "toJSONString", "toLastDayOfMonth", "toLowCardinality", "toMinute", "toModifiedJulianDay", "toModifiedJulianDayOrNull", "toMonday", "toMonth", "toNullable", "toQuarter", "toRelativeDayNum", "toRelativeHourNum", "toRelativeMinuteNum", "toRelativeMonthNum", "toRelativeQuarterNum", "toRelativeSecondNum", "toRelativeWeekNum", "toRelativeYearNum", "toSecond", "toStartOfDay", "toStartOfFifteenMinutes", "toStartOfFiveMinute", "toStartOfFiveMinutes", "toStartOfHour", "toStartOfISOYear", "toStartOfInterval", "toStartOfMicrosecond", "toStartOfMillisecond", "toStartOfMinute", "toStartOfMonth", "toStartOfNanosecond", "toStartOfQuarter", "toStartOfSecond", "toStartOfTenMinutes", "toStartOfWeek", "toStartOfYear", "toString", "toStringCutToZero", "toTime", "toTimeZone", "toTimezone", "toTypeName", "toUInt128", "toUInt128OrNull", "toUInt128OrZero", "toUInt16", "toUInt16OrDefault", "toUInt16OrNull", "toUInt16OrZero", "toUInt256", "toUInt256OrDefault", "toUInt256OrNull", "toUInt256OrZero", "toUInt32", "toUInt32OrDefault", "toUInt32OrNull", "toUInt32OrZero", "toUInt64", "toUInt64OrDefault", "toUInt64OrNull", "toUInt64OrZero", "toUInt8", "toUInt8OrDefault", "toUInt8OrNull", "toUInt8OrZero", "toUUID", "toUUIDOrDefault", "toUUIDOrNull", "toUUIDOrZero", "toUnixTimestamp", "toUnixTimestamp64Micro", "toUnixTimestamp64Milli", "toUnixTimestamp64Nano", "toValidUTF8", "toWeek", "toYYYYMM", "toYYYYMMDD", "toYYYYMMDDhhmmss", "toYear", "toYearWeek", "today", "tokens", "topK", "topKWeighted", "topLevelDomain", "transactionID", "transactionLatestSnapshot", "transactionOldestSnapshot", "transform", "translate", "translateUTF8", "trimBoth", "trimLeft", "trimRight", "trunc", "truncate", "tryBase64Decode", "tumble", "tumbleEnd", "tumbleStart", "tuple", "tupleDivide", "tupleDivideByNumber", "tupleElement", "tupleHammingDistance", "tupleMinus", "tupleMultiply", "tupleMultiplyByNumber", "tupleNegate", "tuplePlus", "tupleToNameValuePairs", "ucase", "unbin", "unhex", "uniq", "uniqCombined", "uniqCombined64", "uniqExact", "uniqHLL12", "uniqTheta", "uniqUpTo", "upper", "upperUTF8", "uptime", "user", "validateNestedArraySizes", "varPop", "varPopStable", "varSamp", "varSampStable", "vectorDifference", "vectorSum", "version", "visibleWidth", "visitParamExtractBool", "visitParamExtractFloat", "visitParamExtractInt", "visitParamExtractRaw", "visitParamExtractString", "visitParamExtractUInt", "visitParamHas", "week", "welchTTest", "windowFunnel", "windowID", "wkt", "wordShingleMinHash", "wordShingleMinHashArg", "wordShingleMinHashArgCaseInsensitive", 
"wordShingleMinHashArgCaseInsensitiveUTF8", "wordShingleMinHashArgUTF8", "wordShingleMinHashCaseInsensitive", "wordShingleMinHashCaseInsensitiveUTF8", "wordShingleMinHashUTF8", "wordShingleSimHash", "wordShingleSimHashCaseInsensitive", "wordShingleSimHashCaseInsensitiveUTF8", "wordShingleSimHashUTF8", "wyHash64", "xor", "xxHash32", "xxHash64", "yandexConsistentHash", "yearweek", "yesterday", "zookeeperSessionUptime", } } func colNames() []string { return []string{ "AdvEngineID", "Age", "BrowserCountry", "BrowserLanguage", "CLID", "ClientEventTime", "ClientIP", "ClientIP", "ClientIP6", "ClientTimeZone", "CodeVersion", "ConnectTiming", "CookieEnable", "CounterClass", "CounterID", "DNSTiming", "DOMCompleteTiming", "DOMContentLoadedTiming", "DOMInteractiveTiming", "DontCountHits", "EventDate", "EventTime", "FUniqID", "FetchTiming", "FirstPaintTiming", "FlashMajor", "FlashMinor", "FlashMinor2", "FromTag", "GeneralInterests", "GoalsReached", "GoodEvent", "HID", "HTTPError", "HasGCLID", "HistoryLength", "HitColor", "IPNetworkID", "Income", "Interests", "IsArtifical", "IsDownload", "IsEvent", "IsLink", "IsMobile", "IsNotBounce", "IsOldCounter", "IsParameter", "IsRobot", "IslandID", "JavaEnable", "JavascriptEnable", "LoadEventEndTiming", "LoadEventStartTiming", "MobilePhone", "MobilePhoneModel", "NSToDOMContentLoadedTiming", "NetMajor", "NetMinor", "OS", "OpenerName", "OpenstatAdID", "OpenstatCampaignID", "OpenstatServiceName", "OpenstatSourceID", "PageCharset", "ParamCurrency", "ParamCurrencyID", "ParamOrderID", "ParamPrice", "Params", "ParsedParams.Key1", "ParsedParams.Key2", "ParsedParams.Key3", "ParsedParams.Key4", "ParsedParams.Key5", "ParsedParams.ValueDouble", "RedirectCount", "RedirectTiming", "Referer", "RefererCategories", "RefererDomain", "RefererHash", "RefererRegions", "Refresh", "RegionID", "RemoteIP", "RemoteIP6", "RequestNum", "RequestTry", "ResolutionDepth", "ResolutionHeight", "ResolutionWidth", "ResponseEndTiming", "ResponseStartTiming", "Robotness", "SearchEngineID", "SearchPhrase", "SendTiming", "Sex", "ShareService", "ShareTitle", "ShareURL", "SilverlightVersion1", "SilverlightVersion2", "SilverlightVersion3", "SilverlightVersion4", "SocialAction", "SocialNetwork", "SocialSourceNetworkID", "SocialSourcePage", "Title", "TraficSourceID", "URL", "URLCategories", "URLDomain", "URLHash", "URLRegions", "UTCEventTime", "UTMCampaign", "UTMContent", "UTMMedium", "UTMSource", "UTMTerm", "UserAgent", "UserAgentMajor", "UserAgentMinor", "UserID", "WatchID", "WindowClientHeight", "WindowClientWidth", "WindowName", "WithHash", "YCLID", } } usql-0.19.19/drivers/clickhouse/reader.go000066400000000000000000000117751476173253300203050ustar00rootroot00000000000000package clickhouse import ( "database/sql" "strings" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" ) type MetadataReader struct { metadata.LoggingReader } // NewMetadataReader creates the metadata reader for clickhouse databases. 
func NewMetadataReader(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { return &MetadataReader{ LoggingReader: metadata.NewLoggingReader(db, opts...), } } func (r MetadataReader) Tables(f metadata.Filter) (*metadata.TableSet, error) { qstr := `SELECT database AS Schema, name AS Name, COALESCE( IF(database LIKE 'system', 'SYSTEM TABLE', null), IF(is_temporary,'LOCAL TEMPORARY', null), IF(engine LIKE 'View', 'VIEW', null), 'TABLE' ) AS Type, COALESCE(total_bytes, 0) AS Size, comment as Comment FROM system.tables` var conds []string var vals []interface{} if !f.WithSystem { conds = append(conds, "database NOT LIKE 'system'") } if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, "database LIKE ?") } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "name LIKE ?") } if len(f.Types) != 0 { var pholders []string for _, t := range f.Types { vals = append(vals, t) pholders = append(pholders, "?") } if len(pholders) != 0 { conds = append(conds, "Type IN ("+strings.Join(pholders, ", ")+")") } } rows, closeRows, err := r.query(qstr, conds, "Schema, Name", vals...) if err != nil { return nil, err } defer closeRows() var results []metadata.Table for rows.Next() { var rec metadata.Table if err := rows.Scan(&rec.Schema, &rec.Name, &rec.Type, &rec.Size, &rec.Comment); err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewTableSet(results), nil } func (r MetadataReader) Columns(f metadata.Filter) (*metadata.ColumnSet, error) { qstr := `SELECT position, database as schema, name, type, COALESCE(default_expression, '') FROM system.columns` vals := []interface{}{f.Parent} conds := []string{"table LIKE ?"} if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, "database LIKE ?") } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "name LIKE ?") } if len(f.Types) != 0 { var pholders []string for _, t := range f.Types { vals = append(vals, t) pholders = append(pholders, "?") } if len(pholders) != 0 { conds = append(conds, "Type IN ("+strings.Join(pholders, ", ")+")") } } rows, closeRows, err := r.query(qstr, conds, "name", vals...) if err != nil { return nil, err } defer closeRows() var results []metadata.Column for rows.Next() { rec := metadata.Column{ Catalog: f.Catalog, Table: f.Parent, } if err := rows.Scan( &rec.OrdinalPosition, &rec.Schema, &rec.Name, &rec.DataType, &rec.Default, ); err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewColumnSet(results), nil } func (r MetadataReader) Schemas(f metadata.Filter) (*metadata.SchemaSet, error) { qstr := `SELECT name FROM system.databases` var conds []string var vals []interface{} if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "name LIKE ?") } rows, closeRows, err := r.query(qstr, conds, "name", vals...) 
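// Worked example (illustrative): for the Tables query above, a filter such as
// metadata.Filter{Schema: "tutorial", Name: "hits%"} with WithSystem=false
// accumulates
//	conds = ["database NOT LIKE 'system'", "database LIKE ?", "name LIKE ?"]
//	vals  = ["tutorial", "hits%"]
// and the query helper at the bottom of this file then appends
//	WHERE database NOT LIKE 'system' AND database LIKE ? AND name LIKE ?
//	ORDER BY Schema, Name
// before running the statement.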
if err != nil { return nil, err } defer closeRows() var results []metadata.Schema for rows.Next() { var rec metadata.Schema if err := rows.Scan(&rec.Schema); err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewSchemaSet(results), nil } func (r MetadataReader) Functions(f metadata.Filter) (*metadata.FunctionSet, error) { qstr := `SELECT name AS specific_name, name AS routine_name, (IF(is_aggregate = 1,'AGGREGATE','FUNCTION')) AS type FROM system.functions` var conds []string var vals []interface{} if f.Name != "" { conds = append(conds, "name LIKE ?") vals = append(vals, f.Name) } if len(f.Types) != 0 { var pholders []string for _, t := range f.Types { vals = append(vals, t) pholders = append(pholders, "?") } if len(pholders) != 0 { conds = append(conds, "type IN ("+strings.Join(pholders, ", ")+")") } } rows, closeRows, err := r.query(qstr, conds, "name, type", vals...) if err != nil { return nil, err } defer closeRows() var results []metadata.Function for rows.Next() { var rec metadata.Function if err := rows.Scan( &rec.SpecificName, &rec.Name, &rec.Type, ); err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewFunctionSet(results), nil } func (r MetadataReader) query(qstr string, conds []string, order string, vals ...interface{}) (*sql.Rows, func(), error) { if len(conds) != 0 { qstr += "\nWHERE " + strings.Join(conds, " AND ") } if order != "" { qstr += "\nORDER BY " + order } return r.Query(qstr, vals...) } usql-0.19.19/drivers/clickhouse/testdata/000077500000000000000000000000001476173253300203125ustar00rootroot00000000000000usql-0.19.19/drivers/clickhouse/testdata/clickhouse.sql000066400000000000000000000222141476173253300231650ustar00rootroot00000000000000-- https://clickhouse.tech/docs/en/getting-started/tutorial/#create-tables CREATE DATABASE tutorial; CREATE TABLE tutorial.hits_v1 ( `WatchID` UInt64, `JavaEnable` UInt8, `Title` String, `GoodEvent` Int16, `EventTime` DateTime, `EventDate` Date, `CounterID` UInt32, `ClientIP` UInt32, `ClientIP6` FixedString(16), `RegionID` UInt32, `UserID` UInt64, `CounterClass` Int8, `OS` UInt8, `UserAgent` UInt8, `URL` String, `Referer` String, `URLDomain` String, `RefererDomain` String, `Refresh` UInt8, `IsRobot` UInt8, `RefererCategories` Array(UInt16), `URLCategories` Array(UInt16), `URLRegions` Array(UInt32), `RefererRegions` Array(UInt32), `ResolutionWidth` UInt16, `ResolutionHeight` UInt16, `ResolutionDepth` UInt8, `FlashMajor` UInt8, `FlashMinor` UInt8, `FlashMinor2` String, `NetMajor` UInt8, `NetMinor` UInt8, `UserAgentMajor` UInt16, `UserAgentMinor` FixedString(2), `CookieEnable` UInt8, `JavascriptEnable` UInt8, `IsMobile` UInt8, `MobilePhone` UInt8, `MobilePhoneModel` String, `Params` String, `IPNetworkID` UInt32, `TraficSourceID` Int8, `SearchEngineID` UInt16, `SearchPhrase` String, `AdvEngineID` UInt8, `IsArtifical` UInt8, `WindowClientWidth` UInt16, `WindowClientHeight` UInt16, `ClientTimeZone` Int16, `ClientEventTime` DateTime, `SilverlightVersion1` UInt8, `SilverlightVersion2` UInt8, `SilverlightVersion3` UInt32, `SilverlightVersion4` UInt16, `PageCharset` String, `CodeVersion` UInt32, `IsLink` UInt8, `IsDownload` UInt8, `IsNotBounce` UInt8, `FUniqID` UInt64, `HID` UInt32, `IsOldCounter` UInt8, `IsEvent` UInt8, `IsParameter` UInt8, `DontCountHits` UInt8, `WithHash` UInt8, `HitColor` FixedString(1), `UTCEventTime` DateTime, `Age` UInt8, `Sex` UInt8, `Income` UInt8, `Interests` 
UInt16, `Robotness` UInt8, `GeneralInterests` Array(UInt16), `RemoteIP` UInt32, `RemoteIP6` FixedString(16), `WindowName` Int32, `OpenerName` Int32, `HistoryLength` Int16, `BrowserLanguage` FixedString(2), `BrowserCountry` FixedString(2), `SocialNetwork` String, `SocialAction` String, `HTTPError` UInt16, `SendTiming` Int32, `DNSTiming` Int32, `ConnectTiming` Int32, `ResponseStartTiming` Int32, `ResponseEndTiming` Int32, `FetchTiming` Int32, `RedirectTiming` Int32, `DOMInteractiveTiming` Int32, `DOMContentLoadedTiming` Int32, `DOMCompleteTiming` Int32, `LoadEventStartTiming` Int32, `LoadEventEndTiming` Int32, `NSToDOMContentLoadedTiming` Int32, `FirstPaintTiming` Int32, `RedirectCount` Int8, `SocialSourceNetworkID` UInt8, `SocialSourcePage` String, `ParamPrice` Int64, `ParamOrderID` String, `ParamCurrency` FixedString(3), `ParamCurrencyID` UInt16, `GoalsReached` Array(UInt32), `OpenstatServiceName` String, `OpenstatCampaignID` String, `OpenstatAdID` String, `OpenstatSourceID` String, `UTMSource` String, `UTMMedium` String, `UTMCampaign` String, `UTMContent` String, `UTMTerm` String, `FromTag` String, `HasGCLID` UInt8, `RefererHash` UInt64, `URLHash` UInt64, `CLID` UInt32, `YCLID` UInt64, `ShareService` String, `ShareURL` String, `ShareTitle` String, `ParsedParams` Nested( Key1 String, Key2 String, Key3 String, Key4 String, Key5 String, ValueDouble Float64), `IslandID` FixedString(16), `RequestNum` UInt32, `RequestTry` UInt8 ) ENGINE = MergeTree() PARTITION BY toYYYYMM(EventDate) ORDER BY (CounterID, EventDate, intHash32(UserID)) SAMPLE BY intHash32(UserID); CREATE TABLE tutorial.visits_v1 ( `CounterID` UInt32, `StartDate` Date, `Sign` Int8, `IsNew` UInt8, `VisitID` UInt64, `UserID` UInt64, `StartTime` DateTime, `Duration` UInt32, `UTCStartTime` DateTime, `PageViews` Int32, `Hits` Int32, `IsBounce` UInt8, `Referer` String, `StartURL` String, `RefererDomain` String, `StartURLDomain` String, `EndURL` String, `LinkURL` String, `IsDownload` UInt8, `TraficSourceID` Int8, `SearchEngineID` UInt16, `SearchPhrase` String, `AdvEngineID` UInt8, `PlaceID` Int32, `RefererCategories` Array(UInt16), `URLCategories` Array(UInt16), `URLRegions` Array(UInt32), `RefererRegions` Array(UInt32), `IsYandex` UInt8, `GoalReachesDepth` Int32, `GoalReachesURL` Int32, `GoalReachesAny` Int32, `SocialSourceNetworkID` UInt8, `SocialSourcePage` String, `MobilePhoneModel` String, `ClientEventTime` DateTime, `RegionID` UInt32, `ClientIP` UInt32, `ClientIP6` FixedString(16), `RemoteIP` UInt32, `RemoteIP6` FixedString(16), `IPNetworkID` UInt32, `SilverlightVersion3` UInt32, `CodeVersion` UInt32, `ResolutionWidth` UInt16, `ResolutionHeight` UInt16, `UserAgentMajor` UInt16, `UserAgentMinor` UInt16, `WindowClientWidth` UInt16, `WindowClientHeight` UInt16, `SilverlightVersion2` UInt8, `SilverlightVersion4` UInt16, `FlashVersion3` UInt16, `FlashVersion4` UInt16, `ClientTimeZone` Int16, `OS` UInt8, `UserAgent` UInt8, `ResolutionDepth` UInt8, `FlashMajor` UInt8, `FlashMinor` UInt8, `NetMajor` UInt8, `NetMinor` UInt8, `MobilePhone` UInt8, `SilverlightVersion1` UInt8, `Age` UInt8, `Sex` UInt8, `Income` UInt8, `JavaEnable` UInt8, `CookieEnable` UInt8, `JavascriptEnable` UInt8, `IsMobile` UInt8, `BrowserLanguage` UInt16, `BrowserCountry` UInt16, `Interests` UInt16, `Robotness` UInt8, `GeneralInterests` Array(UInt16), `Params` Array(String), `Goals` Nested( ID UInt32, Serial UInt32, EventTime DateTime, Price Int64, OrderID String, CurrencyID UInt32), `WatchIDs` Array(UInt64), `ParamSumPrice` Int64, `ParamCurrency` FixedString(3), 
`ParamCurrencyID` UInt16, `ClickLogID` UInt64, `ClickEventID` Int32, `ClickGoodEvent` Int32, `ClickEventTime` DateTime, `ClickPriorityID` Int32, `ClickPhraseID` Int32, `ClickPageID` Int32, `ClickPlaceID` Int32, `ClickTypeID` Int32, `ClickResourceID` Int32, `ClickCost` UInt32, `ClickClientIP` UInt32, `ClickDomainID` UInt32, `ClickURL` String, `ClickAttempt` UInt8, `ClickOrderID` UInt32, `ClickBannerID` UInt32, `ClickMarketCategoryID` UInt32, `ClickMarketPP` UInt32, `ClickMarketCategoryName` String, `ClickMarketPPName` String, `ClickAWAPSCampaignName` String, `ClickPageName` String, `ClickTargetType` UInt16, `ClickTargetPhraseID` UInt64, `ClickContextType` UInt8, `ClickSelectType` Int8, `ClickOptions` String, `ClickGroupBannerID` Int32, `OpenstatServiceName` String, `OpenstatCampaignID` String, `OpenstatAdID` String, `OpenstatSourceID` String, `UTMSource` String, `UTMMedium` String, `UTMCampaign` String, `UTMContent` String, `UTMTerm` String, `FromTag` String, `HasGCLID` UInt8, `FirstVisit` DateTime, `PredLastVisit` Date, `LastVisit` Date, `TotalVisits` UInt32, `TraficSource` Nested( ID Int8, SearchEngineID UInt16, AdvEngineID UInt8, PlaceID UInt16, SocialSourceNetworkID UInt8, Domain String, SearchPhrase String, SocialSourcePage String), `Attendance` FixedString(16), `CLID` UInt32, `YCLID` UInt64, `NormalizedRefererHash` UInt64, `SearchPhraseHash` UInt64, `RefererDomainHash` UInt64, `NormalizedStartURLHash` UInt64, `StartURLDomainHash` UInt64, `NormalizedEndURLHash` UInt64, `TopLevelDomain` UInt64, `URLScheme` UInt64, `OpenstatServiceNameHash` UInt64, `OpenstatCampaignIDHash` UInt64, `OpenstatAdIDHash` UInt64, `OpenstatSourceIDHash` UInt64, `UTMSourceHash` UInt64, `UTMMediumHash` UInt64, `UTMCampaignHash` UInt64, `UTMContentHash` UInt64, `UTMTermHash` UInt64, `FromHash` UInt64, `WebVisorEnabled` UInt8, `WebVisorActivity` UInt32, `ParsedParams` Nested( Key1 String, Key2 String, Key3 String, Key4 String, Key5 String, ValueDouble Float64), `Market` Nested( Type UInt8, GoalID UInt32, OrderID String, OrderPrice Int64, PP UInt32, DirectPlaceID UInt32, DirectOrderID UInt32, DirectBannerID UInt32, GoodID String, GoodName String, GoodQuantity Int32, GoodPrice Int64), `IslandID` FixedString(16) ) ENGINE = CollapsingMergeTree(Sign) PARTITION BY toYYYYMM(StartDate) ORDER BY (CounterID, StartDate, intHash32(UserID), VisitID) SAMPLE BY intHash32(UserID); CREATE DATABASE tutorial_unexpected; CREATE TABLE tutorial_unexpected.hits_v1 ( `Unexpected` String ) ENGINE = MergeTree() ORDER BY (Unexpected); CREATE DATABASE copy_test; CREATE TABLE copy_test.dest ( StringCol String, NumCol UInt32 ) ENGINE = MergeTree() ORDER BY (StringCol);usql-0.19.19/drivers/completer/000077500000000000000000000000001476173253300163425ustar00rootroot00000000000000usql-0.19.19/drivers/completer/completer.go000066400000000000000000000664111476173253300206730ustar00rootroot00000000000000// completer package provides a generic SQL command line completer package completer import ( "fmt" "log" "os" "path/filepath" "sort" "strings" "unicode" "github.com/gohxs/readline" "github.com/xo/usql/drivers/metadata" "github.com/xo/usql/env" "github.com/xo/usql/text" ) const ( WORD_BREAKS = "\t\n$><=;|&{() " ) type caseType bool var ( IGNORE_CASE = caseType(true) MATCH_CASE = caseType(false) CommonSqlStartCommands = []string{ "ABORT", "ALTER", "ANALYZE", "BEGIN", "CALL", "CHECKPOINT", "CLOSE", "CLUSTER", "COMMENT", "COMMIT", "COPY", "CREATE", "DEALLOCATE", "DECLARE", "DELETE FROM", "DESC", "DESCRIBE", "DISCARD", "DO", "DROP", "END", "EXEC", 
"EXECUTE", "EXPLAIN", "FETCH", "GRANT", "IMPORT", "INSERT", "LIST", "LISTEN", "LOAD", "LOCK", "MOVE", "NOTIFY", "PRAGMA", "PREPARE", "REASSIGN", "REFRESH MATERIALIZED VIEW", "REINDEX", "RELEASE", "RESET", "REVOKE", "ROLLBACK", "SAVEPOINT", "SECURITY LABEL", "SELECT", "SET", "SHOW", "START", "TABLE", "TRUNCATE", "UNLISTEN", "UPDATE", "VACUUM", "VALUES", "WITH", } CommonSqlCommands = []string{ "AND", "CASE", "CROSS JOIN", "ELSE", "END", "FETCH", "FROM", "FULL OUTER JOIN", "GROUP BY", "HAVING", "IN", "INNER JOIN", "IS NOT NULL", "IS NULL", "JOIN", "LEFT JOIN", "LIMIT", "NOT", "ON", "OR", "ORDER BY", "THEN", "WHEN", "WHERE", } ) func NewDefaultCompleter(opts ...Option) readline.AutoCompleter { c := completer{ // an empty struct satisfies the metadata.Reader interface, because it is actually empty reader: struct{}{}, logger: log.New(os.Stdout, "ERROR: ", log.LstdFlags), sqlStartCommands: CommonSqlStartCommands, // TODO do we need to add built-in functions like, COALESCE, CAST, NULLIF, CONCAT etc? sqlCommands: CommonSqlCommands, backslashCommands: []string{ `\!`, `\?`, `\C`, `\H`, `\T`, `\Z`, `\a`, `\begin`, `\bind`, `\c`, `\cd`, `\commit`, `\connect`, `\conninfo`, `\copy`, `\copyright`, `\cset`, `\d+`, `\dS+`, `\dS`, `\da+`, `\daS+`, `\daS`, `\da`, `\df+`, `\dfS+`, `\dfS`, `\df`, `\di+`, `\diS+`, `\diS`, `\di`, `\dm+`, `\dmS+`, `\dmS`, `\dm`, `\dn+`, `\dnS+`, `\dnS`, `\dn`, `\drivers`, `\ds+`, `\dsS+`, `\dsS`, `\ds`, `\dt+`, `\dtS+`, `\dtS`, `\dt`, `\dv+`, `\dvS+`, `\dvS`, `\dv`, `\e`, `\echo`, `\f`, `\g`, `\getenv`, `\gexec`, `\gset`, `\gx`, `\i`, `\ir`, `\l+`, `\l`, `\p`, `\password`, `\prompt`, `\pset`, `\q`, `\r`, `\raw`, `\rollback`, `\set`, `\setenv`, `\t`, `\timing`, `\unset`, `\w`, `\watch`, `\x`, }, } for _, o := range opts { o(&c) } return c } // Option to configure the reader type Option func(*completer) // WithDB option func WithDB(db metadata.DB) Option { return func(c *completer) { c.db = db } } // WithReader option func WithReader(r metadata.Reader) Option { return func(c *completer) { c.reader = r } } // WithLogger option func WithLogger(l logger) Option { return func(c *completer) { c.logger = l } } // WithSQLStartCommands that can begin a query func WithSQLStartCommands(commands []string) Option { return func(c *completer) { c.sqlStartCommands = commands } } // WithSQLCommands that can be any part of a query func WithSQLCommands(commands []string) Option { return func(c *completer) { c.sqlCommands = commands } } // WithConnStrings option func WithConnStrings(connStrings []string) Option { return func(c *completer) { c.connStrings = connStrings } } // WithBeforeComplete option func WithBeforeComplete(f CompleteFunc) Option { return func(c *completer) { c.beforeComplete = f } } // completer based on https://github.com/postgres/postgres/blob/9f3665fbfc34b963933e51778c7feaa8134ac885/src/bin/psql/tab-complete.c type completer struct { db metadata.DB reader metadata.Reader logger logger sqlStartCommands []string sqlCommands []string backslashCommands []string connStrings []string beforeComplete CompleteFunc } // CompleteFunc returns patterns completing current text, using previous words as context type CompleteFunc func(previousWords []string, text []rune) [][]rune type logger interface { Println(...interface{}) } func (c completer) Do(line []rune, start int) (newLine [][]rune, length int) { var i int for i = start - 1; i > 0; i-- { if strings.ContainsRune(WORD_BREAKS, line[i]) { i++ break } } if i == -1 { i = 0 } previousWords := getPreviousWords(start, line) text := line[i:start] 
if c.beforeComplete != nil { result := c.beforeComplete(previousWords, text) if result != nil { return result, len(text) } } result := c.complete(previousWords, text) if result != nil { return result, len(text) } return nil, 0 } func (c completer) complete(previousWords []string, text []rune) [][]rune { if len(text) > 0 { if len(previousWords) == 0 && text[0] == '\\' { /* If current word is a backslash command, offer completions for that */ return CompleteFromListCase(MATCH_CASE, text, c.backslashCommands...) } if text[0] == ':' { if len(text) == 1 || text[1] == ':' { return nil } /* If current word is a variable interpolation, handle that case */ if text[1] == '\'' { return completeFromVariables(text, ":'", "'", true) } if text[1] == '"' { return completeFromVariables(text, ":\"", "\"", true) } return completeFromVariables(text, ":", "", true) } } if len(previousWords) == 0 { /* If no previous word, suggest one of the basic sql commands */ return CompleteFromList(text, c.sqlStartCommands...) } /* DELETE --- can be inside EXPLAIN, RULE, etc */ /* ... despite which, only complete DELETE with FROM at start of line */ if matches(IGNORE_CASE, previousWords, "DELETE") { return CompleteFromList(text, "FROM") } /* Complete DELETE FROM with a list of tables */ if TailMatches(IGNORE_CASE, previousWords, "DELETE", "FROM") { return c.completeWithUpdatables(text) } /* Complete DELETE FROM
<table>
*/ if TailMatches(IGNORE_CASE, previousWords, "DELETE", "FROM", "*") { return CompleteFromList(text, "USING", "WHERE") } /* XXX: implement tab completion for DELETE ... USING */ /* Complete CREATE */ if TailMatches(IGNORE_CASE, previousWords, "CREATE") { return CompleteFromList(text, "DATABASE", "SCHEMA", "SEQUENCE", "TABLE", "VIEW", "TEMPORARY") } if TailMatches(IGNORE_CASE, previousWords, "CREATE", "TEMP|TEMPORARY") { return CompleteFromList(text, "TABLE", "VIEW") } if TailMatches(IGNORE_CASE, previousWords, "CREATE", "TABLE", "*") || TailMatches(IGNORE_CASE, previousWords, "CREATE", "TEMP|TEMPORARY", "TABLE", "*") { return CompleteFromList(text, "(") } /* INSERT --- can be inside EXPLAIN, RULE, etc */ /* Complete INSERT with "INTO" */ if TailMatches(IGNORE_CASE, previousWords, "INSERT") { return CompleteFromList(text, "INTO") } /* Complete INSERT INTO with table names */ if TailMatches(IGNORE_CASE, previousWords, "INSERT", "INTO") { return c.completeWithUpdatables(text) } /* Complete "INSERT INTO
<table>
(" with attribute names */ if TailMatches(IGNORE_CASE, previousWords, "INSERT", "INTO", "*", "(") { return c.completeWithAttributes(IGNORE_CASE, previousWords[1], text) } /* * Complete INSERT INTO
with "(" or "VALUES" or "SELECT" or * "TABLE" or "DEFAULT VALUES" or "OVERRIDING" */ if TailMatches(IGNORE_CASE, previousWords, "INSERT", "INTO", "*") { return CompleteFromList(text, "(", "DEFAULT VALUES", "SELECT", "TABLE", "VALUES", "OVERRIDING") } /* * Complete INSERT INTO
(attribs) with "VALUES" or "SELECT" or * "TABLE" or "OVERRIDING" */ if TailMatches(IGNORE_CASE, previousWords, "INSERT", "INTO", "*", "*") && strings.HasSuffix(previousWords[0], ")") { return CompleteFromList(text, "SELECT", "TABLE", "VALUES", "OVERRIDING") } /* Complete OVERRIDING */ if TailMatches(IGNORE_CASE, previousWords, "OVERRIDING") { return CompleteFromList(text, "SYSTEM VALUE", "USER VALUE") } /* Complete after OVERRIDING clause */ if TailMatches(IGNORE_CASE, previousWords, "OVERRIDING", "*", "VALUE") { return CompleteFromList(text, "SELECT", "TABLE", "VALUES") } /* Insert an open parenthesis after "VALUES" */ if TailMatches(IGNORE_CASE, previousWords, "VALUES") && !TailMatches(IGNORE_CASE, previousWords, "DEFAULT", "VALUES") { return CompleteFromList(text, "(") } /* UPDATE --- can be inside EXPLAIN, RULE, etc */ /* If prev. word is UPDATE suggest a list of tables */ if TailMatches(IGNORE_CASE, previousWords, "UPDATE") { return c.completeWithUpdatables(text) } /* Complete UPDATE
with "SET" */ if TailMatches(IGNORE_CASE, previousWords, "UPDATE", "*") { return CompleteFromList(text, "SET") } /* Complete UPDATE
SET with list of attributes */ if TailMatches(IGNORE_CASE, previousWords, "UPDATE", "*", "SET") { return c.completeWithAttributes(IGNORE_CASE, previousWords[1], text) } /* UPDATE
<table>
SET = */ if TailMatches(IGNORE_CASE, previousWords, "UPDATE", "*", "SET", "!*=") { return CompleteFromList(text, "=") } /* WHERE */ /* Simple case of the word before the where being the table name */ if TailMatches(IGNORE_CASE, previousWords, "*", "WHERE") { // TODO would be great to _try_ to parse the (incomplete) query // and get a list of possible selectables to filter by return c.completeWithAttributes(IGNORE_CASE, previousWords[1], text, "AND", "OR", "CASE", "WHEN", "THEN", "ELSE", "END", ) } /* ... FROM | JOIN ... */ if TailMatches(IGNORE_CASE, previousWords, "FROM|JOIN") { return c.completeWithSelectables(text) } /* TABLE, but not TABLE embedded in other commands */ if matches(IGNORE_CASE, previousWords, "TABLE") { return c.completeWithUpdatables(text) } /* Backslash commands */ if TailMatches(MATCH_CASE, previousWords, `\cd|\e|\edit|\g|\gx|\i|\include|\ir|\include_relative|\o|\out|\s|\w|\write`) { return completeFromFiles(text) } if TailMatches(MATCH_CASE, previousWords, `\c|\connect|\copy`) || TailMatches(MATCH_CASE, previousWords, `\copy`, `*`) { return CompleteFromList(text, c.connStrings...) } if TailMatches(MATCH_CASE, previousWords, `\copy`, `*`, `*`) { return nil } if TailMatches(MATCH_CASE, previousWords, `\da*`) { return c.completeWithFunctions(text, []string{"AGGREGATE"}) } if TailMatches(MATCH_CASE, previousWords, `\df*`) { return c.completeWithFunctions(text, []string{}) } if TailMatches(MATCH_CASE, previousWords, `\di*`) { return c.completeWithIndexes(text) } if TailMatches(MATCH_CASE, previousWords, `\dn*`) { return c.completeWithSchemas(text) } if TailMatches(MATCH_CASE, previousWords, `\ds*`) { return c.completeWithSequences(text) } if TailMatches(MATCH_CASE, previousWords, `\dt*`) { return c.completeWithTables(text, []string{"TABLE", "BASE TABLE", "SYSTEM TABLE", "SYNONYM", "LOCAL TEMPORARY", "GLOBAL TEMPORARY"}) } if TailMatches(MATCH_CASE, previousWords, `\dv*`) { return c.completeWithTables(text, []string{"VIEW", "SYSTEM VIEW"}) } if TailMatches(MATCH_CASE, previousWords, `\dm*`) { return c.completeWithTables(text, []string{"MATERIALIZED VIEW"}) } if TailMatches(MATCH_CASE, previousWords, `\d*`) { return c.completeWithSelectables(text) } if TailMatches(MATCH_CASE, previousWords, `\l*`) || TailMatches(MATCH_CASE, previousWords, `\lo*`) { return c.completeWithCatalogs(text) } if TailMatches(MATCH_CASE, previousWords, `\pset`) { return CompleteFromList(text, `border`, `columns`, `expanded`, `fieldsep`, `fieldsep_zero`, `footer`, `format`, `linestyle`, `null`, `numericlocale`, `pager`, `pager_min_lines`, `recordsep`, `recordsep_zero`, `tableattr`, `title`, `title`, `tuples_only`, `unicode_border_linestyle`, `unicode_column_linestyle`, `unicode_header_linestyle`) } if TailMatches(MATCH_CASE, previousWords, `\pset`, `expanded`) { return CompleteFromList(text, "auto", "on", "off") } if TailMatches(MATCH_CASE, previousWords, `\pset`, `pager`) { return CompleteFromList(text, "always", "on", "off") } if TailMatches(MATCH_CASE, previousWords, `\pset`, `fieldsep_zero|footer|numericlocale|pager|recordsep_zero|tuples_only`) { return CompleteFromList(text, "on", "off") } if TailMatches(MATCH_CASE, previousWords, `\pset`, `format`) { return CompleteFromList(text, "unaligned", "aligned", "wrapped", "html", "asciidoc", "latex", "latex-longtable", "troff-ms", "csv", "json", "vertical") } if TailMatches(MATCH_CASE, previousWords, `\pset`, `linestyle`) { return CompleteFromList(text, "ascii", "old-ascii", "unicode") } if TailMatches(MATCH_CASE, previousWords, `\pset`, 
`unicode_border_linestyle|unicode_column_linestyle|unicode_header_linestyle`) { return CompleteFromList(text, "single", "double") } if TailMatches(MATCH_CASE, previousWords, `\pset`, `*`) || TailMatches(MATCH_CASE, previousWords, `\pset`, `*`, `*`) { return nil } if TailMatches(MATCH_CASE, previousWords, `\?`) { return CompleteFromList(text, "commands", "options", "variables") } // is suggesting basic sql commands better than nothing? return CompleteFromList(text, c.sqlCommands...) } func getPreviousWords(point int, buf []rune) []string { var i int /* * Allocate a slice of strings (rune slices). The worst case is that the line contains only * non-whitespace WORD_BREAKS characters, making each one a separate word. * This is usually much more space than we need, but it's cheaper than * doing a separate malloc() for each word. */ previousWords := make([]string, 0, point*2) /* * First we look for a non-word char before the current point. (This is * probably useless, if readline is on the same page as we are about what * is a word, but if so it's cheap.) */ for i = point - 1; i >= 0; i-- { if strings.ContainsRune(WORD_BREAKS, buf[i]) { break } } point = i /* * Now parse words, working backwards, until we hit start of line. The * backwards scan has some interesting but intentional properties * concerning parenthesis handling. */ for point >= 0 { var start, end int inquotes := false parentheses := 0 /* now find the first non-space which then constitutes the end */ end = -1 for i = point; i >= 0; i-- { if !unicode.IsSpace(buf[i]) { end = i break } } /* if no end found, we're done */ if end < 0 { break } /* * Otherwise we now look for the start. The start is either the last * character before any word-break character going backwards from the * end, or it's simply character 0. We also handle open quotes and * parentheses. */ for start = end; start > 0; start-- { if buf[start] == '"' { inquotes = !inquotes } if inquotes { continue } if buf[start] == ')' { parentheses++ } else if buf[start] == '(' { parentheses -= 1 if parentheses <= 0 { break } } else if parentheses == 0 && strings.ContainsRune(WORD_BREAKS, buf[start-1]) { break } } /* Return the word located at start to end inclusive */ i = end - start + 1 previousWords = append(previousWords, string(buf[start:start+i])) /* Continue searching */ point = start - 1 } return previousWords } // TailMatches when last words match all patterns func TailMatches(ct caseType, words []string, patterns ...string) bool { if len(words) < len(patterns) { return false } for i, p := range patterns { if !wordMatches(ct, p, words[len(patterns)-i-1]) { return false } } return true } func matches(ct caseType, words []string, patterns ...string) bool { if len(words) != len(patterns) { return false } for i, p := range patterns { if !wordMatches(ct, p, words[len(patterns)-i-1]) { return false } } return true } func wordMatches(ct caseType, pattern, word string) bool { if pattern == "*" { return true } if pattern[0] == '!' 
{ return !wordMatches(ct, pattern[1:], word) } cmp := func(a, b string) bool { return a == b } if ct == IGNORE_CASE { cmp = strings.EqualFold } for _, p := range strings.Split(pattern, "|") { star := strings.IndexByte(p, '*') if star == -1 { if cmp(p, word) { return true } } else { if len(word) >= len(p)-1 && cmp(p[0:star], word[0:star]) && (star >= len(p) || cmp(p[star+1:], word[len(word)-len(p)+star+1:])) { return true } } } return false } // CompleteFromList where items starts with text, ignoring case func CompleteFromList(text []rune, options ...string) [][]rune { return CompleteFromListCase(IGNORE_CASE, text, options...) } // CompleteFromList where items starts with text func CompleteFromListCase(ct caseType, text []rune, options ...string) [][]rune { if len(options) == 0 { return nil } isLower := false if len(text) > 0 { isLower = unicode.IsLower(text[0]) } prefix := string(text) if ct == IGNORE_CASE { prefix = strings.ToUpper(prefix) } result := make([][]rune, 0, len(options)) for _, o := range options { if (ct == IGNORE_CASE && !strings.HasPrefix(strings.ToUpper(o), prefix)) || (ct == MATCH_CASE && !strings.HasPrefix(o, prefix)) { continue } match := o[len(text):] if ct == IGNORE_CASE && isLower { match = strings.ToLower(match) } result = append(result, []rune(match)) } return result } func completeFromVariables(text []rune, prefix, suffix string, needValue bool) [][]rune { vars := env.All() names := make([]string, 0, len(vars)) for name, value := range vars { if needValue && value == "" { continue } names = append(names, fmt.Sprintf("%s%s%s", prefix, name, suffix)) } return CompleteFromListCase(MATCH_CASE, text, names...) } func (c completer) completeWithSelectables(text []rune) [][]rune { filter := parseIdentifier(string(text)) names := c.getNamespaces(filter) if r, ok := c.reader.(metadata.TableReader); ok { tables := c.getNames( func() (iterator, error) { return r.Tables(filter) }, func(res interface{}) string { t := res.(*metadata.TableSet).Get() return qualifiedIdentifier(filter, t.Catalog, t.Schema, t.Name) }, ) names = append(names, tables...) } if r, ok := c.reader.(metadata.FunctionReader); ok { functions := c.getNames( func() (iterator, error) { return r.Functions(filter) }, func(res interface{}) string { f := res.(*metadata.FunctionSet).Get() return qualifiedIdentifier(filter, f.Catalog, f.Schema, f.Name) }, ) names = append(names, functions...) } if r, ok := c.reader.(metadata.SequenceReader); ok { sequences := c.getNames( func() (iterator, error) { return r.Sequences(filter) }, func(res interface{}) string { s := res.(*metadata.SequenceSet).Get() return qualifiedIdentifier(filter, s.Catalog, s.Schema, s.Name) }, ) names = append(names, sequences...) } sort.Strings(names) // TODO make sure CompleteFromList would properly handle quoted identifiers return CompleteFromList(text, names...) } func (c completer) completeWithTables(text []rune, types []string) [][]rune { r, ok := c.reader.(metadata.TableReader) if !ok { return [][]rune{} } filter := parseIdentifier(string(text)) filter.Types = types names := c.getNamespaces(filter) tables := c.getNames( func() (iterator, error) { return r.Tables(filter) }, func(res interface{}) string { t := res.(*metadata.TableSet).Get() return qualifiedIdentifier(filter, t.Catalog, t.Schema, t.Name) }, ) names = append(names, tables...) sort.Strings(names) return CompleteFromList(text, names...) 
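// For illustration of the pattern syntax handled by wordMatches above: "*"
// matches any single word, a leading "!" negates the pattern, "|" separates
// alternatives, and an embedded "*" matches a prefix/suffix pair (as in the
// `\dt*` rules earlier in this file). With the buffer "UPDATE film SET na",
// the previous words are collected most-recent-first as
// []string{"SET", "film", "UPDATE"}, so:
//
//	TailMatches(IGNORE_CASE, previousWords, "UPDATE", "*", "SET") // true
//	TailMatches(IGNORE_CASE, previousWords, "FROM|JOIN")          // false
//
// CompleteFromList then returns only the suffix left to type, lower-casing it
// when the typed prefix starts with a lowercase letter; e.g.
// CompleteFromList([]rune("SEL"), "SELECT") yields "ECT", while "sel" yields
// "ect".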
} func (c completer) completeWithFunctions(text []rune, types []string) [][]rune { r, ok := c.reader.(metadata.FunctionReader) if !ok { return [][]rune{} } filter := parseIdentifier(string(text)) filter.Types = types names := c.getNamespaces(filter) functions := c.getNames( func() (iterator, error) { return r.Functions(filter) }, func(res interface{}) string { f := res.(*metadata.FunctionSet).Get() return qualifiedIdentifier(filter, f.Catalog, f.Schema, f.Name) }, ) names = append(names, functions...) sort.Strings(names) return CompleteFromList(text, names...) } func (c completer) completeWithIndexes(text []rune) [][]rune { r, ok := c.reader.(metadata.IndexReader) if !ok { return [][]rune{} } filter := parseIdentifier(string(text)) names := c.getNamespaces(filter) indexes := c.getNames( func() (iterator, error) { return r.Indexes(filter) }, func(res interface{}) string { f := res.(*metadata.IndexSet).Get() return qualifiedIdentifier(filter, f.Catalog, f.Schema, f.Name) }, ) names = append(names, indexes...) sort.Strings(names) return CompleteFromList(text, names...) } func (c completer) completeWithSequences(text []rune) [][]rune { r, ok := c.reader.(metadata.SequenceReader) if !ok { return [][]rune{} } filter := parseIdentifier(string(text)) names := c.getNamespaces(filter) sequences := c.getNames( func() (iterator, error) { return r.Sequences(filter) }, func(res interface{}) string { s := res.(*metadata.SequenceSet).Get() return qualifiedIdentifier(filter, s.Catalog, s.Schema, s.Name) }, ) names = append(names, sequences...) sort.Strings(names) return CompleteFromList(text, names...) } func (c completer) completeWithSchemas(text []rune) [][]rune { r, ok := c.reader.(metadata.SchemaReader) if !ok { return [][]rune{} } filter := parseIdentifier(string(text)) names := c.getNames( func() (iterator, error) { if filter.Schema != "" { // name should already have a wildcard appended return r.Schemas(metadata.Filter{Catalog: filter.Schema, Name: filter.Name, WithSystem: true}) } return r.Schemas(filter) }, func(res interface{}) string { s := res.(*metadata.SchemaSet).Get() return qualifiedIdentifier(filter, "", s.Catalog, s.Schema) }, ) return CompleteFromList(text, names...) } func (c completer) completeWithCatalogs(text []rune) [][]rune { r, ok := c.reader.(metadata.CatalogReader) if !ok { return [][]rune{} } filter := parseIdentifier(string(text)) names := c.getNames( func() (iterator, error) { return r.Catalogs(filter) }, func(res interface{}) string { s := res.(*metadata.CatalogSet).Get() return s.Catalog }, ) return CompleteFromList(text, names...) } func (c completer) completeWithUpdatables(text []rune) [][]rune { filter := parseIdentifier(string(text)) names := c.getNamespaces(filter) if r, ok := c.reader.(metadata.TableReader); ok { // exclude materialized views, sequences, system tables, synonyms filter.Types = []string{"TABLE", "BASE TABLE", "LOCAL TEMPORARY", "GLOBAL TEMPORARY", "VIEW"} tables := c.getNames( func() (iterator, error) { return r.Tables(filter) }, func(res interface{}) string { t := res.(*metadata.TableSet).Get() return qualifiedIdentifier(filter, t.Catalog, t.Schema, t.Name) }, ) names = append(names, tables...) } sort.Strings(names) // TODO make sure CompleteFromList would properly handle quoted identifiers return CompleteFromList(text, names...) 
} func (c completer) getNamespaces(f metadata.Filter) []string { names := make([]string, 0, 10) if f.Catalog == "" && f.Schema == "" { if r, ok := c.reader.(metadata.CatalogReader); ok { catalogs := c.getNames( func() (iterator, error) { return r.Catalogs(metadata.Filter{}) }, func(res interface{}) string { return res.(*metadata.CatalogSet).Get().Catalog }, ) names = append(names, catalogs...) } } if f.Catalog != "" { // filter is already fully qualified, so don't return any namespaces return names } if r, ok := c.reader.(metadata.SchemaReader); ok { schemas := c.getNames( func() (iterator, error) { if f.Schema != "" { // name should already have a wildcard appended return r.Schemas(metadata.Filter{Catalog: f.Schema, Name: f.Name, WithSystem: true}) } return r.Schemas(f) }, func(res interface{}) string { s := res.(*metadata.SchemaSet).Get() return qualifiedIdentifier(f, "", s.Catalog, s.Schema) }, ) names = append(names, schemas...) } return names } func (c completer) completeWithAttributes(ct caseType, selectable string, text []rune, options ...string) [][]rune { names := make([]string, 0, 10) if r, ok := c.reader.(metadata.ColumnReader); ok { parent := parseParentIdentifier(selectable) columns := c.getNames( func() (iterator, error) { return r.Columns(parent) }, func(res interface{}) string { return res.(*metadata.ColumnSet).Get().Name }, ) names = append(names, columns...) } if r, ok := c.reader.(metadata.FunctionReader); ok { filter := parseIdentifier(string(text)) // functions don't have to be fully qualified to be callable filter.OnlyVisible = false functions := c.getNames( func() (iterator, error) { return r.Functions(filter) }, func(res interface{}) string { return res.(*metadata.FunctionSet).Get().Name }, ) names = append(names, functions...) } names = append(names, options...) return CompleteFromList(text, names...) } // parseIdentifier into catalog, schema and name func parseIdentifier(name string) metadata.Filter { // TODO handle quoted identifiers result := metadata.Filter{} if !strings.ContainsRune(name, '.') { result.Name = name + "%" result.OnlyVisible = true } else { parts := strings.SplitN(name, ".", 3) if len(parts) == 2 { result.Schema = parts[0] result.Name = parts[1] + "%" } else { result.Catalog = parts[0] result.Schema = parts[1] result.Name = parts[2] + "%" } } if result.Schema != "" || len(result.Name) > 3 { result.WithSystem = true } return result } // parseParentIdentifier into catalog, schema and parent func parseParentIdentifier(name string) metadata.Filter { // TODO handle quoted identifiers result := metadata.Filter{} if !strings.ContainsRune(name, '.') { result.Parent = name result.OnlyVisible = true } else { parts := strings.SplitN(name, ".", 3) if len(parts) == 2 { result.Schema = parts[0] result.Parent = parts[1] } else { result.Catalog = parts[0] result.Schema = parts[1] result.Parent = parts[2] } } if result.Schema != "" { result.WithSystem = true } return result } func qualifiedIdentifier(filter metadata.Filter, catalog, schema, name string) string { // TODO handle quoted identifiers if filter.Catalog != "" && filter.Schema != "" { return catalog + "." + schema + "." + name } if filter.Schema != "" { return schema + "." 
+ name } return name } func (c completer) getNames(query func() (iterator, error), mapper func(interface{}) string) []string { res, err := query() if err != nil { if err != text.ErrNotSupported { c.logger.Println("Error getting selectables", err) } return nil } defer res.Close() // there can be duplicates if names are not qualified values := make(map[string]struct{}, 10) for res.Next() { values[mapper(res)] = struct{}{} } result := make([]string, 0, len(values)) for v := range values { result = append(result, v) } return result } type iterator interface { Next() bool Close() error } func completeFromFiles(text []rune) [][]rune { // TODO handle quotes properly dir := filepath.Dir(string(text)) dirs, err := os.ReadDir(dir) if err != nil { return nil } matches := make([]string, 0, len(dirs)) switch dir { case ".": dir = "" case "/": // pass default: dir += "/" } for _, entry := range dirs { name := entry.Name() if entry.IsDir() { name += "/" } matches = append(matches, dir+name) } return CompleteFromList(text, matches...) } usql-0.19.19/drivers/completer/completer_test.go000066400000000000000000000126531476173253300217310ustar00rootroot00000000000000package completer import ( "testing" "github.com/xo/usql/drivers/metadata" ) func TestCompleter(t *testing.T) { cases := []struct { name string line string start int expSuggestions []string expLength int }{ { "Single SQL keyword, uppercase", "SEL", 3, []string{ "ECT", }, 3, }, { "Single SQL keyword, lowercase", "ex", 2, []string{ "ec", "ecute", "plain", }, 2, }, { "usql command", `\dt`, 3, []string{ `+`, ``, `S+`, `S`, }, 3, }, { "files", `\i comp`, 7, []string{ "leter.go", "leter_test.go", }, 4, }, { "connections", `\c p`, 4, []string{ "g://", }, 1, }, { "3rd word", "SELECT * F", 10, []string{ "ULL OUTER JOIN", "ROM", "ETCH", }, 1, }, { "Selectables", "SELECT * FROM ", 14, []string{ "main", "remote", "default", "system", "film", "factory", }, 0, }, { "Namespaced with catalog", "SELECT * FROM remote.", 21, []string{ "film", "factory", }, 7, }, { "Namespaced with schema", "SELECT * FROM system.", 21, []string{ "film", "factory", }, 7, }, { "Namespaced with catalog.schema", "SELECT * FROM remote.default.f", 30, []string{ "ilm", "actory", }, 16, }, { "Attributes", "SELECT * FROM film WHERE ", 25, []string{ "id", "name", "CASE", "AND", "OR", "WHEN", "THEN", "ELSE", "END", }, 0, }, { "insert", "INS", 3, []string{ "ERT", }, 3, }, { "insert into", "INSERT IN", 9, []string{ "TO", }, 2, }, { "insert into table", "INSERT INTO fi", 14, []string{ "lm", }, 2, }, { "insert into table select from", "INSERT INTO film SE", 19, []string{ "LECT", }, 2, }, { "insert into table attrs", "INSERT INTO film (", 18, []string{ "id", "name", }, 0, }, { "insert into table values", "INSERT INTO film (a)", 20, []string{ "SELECT", "TABLE", "VALUES", "OVERRIDING", }, 0, }, { "update table set attrs", "UPDATE film SET ", 16, []string{ "id", "name", }, 0, }, { "update table set", "update film set name ", 21, []string{ "=", }, 0, }, { "variables", ":a", 2, []string{}, 2, }, { "type on create", "CREATE ", 7, []string{ "SCHEMA", "DATABASE", "TABLE", "SEQUENCE", "VIEW", "TEMPORARY", }, 0, }, { "brackets on create table", "CREATE TABLE p ", 15, []string{ "(", }, 0, }, { "TABLE Selectables", "TABLE ", 6, []string{ "main", "remote", "default", "system", "film", "factory", }, 0, }, { "TABLE namespaced with catalog", "TABLE remote.", 13, []string{ "film", "factory", }, 7, }, { "TABLE namespaced with schema", "TABLE system.", 13, []string{ "film", "factory", }, 7, }, { "TABLE namespaced with 
catalog.schema", "TABLE remote.default.f", 22, []string{ "ilm", "actory", }, 16, }, } completer := NewDefaultCompleter(WithReader(mockReader{}), WithConnStrings([]string{"pg://"})) for _, test := range cases { t.Run(test.name, func(t *testing.T) { suggestions, length := completer.Do([]rune(test.line), test.start) // need at least 2 pairs of nested loops, one for what's missing, second for what's extra for _, exp := range test.expSuggestions { found := false for _, act := range suggestions { if string(act) == exp { found = true break } } if !found { t.Errorf("Missing expected suggestion: %s", exp) } } for _, act := range suggestions { found := false for _, exp := range test.expSuggestions { if string(act) == exp { found = true break } } if !found { t.Errorf("Unexpected suggestion: %s", string(act)) } } if length != test.expLength { t.Errorf("Expected Do() to return length %d, got %d", test.expLength, length) } }) } } type mockReader struct{} var _ metadata.CatalogReader = &mockReader{} var _ metadata.BasicReader = &mockReader{} func (r mockReader) Catalogs(metadata.Filter) (*metadata.CatalogSet, error) { return metadata.NewCatalogSet([]metadata.Catalog{ { Catalog: "main", }, { Catalog: "remote", }, }), nil } func (r mockReader) Schemas(metadata.Filter) (*metadata.SchemaSet, error) { return metadata.NewSchemaSet([]metadata.Schema{ { Schema: "default", Catalog: "main", }, { Schema: "system", Catalog: "main", }, }), nil } func (r mockReader) Tables(f metadata.Filter) (*metadata.TableSet, error) { return metadata.NewTableSet([]metadata.Table{ { Catalog: f.Catalog, Schema: f.Schema, Name: "film", }, { Catalog: f.Catalog, Schema: f.Schema, Name: "factory", }, }), nil } func (r mockReader) Columns(f metadata.Filter) (*metadata.ColumnSet, error) { if f.Parent == "film" { return metadata.NewColumnSet([]metadata.Column{ { Name: "id", }, { Name: "name", }, }), nil } return metadata.NewColumnSet([]metadata.Column{ { Name: f.Catalog, }, { Name: f.Schema, }, { Name: f.Name, }, }), nil } usql-0.19.19/drivers/cosmos/000077500000000000000000000000001476173253300156535ustar00rootroot00000000000000usql-0.19.19/drivers/cosmos/cosmos.go000066400000000000000000000005271476173253300175110ustar00rootroot00000000000000// Package cosmos defines and registers usql's Azure CosmosDB driver. // // See: https://github.com/btnguyen2k/gocosmos package cosmos import ( _ "github.com/btnguyen2k/gocosmos" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("cosmos", drivers.Driver{ Process: drivers.StripTrailingSemicolon, }, "gocosmos") } usql-0.19.19/drivers/couchbase/000077500000000000000000000000001476173253300163045ustar00rootroot00000000000000usql-0.19.19/drivers/couchbase/couchbase.go000066400000000000000000000020311476173253300205630ustar00rootroot00000000000000// Package couchbase defines and registers usql's Couchbase driver. 
// // See: https://github.com/couchbase/go_n1ql package couchbase import ( "context" "strconv" "strings" _ "github.com/couchbase/go_n1ql" // DRIVER: n1ql "github.com/xo/usql/drivers" ) func init() { drivers.Register("n1ql", drivers.Driver{ AllowMultilineComments: true, Version: func(ctx context.Context, db drivers.DB) (string, error) { ver := "" /* var buf []byte if err := db.QueryRowContext(ctx, `SELECT ds_version() AS version`).Scan(&buf); err == nil { var m map[string]string if err := json.Unmarshal(buf, &m); err == nil { if s, ok := m["version"]; ok { ver = s } } } */ var v string if err := db.QueryRowContext(ctx, `SELECT RAW ds_version()`).Scan(&v); err == nil { if s, err := strconv.Unquote(v); err == nil { ver = s } } return "Couchbase " + ver, nil }, Err: func(err error) (string, string) { return "", strings.TrimPrefix(err.Error(), "N1QL: ") }, }) } usql-0.19.19/drivers/csvq/000077500000000000000000000000001476173253300153245ustar00rootroot00000000000000usql-0.19.19/drivers/csvq/csvq.go000066400000000000000000000017651476173253300166400ustar00rootroot00000000000000// Package csvq defines and registers usql's CSVQ driver. // // See: https://github.com/mithrandie/csvq-driver // Group: base package csvq import ( "context" "os" "strings" "github.com/mithrandie/csvq-driver" // DRIVER "github.com/mithrandie/csvq/lib/query" "github.com/xo/dburl" "github.com/xo/usql/drivers" ) func init() { csvq.SetStdout(query.NewDiscard()) drivers.Register("csvq", drivers.Driver{ AllowMultilineComments: true, Process: func(_ *dburl.URL, prefix string, sqlstr string) (string, string, bool, error) { typ, q := drivers.QueryExecType(prefix, sqlstr) if strings.HasPrefix(prefix, "SHOW") { csvq.SetStdout(os.Stdout) q = false } return typ, sqlstr, q, nil }, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string if err := db.QueryRowContext(ctx, `SELECT @#VERSION`).Scan(&ver); err != nil { return "", err } return "CSVQ " + ver, nil }, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), }) } usql-0.19.19/drivers/databend/000077500000000000000000000000001476173253300161125ustar00rootroot00000000000000usql-0.19.19/drivers/databend/databend.go000066400000000000000000000016741476173253300202130ustar00rootroot00000000000000// Package databend defines and registers usql's Databend driver. // // See: https://github.com/datafuselabs/databend-go package databend import ( "io" _ "github.com/datafuselabs/databend-go" // DRIVER "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" ) func init() { newReader := infos.New( infos.WithPlaceholder(func(int) string { return "?" }), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.SequenceColumnsIncrement: "''", }), infos.WithFunctions(false), infos.WithIndexes(false), infos.WithConstraints(false), infos.WithColumnPrivileges(false), ) drivers.Register("databend", drivers.Driver{ UseColumnTypes: true, NewMetadataReader: newReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(newReader(db, opts...))(db, w) }, }) } usql-0.19.19/drivers/databricks/000077500000000000000000000000001476173253300164575ustar00rootroot00000000000000usql-0.19.19/drivers/databricks/databricks.go000066400000000000000000000010531476173253300211140ustar00rootroot00000000000000// Package databricks defines and registers usql's Databricks driver. 
// // See: https://github.com/databricks/databricks-sql-go package databricks import ( "errors" _ "github.com/databricks/databricks-sql-go" // DRIVER dberrs "github.com/databricks/databricks-sql-go/errors" "github.com/xo/usql/drivers" ) func init() { drivers.Register("databricks", drivers.Driver{ Err: func(err error) (string, string) { var e dberrs.DBExecutionError if errors.As(err, &e) { return e.SqlState(), e.Error() } return "", err.Error() }, }) } usql-0.19.19/drivers/drivers.go000066400000000000000000000466331476173253300163710ustar00rootroot00000000000000// Package drivers handles the registration, default implementation, and // handles hooks for usql database drivers. package drivers import ( "context" "database/sql" "encoding/json" "fmt" "io" "reflect" "regexp" "strings" "time" "unicode" "github.com/alecthomas/chroma/v2" "github.com/alecthomas/chroma/v2/lexers" "github.com/gohxs/readline" "github.com/xo/dburl" "github.com/xo/usql/drivers/completer" "github.com/xo/usql/drivers/metadata" "github.com/xo/usql/stmt" "github.com/xo/usql/text" ) // DB is the common interface for database operations, compatible with // database/sql.DB and database/sql.Tx. type DB interface { Exec(string, ...interface{}) (sql.Result, error) ExecContext(context.Context, string, ...interface{}) (sql.Result, error) Query(string, ...interface{}) (*sql.Rows, error) QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) QueryRow(string, ...interface{}) *sql.Row QueryRowContext(context.Context, string, ...interface{}) *sql.Row Prepare(string) (*sql.Stmt, error) PrepareContext(context.Context, string) (*sql.Stmt, error) } // Driver holds funcs for a driver. type Driver struct { // Name is a name to override the driver name with. Name string // AllowDollar will be passed to query buffers to enable dollar ($$) style // strings. AllowDollar bool // AllowMultilineComments will be passed to query buffers to enable // multiline (/**/) style comments. AllowMultilineComments bool // AllowCComments will be passed to query buffers to enable C (//) style // comments. AllowCComments bool // AllowHashComments will be passed to query buffers to enable hash (#) // style comments. AllowHashComments bool // RequirePreviousPassword will be used by RequirePreviousPassword. RequirePreviousPassword bool // LexerName is the name of the syntax lexer to use. LexerName string // LowerColumnNames will cause column names to be lowered cased. LowerColumnNames bool // UseColumnTypes will cause database's ColumnTypes func to be used for // types. UseColumnTypes bool // ForceParams will be used to force parameters if defined. ForceParams func(*dburl.URL) // Open will be used by Open if defined. Open func(context.Context, *dburl.URL, func() io.Writer, func() io.Writer) (func(string, string) (*sql.DB, error), error) // Version will be used by Version if defined. Version func(context.Context, DB) (string, error) // User will be used by User if defined. User func(context.Context, DB) (string, error) // ChangePassword will be used by ChangePassword if defined. ChangePassword func(DB, string, string, string) error // IsPasswordErr will be used by IsPasswordErr if defined. IsPasswordErr func(error) bool // Process will be used by Process if defined. Process func(*dburl.URL, string, string) (string, string, bool, error) // ColumnTypes is a callback that will be used if ColumnTypes func(*sql.ColumnType) (interface{}, error) // RowsAffected will be used by RowsAffected if defined. 
RowsAffected func(sql.Result) (int64, error) // Err will be used by Error.Error if defined. Err func(error) (string, string) // ConvertBytes will be used by ConvertBytes to convert a raw []byte // slice to a string if defined. ConvertBytes func([]byte, string) (string, error) // ConvertMap will be used by ConvertMap to convert a map[string]interface{} // to a string if defined. ConvertMap func(map[string]interface{}) (string, error) // ConvertSlice will be used by ConvertSlice to convert a []interface{} to // a string if defined. ConvertSlice func([]interface{}) (string, error) // ConvertDefault will be used by ConvertDefault to convert a interface{} // to a string if defined. ConvertDefault func(interface{}) (string, error) // BatchAsTransaction will cause batched queries to be done in a // transaction block. BatchAsTransaction bool // BatchQueryPrefixes will be used by BatchQueryPrefixes if defined. BatchQueryPrefixes map[string]string // NewMetadataReader returns a db metadata introspector. NewMetadataReader func(db DB, opts ...metadata.ReaderOption) metadata.Reader // NewMetadataWriter returns a db metadata printer. NewMetadataWriter func(db DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer // NewCompleter returns a db auto-completer. NewCompleter func(db DB, opts ...completer.Option) readline.AutoCompleter // Copy rows into the database table Copy func(ctx context.Context, db *sql.DB, rows *sql.Rows, table string) (int64, error) } // drivers are registered drivers. var drivers = make(map[string]Driver) // Available returns the available drivers. func Available() map[string]Driver { return drivers } // Register registers driver d with name and associated aliases. func Register(name string, d Driver, aliases ...string) { if _, ok := drivers[name]; ok { panic(fmt.Sprintf("driver %s is already registered", name)) } drivers[name] = d for _, alias := range aliases { if _, ok := drivers[alias]; ok { panic(fmt.Sprintf("alias %s is already registered", name)) } drivers[alias] = d } } // Registered returns whether or not a driver is registered. func Registered(name string) bool { _, ok := drivers[name] return ok } // LowerColumnNames returns whether or not column names should be converted to // lower case for a driver. func LowerColumnNames(u *dburl.URL) bool { if d, ok := drivers[u.Driver]; ok { return d.LowerColumnNames } return false } // UseColumnTypes returns whether or not a driver should uses column types. func UseColumnTypes(u *dburl.URL) bool { if d, ok := drivers[u.Driver]; ok { return d.UseColumnTypes } return false } // ForceParams forces parameters on the DSN for a driver. func ForceParams(u *dburl.URL) { d, ok := drivers[u.Driver] if ok && d.ForceParams != nil { d.ForceParams(u) } } // Open opens a sql.DB connection for a driver. func Open(ctx context.Context, u *dburl.URL, stdout, stderr func() io.Writer) (*sql.DB, error) { d, ok := drivers[u.Driver] if !ok { return nil, WrapErr(u.Driver, text.ErrDriverNotAvailable) } f := sql.Open if d.Open != nil { var err error if f, err = d.Open(ctx, u, stdout, stderr); err != nil { return nil, WrapErr(u.Driver, err) } } driver := u.Driver if u.GoDriver != "" { driver = u.GoDriver } db, err := f(driver, u.DSN) if err != nil { return nil, WrapErr(u.Driver, err) } return db, nil } // stmtOpts returns statement options for a driver. 
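// A minimal sketch of how an individual driver package typically hooks into
// Register above (the "mydrv" name, its alias, the import path, and the
// version query are hypothetical; the real driver packages in this repository
// follow the same shape from their init funcs):
//
//	package mydrv
//
//	import (
//		"context"
//
//		_ "example.com/mydrv/sqldriver" // DRIVER (hypothetical)
//		"github.com/xo/usql/drivers"
//	)
//
//	func init() {
//		drivers.Register("mydrv", drivers.Driver{
//			AllowMultilineComments: true,
//			Version: func(ctx context.Context, db drivers.DB) (string, error) {
//				var ver string
//				if err := db.QueryRowContext(ctx, `SELECT version()`).Scan(&ver); err != nil {
//					return "", err
//				}
//				return "MyDrv " + ver, nil
//			},
//		}, "mydrv_alias")
//	}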
func stmtOpts(u *dburl.URL) []stmt.Option { if u != nil { if d, ok := drivers[u.Driver]; ok { return []stmt.Option{ stmt.WithAllowDollar(d.AllowDollar), stmt.WithAllowMultilineComments(d.AllowMultilineComments), stmt.WithAllowCComments(d.AllowCComments), stmt.WithAllowHashComments(d.AllowHashComments), } } } return []stmt.Option{ stmt.WithAllowDollar(true), stmt.WithAllowMultilineComments(true), stmt.WithAllowCComments(true), stmt.WithAllowHashComments(true), } } // NewStmt wraps creating a new stmt.Stmt for a driver. func NewStmt(u *dburl.URL, f func() ([]rune, error), opts ...stmt.Option) *stmt.Stmt { return stmt.New(f, append(opts, stmtOpts(u)...)...) } // ConfigStmt sets the stmt.Stmt options for a driver. func ConfigStmt(u *dburl.URL, s *stmt.Stmt) { if u == nil { return } for _, o := range stmtOpts(u) { o(s) } } // Version returns information about the database connection for a driver. func Version(ctx context.Context, u *dburl.URL, db DB) (string, error) { if d, ok := drivers[u.Driver]; ok && d.Version != nil { ver, err := d.Version(ctx, db) return ver, WrapErr(u.Driver, err) } var ver string err := db.QueryRowContext(ctx, `SELECT version();`).Scan(&ver) if err != nil || ver == "" { ver = "" } return ver, nil } // User returns the current database user for a driver. func User(ctx context.Context, u *dburl.URL, db DB) (string, error) { if d, ok := drivers[u.Driver]; ok && d.User != nil { user, err := d.User(ctx, db) return user, WrapErr(u.Driver, err) } var user string _ = db.QueryRowContext(ctx, `SELECT current_user`).Scan(&user) return user, nil } // Process processes the sql query for a driver. func Process(u *dburl.URL, prefix, sqlstr string) (string, string, bool, error) { if d, ok := drivers[u.Driver]; ok && d.Process != nil { a, b, c, err := d.Process(u, prefix, sqlstr) return a, b, c, WrapErr(u.Driver, err) } typ, q := QueryExecType(prefix, sqlstr) return typ, sqlstr, q, nil } // ColumnTypes returns the column types callback for a driver. func ColumnTypes(u *dburl.URL) func(*sql.ColumnType) (interface{}, error) { return drivers[u.Driver].ColumnTypes } // IsPasswordErr returns true if an err is a password error for a driver. func IsPasswordErr(u *dburl.URL, err error) bool { drv := u.Driver if e, ok := err.(*Error); ok { drv, err = e.Driver, e.Err } if d, ok := drivers[drv]; ok && d.IsPasswordErr != nil { return d.IsPasswordErr(err) } return false } // RequirePreviousPassword returns true if a driver requires a previous // password when changing a user's password. func RequirePreviousPassword(u *dburl.URL) bool { if d, ok := drivers[u.Driver]; ok { return d.RequirePreviousPassword } return false } // CanChangePassword returns whether or not the a driver supports changing // passwords. func CanChangePassword(u *dburl.URL) error { if d, ok := drivers[u.Driver]; ok && d.ChangePassword != nil { return nil } return text.ErrPasswordNotSupportedByDriver } // ChangePassword initiates a user password change for the a driver. If user is // not supplied, then the current user will be retrieved from User. func ChangePassword(u *dburl.URL, db DB, user, new, old string) (string, error) { if d, ok := drivers[u.Driver]; ok && d.ChangePassword != nil { if user == "" { var err error if user, err = User(context.Background(), u, db); err != nil { return "", err } } return user, d.ChangePassword(db, user, new, old) } return "", text.ErrPasswordNotSupportedByDriver } // Columns returns the column names for the SQL row result for a driver. 
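// A rough sketch of how the password helpers defined above compose
// (promptOldPassword and newPass stand in for values gathered from the user
// elsewhere):
//
//	if err := CanChangePassword(u); err == nil {
//		old := ""
//		if RequirePreviousPassword(u) {
//			old = promptOldPassword() // hypothetical prompt helper
//		}
//		// an empty user makes ChangePassword resolve the current user via User()
//		user, err := ChangePassword(u, db, "", newPass, old)
//		...
//	}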
func Columns(u *dburl.URL, rows *sql.Rows) ([]string, error) { cols, err := rows.Columns() if err != nil { return nil, WrapErr(u.Driver, err) } if drivers[u.Driver].LowerColumnNames { for i, s := range cols { if j := strings.IndexFunc(s, func(r rune) bool { return unicode.IsLetter(r) && unicode.IsLower(r) }); j == -1 { cols[i] = strings.ToLower(s) } } } for i, c := range cols { if strings.TrimSpace(c) == "" { cols[i] = fmt.Sprintf("col%d", i) } } return cols, nil } // ConvertBytes returns a func to handle converting bytes for a driver. func ConvertBytes(u *dburl.URL) func([]byte, string) (string, error) { if d, ok := drivers[u.Driver]; ok && d.ConvertBytes != nil { return d.ConvertBytes } return func(buf []byte, _ string) (string, error) { return string(buf), nil } } // ConvertMap returns a func to handle converting a map[string]interface{} for // a driver. func ConvertMap(u *dburl.URL) func(map[string]interface{}) (string, error) { if d, ok := drivers[u.Driver]; ok && d.ConvertMap != nil { return d.ConvertMap } return func(v map[string]interface{}) (string, error) { buf, err := json.Marshal(v) if err != nil { return "", err } return string(buf), nil } } // ConvertSlice returns a func to handle converting a []interface{} for a // driver. func ConvertSlice(u *dburl.URL) func([]interface{}) (string, error) { if d, ok := drivers[u.Driver]; ok && d.ConvertSlice != nil { return d.ConvertSlice } return func(v []interface{}) (string, error) { buf, err := json.Marshal(v) if err != nil { return "", err } return string(buf), nil } } // ConvertDefault returns a func to handle converting a interface{} for a // driver. func ConvertDefault(u *dburl.URL) func(interface{}) (string, error) { if d, ok := drivers[u.Driver]; ok && d.ConvertDefault != nil { return d.ConvertDefault } return func(v interface{}) (string, error) { return fmt.Sprintf("%v", v), nil } } // BatchAsTransaction returns whether or not a driver requires batched queries // to be done within a transaction block. func BatchAsTransaction(u *dburl.URL) bool { if d, ok := drivers[u.Driver]; ok { return d.BatchAsTransaction } return false } // IsBatchQueryPrefix returns whether or not the supplied query prefix is a // batch query prefix, and the closing prefix. Used to direct the handler to // continue accumulating statements. func IsBatchQueryPrefix(u *dburl.URL, prefix string) (string, string, bool) { // normalize typ, q := QueryExecType(prefix, "") d, ok := drivers[u.Driver] if q || !ok || d.BatchQueryPrefixes == nil { return typ, "", false } end, ok := d.BatchQueryPrefixes[typ] return typ, end, ok } // RowsAffected returns the rows affected for the SQL result for a driver. func RowsAffected(u *dburl.URL, res sql.Result) (int64, error) { var count int64 var err error if d, ok := drivers[u.Driver]; ok && d.RowsAffected != nil { count, err = d.RowsAffected(res) } else { count, err = res.RowsAffected() } if err != nil && err.Error() == "no RowsAffected available after DDL statement" { return 0, nil } if err != nil { return 0, WrapErr(u.Driver, err) } return count, nil } // Ping pings the database for a driver. func Ping(ctx context.Context, u *dburl.URL, db *sql.DB) error { return WrapErr(u.Driver, db.PingContext(ctx)) } // Lexer returns the syntax lexer for a driver. 
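// For illustration, the default behavior of the Convert* hooks above when a
// driver does not override them (assuming u's driver defines no ConvertMap):
//
//	toMap := ConvertMap(u)
//	s, _ := toMap(map[string]interface{}{"a": 1}) // s == `{"a":1}` (JSON)
//
// ConvertSlice likewise falls back to JSON, ConvertBytes returns string(buf)
// unchanged, and ConvertDefault renders values with fmt.Sprintf("%v", v).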
func Lexer(u *dburl.URL) chroma.Lexer { var l chroma.Lexer if u != nil { if d, ok := drivers[u.Driver]; ok && d.LexerName != "" { l = lexers.Get(d.LexerName) } } if l == nil { l = lexers.Get("sql") } l.Config().EnsureNL = false return l } // ForceQueryParameters is a utility func that wraps forcing params of name, // value pairs. func ForceQueryParameters(params []string) func(*dburl.URL) { if len(params)%2 != 0 { panic("invalid query params") } return func(u *dburl.URL) { if len(params) != 0 { v := u.Query() for i := 0; i < len(params); i += 2 { v.Set(params[i], params[i+1]) } u.RawQuery = v.Encode() } } } // NewMetadataReader wraps creating a new database introspector for a driver. func NewMetadataReader(ctx context.Context, u *dburl.URL, db DB, w io.Writer, opts ...metadata.ReaderOption) (metadata.Reader, error) { d, ok := drivers[u.Driver] if !ok || d.NewMetadataReader == nil { return nil, fmt.Errorf(text.NotSupportedByDriver, `describe commands`, u.Driver) } return d.NewMetadataReader(db, opts...), nil } // NewMetadataWriter wraps creating a new database metadata printer for a driver. func NewMetadataWriter(ctx context.Context, u *dburl.URL, db DB, w io.Writer, opts ...metadata.ReaderOption) (metadata.Writer, error) { d, ok := drivers[u.Driver] if !ok { return nil, fmt.Errorf(text.NotSupportedByDriver, `describe commands`, u.Driver) } if d.NewMetadataWriter != nil { return d.NewMetadataWriter(db, w, opts...), nil } if d.NewMetadataReader == nil { return nil, fmt.Errorf(text.NotSupportedByDriver, `describe commands`, u.Driver) } newMetadataWriter := metadata.NewDefaultWriter(d.NewMetadataReader(db, opts...)) return newMetadataWriter(db, w), nil } // NewCompleter creates a metadata completer for a driver and database // connection. func NewCompleter(ctx context.Context, u *dburl.URL, db DB, readerOpts []metadata.ReaderOption, opts ...completer.Option) readline.AutoCompleter { d, ok := drivers[u.Driver] if !ok { return nil } if d.NewCompleter != nil { return d.NewCompleter(db, opts...) } if d.NewMetadataReader == nil { return nil } // prepend to allow to override default options readerOpts = append([]metadata.ReaderOption{ // this needs to be relatively low, since autocomplete is very interactive metadata.WithTimeout(3 * time.Second), metadata.WithLimit(1000), }, readerOpts...) opts = append([]completer.Option{ completer.WithReader(d.NewMetadataReader(db, readerOpts...)), completer.WithDB(db), }, opts...) return completer.NewDefaultCompleter(opts...) } // Copy copies the result set to the destination sql.DB. func Copy(ctx context.Context, u *dburl.URL, stdout, stderr func() io.Writer, rows *sql.Rows, table string) (int64, error) { d, ok := drivers[u.Driver] if !ok { return 0, WrapErr(u.Driver, text.ErrDriverNotAvailable) } if d.Copy == nil { return 0, fmt.Errorf(text.NotSupportedByDriver, "copy", u.Driver) } db, err := Open(ctx, u, stdout, stderr) if err != nil { return 0, err } defer db.Close() return d.Copy(ctx, db, rows, table) } // CopyWithInsert builds a copy handler based on insert. 
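// Several drivers in this repository wire their Copy hook this way; a driver
// whose database uses "?" placeholders registers (driver name illustrative):
//
//	drivers.Register("somedrv", drivers.Driver{
//		Copy: drivers.CopyWithInsert(func(int) string { return "?" }),
//	})
//
// Passing a nil placeholder func falls back to the numbered "$n" form built
// below.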
func CopyWithInsert(placeholder func(int) string) func(ctx context.Context, db *sql.DB, rows *sql.Rows, table string) (int64, error) { if placeholder == nil { placeholder = func(n int) string { return fmt.Sprintf("$%d", n) } } return func(ctx context.Context, db *sql.DB, rows *sql.Rows, table string) (int64, error) { columns, err := rows.Columns() if err != nil { return 0, fmt.Errorf("failed to fetch source rows columns: %w", err) } clen := len(columns) query := table if !strings.HasPrefix(strings.ToLower(query), "insert into") { leftParen := strings.IndexRune(table, '(') if leftParen == -1 { colRows, err := db.QueryContext(ctx, "SELECT * FROM "+table+" WHERE 1=0") if err != nil { return 0, fmt.Errorf("failed to execute query to determine target table columns: %w", err) } columns, err := colRows.Columns() _ = colRows.Close() if err != nil { return 0, fmt.Errorf("failed to fetch target table columns: %w", err) } table += "(" + strings.Join(columns, ", ") + ")" } // TODO if the db supports multiple rows per insert, create batches of 100 rows placeholders := make([]string, clen) for i := 0; i < clen; i++ { placeholders[i] = placeholder(i + 1) } query = "INSERT INTO " + table + " VALUES (" + strings.Join(placeholders, ", ") + ")" } tx, err := db.BeginTx(ctx, nil) if err != nil { return 0, fmt.Errorf("failed to begin transaction: %w", err) } stmt, err := tx.PrepareContext(ctx, query) if err != nil { return 0, fmt.Errorf("failed to prepare insert query: %w", err) } defer stmt.Close() columnTypes, err := rows.ColumnTypes() if err != nil { return 0, fmt.Errorf("failed to fetch source column types: %w", err) } values := make([]interface{}, clen) valueRefs := make([]reflect.Value, clen) actuals := make([]interface{}, clen) for i := 0; i < len(columnTypes); i++ { valueRefs[i] = reflect.New(columnTypes[i].ScanType()) values[i] = valueRefs[i].Interface() } var n int64 for rows.Next() { err = rows.Scan(values...) if err != nil { return n, fmt.Errorf("failed to scan row: %w", err) } //We can't use values... in Exec() below, because some drivers //don't accept pointer to an argument instead of the arg itself. for i := range values { actuals[i] = valueRefs[i].Elem().Interface() } res, err := stmt.ExecContext(ctx, actuals...) if err != nil { return n, fmt.Errorf("failed to exec insert: %w", err) } rn, err := res.RowsAffected() if err != nil { return n, fmt.Errorf("failed to check rows affected: %w", err) } n += rn } // TODO if using batches, flush the last batch, // TODO prepare another statement and count remaining rows err = tx.Commit() if err != nil { return n, fmt.Errorf("failed to commit transaction: %w", err) } return n, rows.Err() } } func init() { dburl.OdbcIgnoreQueryPrefixes = []string{"usql_"} } var endRE = regexp.MustCompile(`;?\s*$`) func StripTrailingSemicolon(_ *dburl.URL, prefix string, sqlstr string) (string, string, bool, error) { sqlstr = endRE.ReplaceAllString(sqlstr, "") typ, q := QueryExecType(prefix, sqlstr) return typ, sqlstr, q, nil } usql-0.19.19/drivers/drivers_test.go000066400000000000000000000375561476173253300174340ustar00rootroot00000000000000// Package drivers_test runs integration tests for drivers package // on real databases running in containers. During development, to avoid rebuilding // containers every run, add the `-cleanup=false` flags when calling `go test github.com/xo/usql/drivers`. 
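// For example, to exercise only the PostgreSQL and MySQL containers and keep
// them running between invocations (flag names as registered in TestMain
// below; the exact invocation may need adjusting for your environment):
//
//	go test -v github.com/xo/usql/drivers -dbs=pgsql,mysql -cleanup=false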
package drivers_test import ( "bytes" "context" "database/sql" "flag" "fmt" "log" "net/url" "os" "regexp" "strings" "testing" "time" dt "github.com/ory/dockertest/v3" dc "github.com/ory/dockertest/v3/docker" "github.com/xo/dburl" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" _ "github.com/xo/usql/internal" ) type Database struct { BuildArgs []dc.BuildArg RunOptions *dt.RunOptions DSN string ReadyDSN string Exec []string DockerPort string Resource *dt.Resource URL *dburl.URL DB *sql.DB } const ( pw = "yourStrong123_Password" ) var ( dbs = map[string]*Database{ "pgsql": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "postgres:13"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/postgres-sakila-db/postgres-sakila-schema.sql"}, {Name: "TARGET", Value: "/docker-entrypoint-initdb.d"}, {Name: "USER", Value: "root"}, }, RunOptions: &dt.RunOptions{ Name: "usql-pgsql", Cmd: []string{"-c", "log_statement=all", "-c", "log_min_duration_statement=0"}, Env: []string{"POSTGRES_PASSWORD=pw"}, }, DSN: "postgres://postgres:pw@localhost:%s/postgres?sslmode=disable", DockerPort: "5432/tcp", }, "pgx": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "postgres:13"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/postgres-sakila-db/postgres-sakila-schema.sql"}, {Name: "TARGET", Value: "/docker-entrypoint-initdb.d"}, {Name: "USER", Value: "root"}, }, RunOptions: &dt.RunOptions{ Name: "usql-pgsql", Cmd: []string{"-c", "log_statement=all", "-c", "log_min_duration_statement=0"}, Env: []string{"POSTGRES_PASSWORD=pw"}, }, DSN: "pgx://postgres:pw@localhost:%s/postgres?sslmode=disable", DockerPort: "5432/tcp", }, "mysql": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "mysql:8"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/mysql-sakila-db/mysql-sakila-schema.sql"}, {Name: "TARGET", Value: "/docker-entrypoint-initdb.d"}, {Name: "USER", Value: "root"}, }, RunOptions: &dt.RunOptions{ Name: "usql-mysql", Cmd: []string{"--general-log=1", "--general-log-file=/var/lib/mysql/mysql.log"}, Env: []string{"MYSQL_ROOT_PASSWORD=pw"}, }, DSN: "mysql://root:pw@localhost:%s/sakila?parseTime=true", DockerPort: "3306/tcp", }, "sqlserver": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "mcr.microsoft.com/mssql/server:2019-latest"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/sql-server-sakila-db/sql-server-sakila-schema.sql"}, {Name: "TARGET", Value: "/schema"}, {Name: "USER", Value: "mssql:0"}, }, RunOptions: &dt.RunOptions{ Name: "usql-sqlserver", Env: []string{"ACCEPT_EULA=Y", "SA_PASSWORD=" + pw}, }, DSN: "sqlserver://sa:" + url.QueryEscape(pw) + "@127.0.0.1:%s?database=sakila", ReadyDSN: "sqlserver://sa:" + url.QueryEscape(pw) + "@127.0.0.1:%s?database=master", Exec: []string{"/opt/mssql-tools/bin/sqlcmd", "-S", "localhost", "-U", "sa", "-P", pw, "-d", "master", "-i", "/schema/sql-server-sakila-schema.sql"}, DockerPort: "1433/tcp", }, "trino": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "trinodb/trino:359"}, }, RunOptions: &dt.RunOptions{ Name: "usql-trino", }, DSN: "trino://test@localhost:%s/tpch/sf1", DockerPort: "8080/tcp", }, "csvq": { // go test sets working directory to current package regardless of initial working directory DSN: "csvq://./testdata/csvq", }, } cleanup bool ) func TestMain(m *testing.M) { var only string flag.BoolVar(&cleanup, "cleanup", true, "delete containers when finished") flag.StringVar(&only, "dbs", "", 
"comma separated list of dbs to test: pgsql, mysql, sqlserver, trino") flag.Parse() if only != "" { runOnly := map[string]struct{}{} for _, dbName := range strings.Split(only, ",") { dbName = strings.TrimSpace(dbName) runOnly[dbName] = struct{}{} } for dbName := range dbs { if _, ok := runOnly[dbName]; !ok { delete(dbs, dbName) } } } pool, err := dt.NewPool("") if err != nil { log.Fatalf("Could not connect to docker: %s", err) } for dbName, db := range dbs { dsn, hostPort := getConnInfo(dbName, db, pool) db.URL, err = dburl.Parse(dsn) if err != nil { log.Fatalf("Failed to parse %s URL %s: %v", dbName, db.DSN, err) } if len(db.Exec) != 0 { readyDSN := db.ReadyDSN if db.ReadyDSN == "" { readyDSN = db.DSN } if hostPort != "" { readyDSN = fmt.Sprintf(db.ReadyDSN, hostPort) } readyURL, err := dburl.Parse(readyDSN) if err != nil { log.Fatalf("Failed to parse %s ready URL %s: %v", dbName, db.ReadyDSN, err) } if err := pool.Retry(func() error { readyDB, err := drivers.Open(context.Background(), readyURL, nil, nil) if err != nil { return err } return readyDB.Ping() }); err != nil { log.Fatalf("Timed out waiting for %s to be ready: %s", dbName, err) } // No TTY attached to facilitate debugging with delve exitCode, err := db.Resource.Exec(db.Exec, dt.ExecOptions{}) if err != nil || exitCode != 0 { log.Fatalf("Could not load schema for %s: %s", dbName, err) } } // exponential backoff-retry, because the application in the container might not be ready to accept connections yet var openErr error if retryErr := pool.Retry(func() error { db.DB, openErr = drivers.Open(context.Background(), db.URL, nil, nil) if openErr != nil { return openErr } return db.DB.Ping() }); retryErr != nil { log.Fatalf("Timed out waiting for %s:\n%s\n%s", dbName, retryErr, openErr) } } code := m.Run() // You can't defer this because os.Exit doesn't care for defer if cleanup { for _, db := range dbs { if db.Resource != nil { if err := pool.Purge(db.Resource); err != nil { log.Fatal("Could not purge resource: ", err) } } } } os.Exit(code) } func getConnInfo(dbName string, db *Database, pool *dt.Pool) (string, string) { if db.RunOptions == nil { return db.DSN, "" } var ok bool db.Resource, ok = pool.ContainerByName(db.RunOptions.Name) if ok && !db.Resource.Container.State.Running { err := db.Resource.Close() if err != nil { log.Fatalf("Failed to clean up stale container %s: %s", dbName, err) } ok = false } if !ok { buildOpts := &dt.BuildOptions{ ContextDir: "./testdata/docker", BuildArgs: db.BuildArgs, } var err error db.Resource, err = pool.BuildAndRunWithBuildOptions(buildOpts, db.RunOptions) if err != nil { log.Fatalf("Failed to start %s: %s", dbName, err) } } hostPort := db.Resource.GetPort(db.DockerPort) return fmt.Sprintf(db.DSN, hostPort), hostPort } func TestWriter(t *testing.T) { type testFunc struct { label string f func(w metadata.Writer, u *dburl.URL) error ignore string } testCases := []struct { dbName string funcs []testFunc }{ { dbName: "pgsql", funcs: []testFunc{ { label: "descTable", f: func(w metadata.Writer, u *dburl.URL) error { return w.DescribeTableDetails(u, "film*", true, false) }, }, { label: "listTables", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListTables(u, "tvmsE", "film*", true, false) }, }, { label: "listFuncs", f: func(w metadata.Writer, u *dburl.URL) error { return w.DescribeFunctions(u, "", "", false, false) }, }, { label: "listIndexes", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListIndexes(u, "", true, false) }, }, { label: "listSchemas", f: func(w metadata.Writer, u 
*dburl.URL) error { return w.ListSchemas(u, "", true, false) }, }, }, }, { dbName: "mysql", funcs: []testFunc{ { label: "descTable", f: func(w metadata.Writer, u *dburl.URL) error { return w.DescribeTableDetails(u, "film*", true, false) }, }, { label: "listTables", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListTables(u, "tvmsE", "film*", true, false) }, }, { label: "listFuncs", f: func(w metadata.Writer, u *dburl.URL) error { return w.DescribeFunctions(u, "", "", false, false) }, }, { label: "listIndexes", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListIndexes(u, "", true, false) }, }, { label: "listSchemas", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListSchemas(u, "", true, false) }, }, }, }, { dbName: "sqlserver", funcs: []testFunc{ { label: "descTable", f: func(w metadata.Writer, u *dburl.URL) error { return w.DescribeTableDetails(u, "film*", true, false) }, // primary key indices get random names; ignore them ignore: "PK__.*__.{16}", }, { label: "listTables", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListTables(u, "tvmsE", "film*", true, false) }, }, { label: "listFuncs", f: func(w metadata.Writer, u *dburl.URL) error { return w.DescribeFunctions(u, "", "", false, false) }, }, { label: "listIndexes", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListIndexes(u, "", true, false) }, // primary key indices get random names; ignore them ignore: "PK__.*__.{16}", }, { label: "listSchemas", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListSchemas(u, "", true, false) }, }, }, }, { dbName: "trino", funcs: []testFunc{ { label: "descTable", f: func(w metadata.Writer, u *dburl.URL) error { return w.DescribeTableDetails(u, "order*", true, false) }, }, { label: "listTables", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListTables(u, "tvmsE", "order*", true, false) }, }, { label: "listSchemas", f: func(w metadata.Writer, u *dburl.URL) error { return w.ListSchemas(u, "", true, false) }, }, }, }, } for _, test := range testCases { for _, testFunc := range test.funcs { actual := fmt.Sprintf("testdata/%s.%s.actual.txt", test.dbName, testFunc.label) fo, err := os.Create(actual) if err != nil { t.Fatalf("Cannot create results file %s: %v", actual, err) } db, ok := dbs[test.dbName] if !ok { continue } w, err := drivers.NewMetadataWriter(context.Background(), db.URL, db.DB, fo) if err != nil { log.Fatalf("Could not create writer %s %s: %v", test.dbName, testFunc.label, err) } err = testFunc.f(w, db.URL) if err != nil { log.Fatalf("Could not write %s %s: %v", test.dbName, testFunc.label, err) } err = fo.Close() if err != nil { t.Fatalf("Cannot close results file %s: %v", actual, err) } expected := fmt.Sprintf("testdata/%s.%s.expected.txt", test.dbName, testFunc.label) err = filesEqual(expected, actual, testFunc.ignore) if err != nil { t.Error(err) } } } } func TestCopy(t *testing.T) { pg, ok := dbs["pgsql"] if !ok { t.Skip("Skipping copy tests, as they require PostgreSQL which was not selected for tests") } // setup test data, ignoring errors, since there'll be duplicates _, _ = pg.DB.Exec("ALTER TABLE staff DROP CONSTRAINT staff_address_id_fkey") _, _ = pg.DB.Exec("ALTER TABLE staff DROP CONSTRAINT staff_store_id_fkey") _, _ = pg.DB.Exec("INSERT INTO staff VALUES (1, 'John', 'Doe', 1, 'john@invalid.com', 1, true, 'jdoe', 'abc', now(), 'abcd')") type setupQuery struct { query string check bool } testCases := []struct { dbName string setupQueries []setupQuery src string dest string }{ { dbName: "pgsql", 
setupQueries: []setupQuery{ {query: "DROP TABLE staff_copy"}, {query: "CREATE TABLE staff_copy AS SELECT * FROM staff WHERE 0=1", check: true}, }, src: "select * from staff", dest: "staff_copy", }, { dbName: "pgsql", setupQueries: []setupQuery{ {query: "DROP TABLE staff_copy"}, {query: "CREATE TABLE staff_copy AS SELECT * FROM staff WHERE 0=1", check: true}, }, src: "select * from staff", dest: "public.staff_copy", }, { dbName: "pgx", setupQueries: []setupQuery{ {query: "DROP TABLE staff_copy"}, {query: "CREATE TABLE staff_copy AS SELECT * FROM staff WHERE 0=1", check: true}, }, src: "select * from staff", dest: "staff_copy", }, { dbName: "pgx", setupQueries: []setupQuery{ {query: "DROP TABLE staff_copy"}, {query: "CREATE TABLE staff_copy AS SELECT * FROM staff WHERE 0=1", check: true}, }, src: "select * from staff", dest: "public.staff_copy", }, { dbName: "mysql", setupQueries: []setupQuery{ {query: "DROP TABLE staff_copy"}, {query: "CREATE TABLE staff_copy AS SELECT * FROM staff WHERE 0=1", check: true}, }, src: "select staff_id, first_name, last_name, address_id, picture, email, store_id, active, username, password, last_update from staff", dest: "staff_copy(staff_id, first_name, last_name, address_id, picture, email, store_id, active, username, password, last_update)", }, { dbName: "sqlserver", setupQueries: []setupQuery{ {query: "DROP TABLE staff_copy"}, {query: "SELECT * INTO staff_copy FROM staff WHERE 0=1", check: true}, }, src: "select first_name, last_name, address_id, picture, email, store_id, active, username, password, last_update from staff", dest: "staff_copy(first_name, last_name, address_id, picture, email, store_id, active, username, password, last_update)", }, { dbName: "csvq", setupQueries: []setupQuery{ {query: "CREATE TABLE IF NOT EXISTS staff_copy AS SELECT * FROM `staff.csv` WHERE 0=1", check: true}, }, src: "select first_name, last_name, address_id, email, store_id, active, username, password, last_update from staff", dest: "staff_copy", }, } for _, test := range testCases { db, ok := dbs[test.dbName] if !ok { continue } t.Run(test.dbName, func(t *testing.T) { // TODO test copy from a different DB, maybe csvq? // TODO test copy from same DB for _, q := range test.setupQueries { _, err := db.DB.Exec(q.query) if q.check && err != nil { t.Fatalf("Failed to run setup query `%s`: %v", q.query, err) } } rows, err := pg.DB.Query(test.src) if err != nil { t.Fatalf("Could not get rows to copy: %v", err) } ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) defer cancel() var rlen int64 = 1 n, err := drivers.Copy(ctx, db.URL, nil, nil, rows, test.dest) if err != nil { t.Fatalf("Could not copy: %v", err) } if n != rlen { t.Fatalf("Expected to copy %d rows but got %d", rlen, n) } }) } } // filesEqual compares the files at paths a and b and returns an error if // the content is not equal. Ignore is a regex. All matches will be removed // from the file contents before comparison. func filesEqual(a, b, ignore string) error { // per comment, better to not read an entire file into memory // this is simply a trivial example. 
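// For instance, the sqlserver cases in TestWriter above effectively call
// (paths follow the testdata/<dbName>.<label>.*.txt pattern used there):
//
//	filesEqual("testdata/sqlserver.descTable.expected.txt",
//		"testdata/sqlserver.descTable.actual.txt", "PK__.*__.{16}")
//
// so that the randomly generated primary-key index names are stripped from
// both files before the byte comparison below.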
f1, err := os.ReadFile(a) if err != nil { return fmt.Errorf("Cannot read file %s: %w", a, err) } f2, err := os.ReadFile(b) if err != nil { return fmt.Errorf("Cannot read file %s: %w", b, err) } if ignore != "" { reg, err := regexp.Compile(ignore) if err != nil { return fmt.Errorf("Cannot compile regex (%s): %w", ignore, err) } f1 = reg.ReplaceAllLiteral(f1, []byte{}) f2 = reg.ReplaceAllLiteral(f2, []byte{}) } if !bytes.Equal(f1, f2) { return fmt.Errorf("Files %s and %s have different contents", a, b) } return nil } usql-0.19.19/drivers/duckdb/000077500000000000000000000000001476173253300156045ustar00rootroot00000000000000usql-0.19.19/drivers/duckdb/duckdb.go000066400000000000000000000074361476173253300174010ustar00rootroot00000000000000// Package duckdb defines and registers usql's DuckDB driver. Requires CGO. // // See: https://github.com/marcboeker/go-duckdb package duckdb import ( "context" "database/sql" "fmt" "io" "strings" _ "github.com/marcboeker/go-duckdb" // DRIVER "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" mymeta "github.com/xo/usql/drivers/metadata/mysql" ) type metaReader struct { metadata.LoggingReader } var ( _ metadata.CatalogReader = &metaReader{} _ metadata.ColumnStatReader = &metaReader{} ) func (r metaReader) Catalogs(metadata.Filter) (*metadata.CatalogSet, error) { qstr := `SHOW catalogs` rows, closeRows, err := r.Query(qstr) if err != nil { return nil, err } defer closeRows() results := []metadata.Catalog{} for rows.Next() { rec := metadata.Catalog{} err = rows.Scan(&rec.Catalog) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewCatalogSet(results), nil } func (r metaReader) ColumnStats(f metadata.Filter) (*metadata.ColumnStatSet, error) { names := []string{} if f.Catalog != "" { names = append(names, f.Catalog+".") } if f.Schema != "" { names = append(names, f.Schema+".") } names = append(names, f.Parent) rows, closeRows, err := r.Query(fmt.Sprintf("SHOW STATS FOR %s", strings.Join(names, ""))) if err != nil { return nil, err } defer closeRows() results := []metadata.ColumnStat{} for rows.Next() { rec := metadata.ColumnStat{Catalog: f.Catalog, Schema: f.Schema, Table: f.Parent} name := sql.NullString{} avgWidth := sql.NullInt32{} numDistinct := sql.NullInt64{} nullFrac := sql.NullFloat64{} numRows := sql.NullInt64{} min := sql.NullString{} max := sql.NullString{} err = rows.Scan( &name, &avgWidth, &numDistinct, &nullFrac, &numRows, &min, &max, ) if err != nil { return nil, err } if !name.Valid { continue } rec.Name = name.String if avgWidth.Valid { rec.AvgWidth = int(avgWidth.Int32) } if numDistinct.Valid { rec.NumDistinct = numDistinct.Int64 } if nullFrac.Valid { rec.NullFrac = nullFrac.Float64 } if min.Valid { rec.Min = min.String } if max.Valid { rec.Max = max.String } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewColumnStatSet(results), nil } func init() { newReader := func(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { ir := infos.New( infos.WithPlaceholder(func(int) string { return "?" 
}), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsColumnSize: "0", infos.ColumnsNumericScale: "0", infos.ColumnsNumericPrecRadix: "0", infos.ColumnsCharOctetLength: "0", }), infos.WithFunctions(false), infos.WithSequences(false), infos.WithIndexes(false), infos.WithConstraints(false), infos.WithColumnPrivileges(false), infos.WithUsagePrivileges(false), )(db, opts...) mr := &metaReader{ LoggingReader: metadata.NewLoggingReader(db, opts...), } return metadata.NewPluginReader(ir, mr) } drivers.Register("duckdb", drivers.Driver{ AllowMultilineComments: true, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext(ctx, `SELECT library_version FROM pragma_version()`).Scan(&ver) if err != nil { return "", err } return "DuckDB " + ver, nil }, NewMetadataReader: newReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(newReader(db, opts...))(db, w) }, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), NewCompleter: mymeta.NewCompleter, }) } usql-0.19.19/drivers/dynamodb/000077500000000000000000000000001476173253300161455ustar00rootroot00000000000000usql-0.19.19/drivers/dynamodb/dynamodb.go000066400000000000000000000004361476173253300202740ustar00rootroot00000000000000// Package dynamodb defines and registers usql's DynamoDb driver. // // See: https://github.com/btnguyen2k/godynamo package dynamodb import ( _ "github.com/btnguyen2k/godynamo" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("godynamo", drivers.Driver{}) } usql-0.19.19/drivers/errors.go000066400000000000000000000022651476173253300162200ustar00rootroot00000000000000package drivers import ( "strings" "unicode" ) // Error is a wrapper to standardize errors. type Error struct { Driver string Err error } // WrapErr wraps an error using the specified driver when err is not nil. func WrapErr(driver string, err error) error { if err == nil { return nil } // avoid double wrapping error if _, ok := err.(*Error); ok { return err } return &Error{driver, err} } // Error satisfies the error interface, returning simple information about the // wrapped error in standardized way. func (e *Error) Error() string { if d, ok := drivers[e.Driver]; ok { n := e.Driver if d.Name != "" { n = d.Name } s := n var msg string if d.Err != nil { var code string code, msg = d.Err(e.Err) if code != "" { s += ": " + code } } else { msg = e.Err.Error() } return s + ": " + chop(msg, n) } return e.Driver + ": " + chop(e.Err.Error(), e.Driver) } // Unwrap returns the original error. func (e *Error) Unwrap() error { return e.Err } // chop chops off a "prefix: " prefix from a string. func chop(s, prefix string) string { return strings.TrimLeftFunc(strings.TrimPrefix(strings.TrimSpace(s), prefix+":"), unicode.IsSpace) } usql-0.19.19/drivers/exasol/000077500000000000000000000000001476173253300156435ustar00rootroot00000000000000usql-0.19.19/drivers/exasol/exasol.go000066400000000000000000000017761476173253300175000ustar00rootroot00000000000000// Package exasol defines and registers usql's Exasol driver. 
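// As a rough illustration of how the Err hook below interacts with the Error
// wrapper in the drivers package (the error text is hypothetical): an Exasol
// error whose message starts with "[42000] " has the bracketed code split
// off, so the wrapped error prints as
//
//	exasol: 42000: syntax error ...
//
// because Error.Error() joins the driver name, the code returned by Err, and
// the trimmed message.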
// // See: https://github.com/exasol/exasol-driver-go package exasol import ( "context" "regexp" _ "github.com/exasol/exasol-driver-go" // DRIVER "github.com/xo/usql/drivers" ) func init() { errCodeRE := regexp.MustCompile(`^\[([0-9]+)]\s+`) drivers.Register("exasol", drivers.Driver{ AllowMultilineComments: true, LowerColumnNames: true, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), Err: func(err error) (string, string) { code, msg := "", err.Error() if m := errCodeRE.FindStringSubmatch(msg); m != nil { code, msg = m[1], errCodeRE.ReplaceAllString(msg, "") } return code, msg }, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string if err := db.QueryRowContext(ctx, `SELECT param_value FROM exa_metadata WHERE param_name = 'databaseProductVersion'`).Scan(&ver); err != nil { return "", err } return "Exasol " + ver, nil }, }) } usql-0.19.19/drivers/firebird/000077500000000000000000000000001476173253300161365ustar00rootroot00000000000000usql-0.19.19/drivers/firebird/firebird.go000066400000000000000000000012131476173253300202500ustar00rootroot00000000000000// Package firebird defines and registers usql's Firebird driver. // // See: https://github.com/nakagami/firebirdsql package firebird import ( "context" _ "github.com/nakagami/firebirdsql" // DRIVER: firebirdsql "github.com/xo/usql/drivers" ) func init() { drivers.Register("firebirdsql", drivers.Driver{ AllowMultilineComments: true, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext( ctx, `SELECT rdb$get_context('SYSTEM', 'ENGINE_VERSION') FROM rdb$database;`, ).Scan(&ver) if err != nil { return "", err } return "Firebird " + ver, nil }, }) } usql-0.19.19/drivers/flightsql/000077500000000000000000000000001476173253300163455ustar00rootroot00000000000000usql-0.19.19/drivers/flightsql/flightsql.go000066400000000000000000000005441476173253300206740ustar00rootroot00000000000000// Package flightsql defines and registers usql's FlightSQL driver. // // See: https://github.com/apache/arrow/tree/main/go/arrow/flight/flightsql/driver package flightsql import ( _ "github.com/apache/arrow/go/v17/arrow/flight/flightsql/driver" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("flightsql", drivers.Driver{}) } usql-0.19.19/drivers/godror/000077500000000000000000000000001476173253300156445ustar00rootroot00000000000000usql-0.19.19/drivers/godror/godror.go000066400000000000000000000023241476173253300174700ustar00rootroot00000000000000// Package godror defines and registers usql's GO DRiver for ORacle driver. // Requires CGO. Uses Oracle's ODPI-C (instant client) library. 
// // See: https://github.com/godror/godror // Group: all package godror import ( "errors" "fmt" "strings" _ "github.com/godror/godror" // DRIVER "github.com/xo/usql/drivers/oracle/orshared" ) func init() { orshared.Register( "godror", // unwrap error func(err error) (string, string) { if e := errors.Unwrap(err); e != nil { err = e } code, msg := "", err.Error() if e, ok := err.(interface { Code() int }); ok { code = fmt.Sprintf("ORA-%05d", e.Code()) } if e, ok := err.(interface { Message() string }); ok { msg = e.Message() } if i := strings.LastIndex(msg, "ORA-"); msg == "" && i != -1 { msg = msg[i:] if j := strings.Index(msg, ":"); j != -1 { msg = msg[j+1:] if code == "" { code = msg[i:j] } } } return code, strings.TrimSpace(msg) }, // is password error func(err error) bool { if e := errors.Unwrap(err); e != nil { err = e } if e, ok := err.(interface { Code() int }); ok { return e.Code() == 1017 || e.Code() == 1005 } return false }, ) } usql-0.19.19/drivers/h2/000077500000000000000000000000001476173253300146615ustar00rootroot00000000000000usql-0.19.19/drivers/h2/h2.go000066400000000000000000000005431476173253300155230ustar00rootroot00000000000000// Package h2 defines and registers usql's Apache H2 driver. // // See: https://github.com/jmrobles/h2go package h2 import ( _ "github.com/jmrobles/h2go" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("h2", drivers.Driver{ AllowDollar: true, AllowMultilineComments: true, AllowCComments: true, }) } usql-0.19.19/drivers/hive/000077500000000000000000000000001476173253300153035ustar00rootroot00000000000000usql-0.19.19/drivers/hive/hive.go000066400000000000000000000007141476173253300165670ustar00rootroot00000000000000// Package hive defines and registers usql's Apache Hive driver. // // See: https://github.com/sql-machine-learning/gohive package hive import ( "github.com/xo/dburl" "github.com/xo/usql/drivers" _ "sqlflow.org/gohive" // DRIVER ) func init() { drivers.Register("hive", drivers.Driver{ ForceParams: func(u *dburl.URL) { if u.User != nil && u.Query().Get("auth") == "" { drivers.ForceQueryParameters([]string{"auth", "PLAIN"})(u) } }, }) } usql-0.19.19/drivers/ignite/000077500000000000000000000000001476173253300156275ustar00rootroot00000000000000usql-0.19.19/drivers/ignite/ignite.go000066400000000000000000000010511476173253300174320ustar00rootroot00000000000000// Package ignite defines and registers usql's Apache Ignite driver. // // See: https://github.com/amsokol/ignite-go-client package ignite import ( "strconv" "github.com/amsokol/ignite-go-client/binary/errors" _ "github.com/amsokol/ignite-go-client/sql" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("ignite", drivers.Driver{ Err: func(err error) (string, string) { if e, ok := err.(*errors.IgniteError); ok { return strconv.Itoa(int(e.IgniteStatus)), e.IgniteMessage } return "", err.Error() }, }) } usql-0.19.19/drivers/impala/000077500000000000000000000000001476173253300156135ustar00rootroot00000000000000usql-0.19.19/drivers/impala/impala.go000066400000000000000000000004451476173253300174100ustar00rootroot00000000000000// Package impala defines and registers usql's Apache Impala driver. 
// // See: https://github.com/bippio/go-impala // Group: bad package impala import ( _ "github.com/bippio/go-impala" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("impala", drivers.Driver{}) } usql-0.19.19/drivers/maxcompute/000077500000000000000000000000001476173253300165325ustar00rootroot00000000000000usql-0.19.19/drivers/maxcompute/maxcompute.go000066400000000000000000000004661476173253300212510ustar00rootroot00000000000000// Package maxcompute defines and registers usql's Alibaba MaxCompute driver. // // See: https://github.com/sql-machine-learning/gomaxcompute package maxcompute import ( "github.com/xo/usql/drivers" _ "sqlflow.org/gomaxcompute" // DRIVER ) func init() { drivers.Register("maxcompute", drivers.Driver{}) } usql-0.19.19/drivers/metadata/000077500000000000000000000000001476173253300161305ustar00rootroot00000000000000usql-0.19.19/drivers/metadata/informationschema/000077500000000000000000000000001476173253300216365ustar00rootroot00000000000000usql-0.19.19/drivers/metadata/informationschema/metadata.go000066400000000000000000001007171476173253300237530ustar00rootroot00000000000000// Package informationschema provides metadata readers that query tables from // the information_schema schema. It tries to be database agnostic, // but there is a set of options to configure what tables and columns to expect. package informationschema import ( "database/sql" "fmt" "strings" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" "github.com/xo/usql/text" ) // InformationSchema metadata reader type InformationSchema struct { metadata.LoggingReader pf func(int) string hasFunctions bool hasSequences bool hasIndexes bool hasConstraints bool hasCheckConstraints bool hasTablePrivileges bool hasColumnPrivileges bool hasUsagePrivileges bool clauses map[ClauseName]string limit int systemSchemas []string currentSchema string dataTypeFormatter func(metadata.Column) string } var _ metadata.BasicReader = &InformationSchema{} type Logger interface { Println(...interface{}) } type ClauseName string const ( ColumnsDataType = ClauseName("columns.data_type") ColumnsColumnSize = ClauseName("columns.column_size") ColumnsNumericScale = ClauseName("columns.numeric_scale") ColumnsNumericPrecRadix = ClauseName("columns.numeric_precision_radix") ColumnsCharOctetLength = ClauseName("columns.character_octet_length") FunctionColumnsColumnSize = ClauseName("function_columns.column_size") FunctionColumnsNumericScale = ClauseName("function_columns.numeric_scale") FunctionColumnsNumericPrecRadix = ClauseName("function_columns.numeric_precision_radix") FunctionColumnsCharOctetLength = ClauseName("function_columns.character_octet_length") FunctionsSecurityType = ClauseName("functions.security_type") ConstraintIsDeferrable = ClauseName("constraint_columns.is_deferrable") ConstraintInitiallyDeferred = ClauseName("constraint_columns.initially_deferred") ConstraintJoinCond = ClauseName("constraint_join.fk") SequenceColumnsIncrement = ClauseName("sequence_columns.increment") PrivilegesGrantor = ClauseName("privileges.grantor") ) // New InformationSchema reader func New(opts ...metadata.ReaderOption) func(drivers.DB, ...metadata.ReaderOption) metadata.Reader { s := &InformationSchema{ pf: func(n int) string { return fmt.Sprintf("$%d", n) }, hasFunctions: true, hasSequences: true, hasIndexes: true, hasConstraints: true, hasCheckConstraints: true, hasTablePrivileges: true, hasColumnPrivileges: true, hasUsagePrivileges: true, clauses: map[ClauseName]string{ ColumnsDataType: "data_type", 
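// the clause defaults below assume standard information_schema column names; drivers can override individual clauses with WithCustomClauses when their dialect differs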
ColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, 0)", ColumnsNumericScale: "COALESCE(numeric_scale, 0)", ColumnsNumericPrecRadix: "COALESCE(numeric_precision_radix, 10)", ColumnsCharOctetLength: "COALESCE(character_octet_length, 0)", FunctionColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, 0)", FunctionColumnsNumericScale: "COALESCE(numeric_scale, 0)", FunctionColumnsNumericPrecRadix: "COALESCE(numeric_precision_radix, 10)", FunctionColumnsCharOctetLength: "COALESCE(character_octet_length, 0)", FunctionsSecurityType: "security_type", ConstraintIsDeferrable: "t.is_deferrable", ConstraintInitiallyDeferred: "t.initially_deferred", SequenceColumnsIncrement: "increment", PrivilegesGrantor: "grantor", }, systemSchemas: []string{"information_schema"}, dataTypeFormatter: func(col metadata.Column) string { return col.DataType }, } // apply InformationSchema specific options for _, o := range opts { o(s) } return func(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { s.LoggingReader = metadata.NewLoggingReader(db, opts...) return s } } // WithPlaceholder sets the placeholder generator function, which usually returns either `?` or `$n`, // where `n` is the argument. func WithPlaceholder(pf func(int) string) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).pf = pf } } // WithCustomClauses to use different expressions for some columns func WithCustomClauses(cols map[ClauseName]string) metadata.ReaderOption { return func(r metadata.Reader) { for k, v := range cols { r.(*InformationSchema).clauses[k] = v } } } // WithFunctions when the `routines` and `parameters` tables exist func WithFunctions(fun bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasFunctions = fun } } // WithIndexes when the `statistics` table exists func WithIndexes(ind bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasIndexes = ind } } // WithConstraints when the `table_constraints` and `referential_constraints` tables exist func WithConstraints(con bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasConstraints = con } } // WithCheckConstraints when the `constraint_column_usage` table exists func WithCheckConstraints(con bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasCheckConstraints = con } } // WithSequences when the `sequences` table exists func WithSequences(seq bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasSequences = seq } } // WithTablePrivileges when the `table_privileges` table exists func WithTablePrivileges(t bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasTablePrivileges = t } } // WithColumnPrivileges when the `column_privileges` table exists func WithColumnPrivileges(c bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasColumnPrivileges = c } } // WithUsagePrivileges when the `usage_privileges` table exists func WithUsagePrivileges(u bool) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).hasUsagePrivileges = u } } // WithSystemSchemas sets the schemas that are ignored unless the WithSystem filter is true func WithSystemSchemas(schemas []string) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).systemSchemas = schemas } } // WithCurrentSchema sets the expression to filter by when the OnlyVisible filter is true func WithCurrentSchema(expr string)
metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).currentSchema = expr } } // WithDataTypeFormatter function to build updated string representation of data type // from Column func WithDataTypeFormatter(f func(metadata.Column) string) metadata.ReaderOption { return func(r metadata.Reader) { r.(*InformationSchema).dataTypeFormatter = f } } func (s *InformationSchema) SetLimit(l int) { s.limit = l } // Columns from selected catalog (or all, if empty), matching schemas and tables func (s InformationSchema) Columns(f metadata.Filter) (*metadata.ColumnSet, error) { columns := []string{ "table_catalog", "table_schema", "table_name", "column_name", "ordinal_position", s.clauses[ColumnsDataType], "COALESCE(column_default, '')", "COALESCE(is_nullable, '') AS is_nullable", s.clauses[ColumnsColumnSize], s.clauses[ColumnsNumericScale], s.clauses[ColumnsNumericPrecRadix], s.clauses[ColumnsCharOctetLength], } qstr := "SELECT\n " + strings.Join(columns, ",\n ") + " FROM information_schema.columns\n" conds, vals := s.conditions(1, f, formats{ catalog: "table_catalog LIKE %s", schema: "table_schema LIKE %s", notSchemas: "table_schema NOT IN (%s)", parent: "table_name LIKE %s", }) rows, closeRows, err := s.query(qstr, conds, "table_catalog, table_schema, table_name, ordinal_position", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewColumnSet([]metadata.Column{}), nil } return nil, err } defer closeRows() results := []metadata.Column{} for rows.Next() { rec := metadata.Column{} err = rows.Scan( &rec.Catalog, &rec.Schema, &rec.Table, &rec.Name, &rec.OrdinalPosition, &rec.DataType, &rec.Default, &rec.IsNullable, &rec.ColumnSize, &rec.DecimalDigits, &rec.NumPrecRadix, &rec.CharOctetLength, ) if err != nil { return nil, err } rec.DataType = s.dataTypeFormatter(rec) results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewColumnSet(results), nil } // Tables from selected catalog (or all, if empty), matching schemas, names and types func (s InformationSchema) Tables(f metadata.Filter) (*metadata.TableSet, error) { qstr := `SELECT table_catalog, table_schema, table_name, table_type FROM information_schema.tables ` conds, vals := s.conditions(1, f, formats{ catalog: "table_catalog LIKE %s", schema: "table_schema LIKE %s", notSchemas: "table_schema NOT IN (%s)", name: "table_name LIKE %s", types: "table_type IN (%s)", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } addSequences := false for _, t := range f.Types { if t == "SEQUENCE" && s.hasSequences { addSequences = true } } if addSequences { qstr += ` UNION ALL SELECT sequence_catalog AS table_catalog, sequence_schema AS table_schema, sequence_name AS table_name, 'SEQUENCE' AS table_type FROM information_schema.sequences ` conds, seqVals := s.conditions(len(vals)+1, f, formats{ catalog: "sequence_catalog LIKE %s", schema: "sequence_schema LIKE %s", notSchemas: "sequence_schema NOT IN (%s)", name: "sequence_name LIKE %s", }) vals = append(vals, seqVals...) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } } rows, closeRows, err := s.query(qstr, []string{}, "table_catalog, table_schema, table_type, table_name", vals...) 
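// a driver returning sql.ErrNoRows here is treated as "no matching tables": the error check below converts it into an empty TableSet instead of failing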
if err != nil { if err == sql.ErrNoRows { return metadata.NewTableSet([]metadata.Table{}), nil } return nil, err } defer closeRows() results := []metadata.Table{} for rows.Next() { rec := metadata.Table{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Name, &rec.Type) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewTableSet(results), nil } // Schemas from selected catalog (or all, if empty), matching schemas and tables func (s InformationSchema) Schemas(f metadata.Filter) (*metadata.SchemaSet, error) { qstr := `SELECT schema_name, catalog_name FROM information_schema.schemata ` conds, vals := s.conditions(1, f, formats{ catalog: "catalog_name LIKE %s", name: "schema_name LIKE %s", notSchemas: "schema_name NOT IN (%s)", }) rows, closeRows, err := s.query(qstr, conds, "catalog_name, schema_name", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewSchemaSet([]metadata.Schema{}), nil } return nil, err } defer closeRows() results := []metadata.Schema{} for rows.Next() { rec := metadata.Schema{} err = rows.Scan(&rec.Schema, &rec.Catalog) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewSchemaSet(results), nil } // Functions from selected catalog (or all, if empty), matching schemas, names and types func (s InformationSchema) Functions(f metadata.Filter) (*metadata.FunctionSet, error) { if !s.hasFunctions { return nil, text.ErrNotSupported } columns := []string{ "specific_name", "routine_catalog", "routine_schema", "routine_name", "COALESCE(routine_type, '')", "COALESCE(data_type, '')", "routine_definition", "COALESCE(external_language, routine_body) AS language", "is_deterministic", s.clauses[FunctionsSecurityType], } qstr := "SELECT\n " + strings.Join(columns, ",\n ") + " FROM information_schema.routines\n" conds, vals := s.conditions(1, f, formats{ catalog: "routine_catalog LIKE %s", schema: "routine_schema LIKE %s", notSchemas: "routine_schema NOT IN (%s)", name: "routine_name LIKE %s", types: "routine_type IN (%s)", }) rows, closeRows, err := s.query(qstr, conds, "routine_catalog, routine_schema, routine_name, COALESCE(routine_type, '')", vals...) 
if err != nil { if err == sql.ErrNoRows { return metadata.NewFunctionSet([]metadata.Function{}), nil } return nil, err } defer closeRows() results := []metadata.Function{} for rows.Next() { rec := metadata.Function{} err = rows.Scan( &rec.SpecificName, &rec.Catalog, &rec.Schema, &rec.Name, &rec.Type, &rec.ResultType, &rec.Source, &rec.Language, &rec.Volatility, &rec.Security, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewFunctionSet(results), nil } // FunctionColumns (arguments) from selected catalog (or all, if empty), matching schemas and functions func (s InformationSchema) FunctionColumns(f metadata.Filter) (*metadata.FunctionColumnSet, error) { if !s.hasFunctions { return nil, text.ErrNotSupported } columns := []string{ "specific_catalog", "specific_schema", "specific_name", "COALESCE(parameter_name, '')", "ordinal_position", "COALESCE(parameter_mode, '')", "COALESCE(data_type, '')", s.clauses[FunctionColumnsColumnSize], s.clauses[FunctionColumnsNumericScale], s.clauses[FunctionColumnsNumericPrecRadix], s.clauses[FunctionColumnsCharOctetLength], } qstr := "SELECT\n " + strings.Join(columns, ",\n ") + " FROM information_schema.parameters\n" conds, vals := s.conditions(1, f, formats{ catalog: "specific_catalog LIKE %s", schema: "specific_schema LIKE %s", notSchemas: "specific_schema NOT IN (%s)", parent: "specific_name LIKE %s", }) rows, closeRows, err := s.query(qstr, conds, "specific_catalog, specific_schema, specific_name, ordinal_position, COALESCE(parameter_name, '')", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewFunctionColumnSet([]metadata.FunctionColumn{}), nil } return nil, err } defer closeRows() results := []metadata.FunctionColumn{} for rows.Next() { rec := metadata.FunctionColumn{} err = rows.Scan( &rec.Catalog, &rec.Schema, &rec.FunctionName, &rec.Name, &rec.OrdinalPosition, &rec.Type, &rec.DataType, &rec.ColumnSize, &rec.DecimalDigits, &rec.NumPrecRadix, &rec.CharOctetLength, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewFunctionColumnSet(results), nil } // Indexes from selected catalog (or all, if empty), matching schemas and names func (s InformationSchema) Indexes(f metadata.Filter) (*metadata.IndexSet, error) { if !s.hasIndexes { return nil, text.ErrNotSupported } qstr := `SELECT table_catalog, index_schema, table_name, index_name, CASE WHEN non_unique = 0 THEN 'YES' ELSE 'NO' END AS is_unique, CASE WHEN index_name = 'PRIMARY' THEN 'YES' ELSE 'NO' END AS is_primary, index_type FROM information_schema.statistics ` conds, vals := s.conditions(1, f, formats{ catalog: "table_catalog LIKE %s", schema: "index_schema LIKE %s", notSchemas: "index_schema NOT IN (%s)", parent: "table_name LIKE %s", name: "index_name LIKE %s", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } qstr += ` GROUP BY table_catalog, index_schema, table_name, index_name, CASE WHEN non_unique = 0 THEN 'YES' ELSE 'NO' END, CASE WHEN index_name = 'PRIMARY' THEN 'YES' ELSE 'NO' END, index_type` rows, closeRows, err := s.query(qstr, []string{}, "table_catalog, index_schema, table_name, index_name", vals...) 
if err != nil { if err == sql.ErrNoRows { return metadata.NewIndexSet([]metadata.Index{}), nil } return nil, err } defer closeRows() results := []metadata.Index{} for rows.Next() { rec := metadata.Index{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Table, &rec.Name, &rec.IsUnique, &rec.IsPrimary, &rec.Type) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexSet(results), nil } // IndexColumns from selected catalog (or all, if empty), matching schemas and indexes func (s InformationSchema) IndexColumns(f metadata.Filter) (*metadata.IndexColumnSet, error) { if !s.hasIndexes { return nil, text.ErrNotSupported } qstr := `SELECT i.table_catalog, i.table_schema, i.table_name, i.index_name, i.column_name, c.data_type, i.seq_in_index FROM information_schema.statistics i JOIN information_schema.columns c ON i.table_catalog = c.table_catalog AND i.table_schema = c.table_schema AND i.table_name = c.table_name AND i.column_name = c.column_name ` conds, vals := s.conditions(1, f, formats{ catalog: "i.table_catalog LIKE %s", schema: "index_schema LIKE %s", notSchemas: "index_schema NOT IN (%s)", parent: "i.table_name LIKE %s", name: "index_name LIKE %s", }) rows, closeRows, err := s.query(qstr, conds, "i.table_catalog, index_schema, table_name, index_name, seq_in_index", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewIndexColumnSet([]metadata.IndexColumn{}), nil } return nil, err } defer closeRows() results := []metadata.IndexColumn{} for rows.Next() { rec := metadata.IndexColumn{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Table, &rec.IndexName, &rec.Name, &rec.DataType, &rec.OrdinalPosition) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexColumnSet(results), nil } // Constraints from selected catalog (or all, if empty), matching schemas and names func (s InformationSchema) Constraints(f metadata.Filter) (*metadata.ConstraintSet, error) { if !s.hasConstraints { return nil, text.ErrNotSupported } columns := []string{ "t.constraint_catalog", "t.table_schema", "t.table_name", "t.constraint_name", "t.constraint_type", s.clauses[ConstraintIsDeferrable], s.clauses[ConstraintInitiallyDeferred], "COALESCE(r.unique_constraint_catalog, '') AS foreign_catalog", "COALESCE(r.unique_constraint_schema, '') AS foreign_schema", "COALESCE(f.table_name, '') AS foreign_table", "COALESCE(r.unique_constraint_name, '') AS foreign_constraint", "COALESCE(r.match_option, '') AS match_options", "COALESCE(r.update_rule, '') AS update_rule", "COALESCE(r.delete_rule, '') AS delete_rule", "COALESCE(c.check_clause, '') AS check_clause", } qstr := "SELECT\n " + strings.Join(columns, ",\n ") + ` FROM information_schema.table_constraints t LEFT JOIN information_schema.referential_constraints r ON t.constraint_catalog = r.constraint_catalog AND t.constraint_schema = r.constraint_schema AND t.constraint_name = r.constraint_name AND t.constraint_type = 'FOREIGN KEY' LEFT JOIN information_schema.table_constraints f ON r.unique_constraint_catalog = f.constraint_catalog AND r.unique_constraint_schema = f.constraint_schema AND r.unique_constraint_name = f.constraint_name ` + s.clauses[ConstraintJoinCond] + ` LEFT JOIN information_schema.check_constraints c ON t.constraint_catalog = c.constraint_catalog AND t.constraint_schema = c.constraint_schema AND t.constraint_name = c.constraint_name ` conds, vals := s.conditions(1, f, formats{ 
catalog: "t.constraint_catalog LIKE %s", schema: "t.table_schema LIKE %s", notSchemas: "t.table_schema NOT IN (%s)", parent: "t.table_name LIKE %s", reference: "f.table_name LIKE %s", name: "t.constraint_name LIKE %s", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } rows, closeRows, err := s.query(qstr, []string{}, "t.constraint_catalog, t.table_schema, t.table_name, t.constraint_name", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewConstraintSet([]metadata.Constraint{}), nil } return nil, err } defer closeRows() results := []metadata.Constraint{} for rows.Next() { rec := metadata.Constraint{} err = rows.Scan( &rec.Catalog, &rec.Schema, &rec.Table, &rec.Name, &rec.Type, &rec.IsDeferrable, &rec.IsInitiallyDeferred, &rec.ForeignCatalog, &rec.ForeignSchema, &rec.ForeignTable, &rec.ForeignName, &rec.MatchType, &rec.UpdateRule, &rec.DeleteRule, &rec.CheckClause, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewConstraintSet(results), nil } // ConstraintColumns from selected catalog (or all, if empty), matching schemas and constraints func (s InformationSchema) ConstraintColumns(f metadata.Filter) (*metadata.ConstraintColumnSet, error) { if !s.hasConstraints { return nil, text.ErrNotSupported } vals := []interface{}{} qstr := "" if s.hasCheckConstraints { qstr = `SELECT c.constraint_catalog, c.table_schema, c.table_name, c.constraint_name, c.column_name, 1 AS ordinal_position, '' AS foreign_catalog, '' AS foreign_schema, '' AS foreign_table, '' AS foreign_name FROM information_schema.constraint_column_usage c ` conds, checkVals := s.conditions(len(vals)+1, f, formats{ catalog: "c.constraint_catalog LIKE %s", schema: "c.table_schema LIKE %s", notSchemas: "c.table_schema NOT IN (%s)", parent: "c.table_name LIKE %s", name: "c.constraint_name LIKE %s", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") vals = append(vals, checkVals...) } qstr += ` UNION ALL ` } qstr += `SELECT c.constraint_catalog, c.table_schema, c.table_name, c.constraint_name, c.column_name, c.ordinal_position, COALESCE(f.constraint_catalog, '') AS foreign_catalog, COALESCE(f.table_schema, '') AS foreign_schema, COALESCE(f.table_name, '') AS foreign_table, COALESCE(f.column_name, '') AS foreign_name FROM information_schema.key_column_usage c LEFT JOIN information_schema.referential_constraints r ON c.constraint_catalog = r.constraint_catalog AND c.constraint_schema = r.constraint_schema AND c.constraint_name = r.constraint_name LEFT JOIN information_schema.key_column_usage f ON r.unique_constraint_catalog = f.constraint_catalog AND r.unique_constraint_schema = f.constraint_schema AND r.unique_constraint_name = f.constraint_name ` + s.clauses[ConstraintJoinCond] + ` AND c.position_in_unique_constraint = f.ordinal_position ` conds, keyVals := s.conditions(len(vals)+1, f, formats{ catalog: "c.constraint_catalog LIKE %s", schema: "c.table_schema LIKE %s", notSchemas: "c.table_schema NOT IN (%s)", parent: "c.table_name LIKE %s", reference: "f.table_name LIKE %s", name: "c.constraint_name LIKE %s", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") vals = append(vals, keyVals...) } rows, closeRows, err := s.query(qstr, []string{}, "constraint_catalog, table_schema, table_name, constraint_name, ordinal_position, column_name", vals...) 
if err != nil { if err == sql.ErrNoRows { return metadata.NewConstraintColumnSet([]metadata.ConstraintColumn{}), nil } return nil, err } defer closeRows() results := []metadata.ConstraintColumn{} i := 1 for rows.Next() { rec := metadata.ConstraintColumn{OrdinalPosition: i} i++ err = rows.Scan( &rec.Catalog, &rec.Schema, &rec.Table, &rec.Constraint, &rec.Name, &rec.OrdinalPosition, &rec.ForeignCatalog, &rec.ForeignSchema, &rec.ForeignTable, &rec.ForeignName, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewConstraintColumnSet(results), nil } // Sequences from selected catalog (or all, if empty), matching schemas and names func (s InformationSchema) Sequences(f metadata.Filter) (*metadata.SequenceSet, error) { if !s.hasSequences { return nil, text.ErrNotSupported } columns := []string{ "sequence_catalog", "sequence_schema", "sequence_name", "data_type", "start_value", "minimum_value", "maximum_value", s.clauses[SequenceColumnsIncrement], "cycle_option", } qstr := "SELECT\n " + strings.Join(columns, ",\n ") + " FROM information_schema.sequences\n" conds, vals := s.conditions(1, f, formats{ catalog: "sequence_catalog LIKE %s", schema: "sequence_schema LIKE %s", notSchemas: "sequence_schema NOT IN (%s)", name: "sequence_name LIKE %s", }) rows, closeRows, err := s.query(qstr, conds, "sequence_catalog, sequence_schema, sequence_name", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewSequenceSet([]metadata.Sequence{}), nil } return nil, err } defer closeRows() results := []metadata.Sequence{} for rows.Next() { rec := metadata.Sequence{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Name, &rec.DataType, &rec.Start, &rec.Min, &rec.Max, &rec.Increment, &rec.Cycles) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewSequenceSet(results), nil } // PrivilegeSummaries of privileges on tables, views and sequences from selected catalog (or all, if empty), matching schemas and names func (s InformationSchema) PrivilegeSummaries(f metadata.Filter) (*metadata.PrivilegeSummarySet, error) { if !s.hasTablePrivileges && !s.hasColumnPrivileges && !s.hasUsagePrivileges { return nil, text.ErrNotSupported } qstrs := []string{} conds, vals := s.conditions(1, f, formats{ catalog: "object_catalog LIKE %s", schema: "object_schema LIKE %s", notSchemas: "object_schema NOT IN (%s)", name: "object_name LIKE %s", types: "object_type IN (%s)", }) if s.hasTablePrivileges { columns := []string{ "t.table_catalog AS object_catalog", "t.table_schema AS object_schema", "t.table_name AS object_name", "t.table_type AS object_type", "'' AS column_name", "COALESCE(grantee, '') AS grantee", "COALESCE(" + s.clauses[PrivilegesGrantor] + ", '') AS grantor", "COALESCE(privilege_type, '') AS privilege_type", "CASE WHEN is_grantable='YES' THEN 1 ELSE 0 END AS is_grantable", } // `tables` is on the left side of the join to also list tables that have no privileges set. 
qstr := "SELECT\n" + " " + strings.Join(columns, ", ") + "\n" + "FROM information_schema.tables t\n" + "LEFT JOIN information_schema.table_privileges tp\n" + " ON t.table_catalog = tp.table_catalog AND t.table_schema = tp.table_schema AND t.table_name = tp.table_name" qstrs = append(qstrs, qstr) } if s.hasColumnPrivileges { columns := []string{ "t.table_catalog AS object_catalog", "t.table_schema AS object_schema", "t.table_name AS object_name", "t.table_type AS object_type", "column_name", "grantee", s.clauses[PrivilegesGrantor] + " AS grantor", "privilege_type", "CASE WHEN is_grantable='YES' THEN 1 ELSE 0 END AS is_grantable", } qstr := "SELECT\n" + " " + strings.Join(columns, ", ") + "\n" + "FROM information_schema.column_privileges cp\n" + "LEFT JOIN information_schema.tables t\n" + " ON t.table_catalog = cp.table_catalog AND t.table_schema = cp.table_schema AND t.table_name = cp.table_name" qstrs = append(qstrs, qstr) } if s.hasUsagePrivileges { columns := []string{ "object_catalog", "object_schema", "object_name", "object_type", "'' AS column_name", "grantee", s.clauses[PrivilegesGrantor] + " AS grantor", "privilege_type", "CASE WHEN is_grantable='YES' THEN 1 ELSE 0 END AS is_grantable", } qstr := "SELECT\n" + " " + strings.Join(columns, ", ") + "\n" + "FROM information_schema.usage_privileges" qstrs = append(qstrs, qstr) } // In the query result, table and column level privileges will be on separate rows. // Each table or column can have multple privileges (i.e rows). // For table level privileges the `column_name` column is empty. qstr := "SELECT * FROM (\n" + strings.Join(qstrs, "\nUNION ALL\n") + "\n) AS subquery" rows, closeRows, err := s.query( qstr, conds, "object_catalog, object_schema, object_type, object_name, column_name, grantee, grantor, privilege_type", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewPrivilegeSummarySet([]metadata.PrivilegeSummary{}), nil } return nil, err } defer closeRows() type row struct { Catalog string Schema string Name string ObjectType string Column string Grantee string Grantor string PrivilegeType string IsGrantable bool } // The rows need to be aggregated into one `metadata.PrivilegeSummary` object per table. The rows are ordered by table such that we can append // to the current `metadata.PrivilegeSummary` as long as we are processing the same table. 
results := []metadata.PrivilegeSummary{} curSummary := &metadata.PrivilegeSummary{} for rows.Next() { r := row{} err = rows.Scan(&r.Catalog, &r.Schema, &r.Name, &r.ObjectType, &r.Column, &r.Grantee, &r.Grantor, &r.PrivilegeType, &r.IsGrantable) if err != nil { return nil, err } if curSummary.Catalog != r.Catalog || curSummary.Schema != r.Schema || curSummary.Name != r.Name { summary := metadata.PrivilegeSummary{ Catalog: r.Catalog, Schema: r.Schema, Name: r.Name, ObjectType: r.ObjectType, ObjectPrivileges: metadata.ObjectPrivileges{}, ColumnPrivileges: metadata.ColumnPrivileges{}, } results = append(results, summary) curSummary = &results[len(results)-1] } switch { // If the row specifies neither column nor table level privileges case r.PrivilegeType == "": // If row specifies table level privilege case r.Column == "": objPrivilege := metadata.ObjectPrivilege{Grantee: r.Grantee, Grantor: r.Grantor, PrivilegeType: r.PrivilegeType, IsGrantable: r.IsGrantable} curSummary.ObjectPrivileges = append(curSummary.ObjectPrivileges, objPrivilege) // If row specifies column level privilege default: colPrivilege := metadata.ColumnPrivilege{Column: r.Column, Grantee: r.Grantee, Grantor: r.Grantor, PrivilegeType: r.PrivilegeType, IsGrantable: r.IsGrantable} curSummary.ColumnPrivileges = append(curSummary.ColumnPrivileges, colPrivilege) } } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewPrivilegeSummarySet(results), nil } func (s InformationSchema) conditions(baseParam int, filter metadata.Filter, formats formats) ([]string, []interface{}) { conds := []string{} vals := []interface{}{} if filter.Catalog != "" && formats.catalog != "" { vals = append(vals, filter.Catalog) conds = append(conds, fmt.Sprintf(formats.catalog, s.pf(baseParam))) baseParam++ } if filter.Schema != "" && formats.schema != "" { vals = append(vals, filter.Schema) conds = append(conds, fmt.Sprintf(formats.schema, s.pf(baseParam))) baseParam++ } if !filter.WithSystem && formats.notSchemas != "" && len(s.systemSchemas) != 0 { pholders := []string{} for _, v := range s.systemSchemas { if v == filter.Schema { continue } vals = append(vals, v) pholders = append(pholders, s.pf(baseParam)) baseParam++ } if len(pholders) != 0 { conds = append(conds, fmt.Sprintf(formats.notSchemas, strings.Join(pholders, ", "))) } } if filter.OnlyVisible && formats.schema != "" && s.currentSchema != "" { conds = append(conds, fmt.Sprintf(formats.schema, s.currentSchema)) } if filter.Parent != "" && formats.parent != "" { vals = append(vals, filter.Parent) conds = append(conds, fmt.Sprintf(formats.parent, s.pf(baseParam))) baseParam++ } if filter.Reference != "" && formats.reference != "" { vals = append(vals, filter.Reference) conds = append(conds, fmt.Sprintf(formats.reference, s.pf(baseParam))) baseParam++ } if filter.Name != "" && formats.name != "" { vals = append(vals, filter.Name) conds = append(conds, fmt.Sprintf(formats.name, s.pf(baseParam))) baseParam++ } if len(filter.Types) != 0 && formats.types != "" { pholders := []string{} for _, t := range filter.Types { vals = append(vals, t) pholders = append(pholders, s.pf(baseParam)) baseParam++ } if len(pholders) != 0 { conds = append(conds, fmt.Sprintf(formats.types, strings.Join(pholders, ", "))) } } return conds, vals } type formats struct { catalog string schema string notSchemas string parent string reference string name string types string } func (s InformationSchema) query(qstr string, conds []string, order string, vals ...interface{}) (*sql.Rows, func(), error) { if len(conds) != 
0 { qstr += "\nWHERE " + strings.Join(conds, " AND ") } if order != "" { qstr += "\nORDER BY " + order } if s.limit != 0 { qstr += fmt.Sprintf("\nLIMIT %d", s.limit) } return s.Query(qstr, vals...) } usql-0.19.19/drivers/metadata/informationschema/metadata_test.go000066400000000000000000001225231476173253300250110ustar00rootroot00000000000000// Package informationschema_test runs integration tests for informationschema package // on real databases running in containers. During development, to avoid rebuilding // containers every run, add the `-cleanup=false` flags when calling `go test`. package informationschema_test import ( "database/sql" "flag" "fmt" "log" "net/url" "os" "sort" "strings" "testing" _ "github.com/go-sql-driver/mysql" "github.com/google/go-cmp/cmp" _ "github.com/microsoft/go-mssqldb" // DRIVER: sqlserver dt "github.com/ory/dockertest/v3" dc "github.com/ory/dockertest/v3/docker" _ "github.com/trinodb/trino-go-client/trino" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" _ "github.com/xo/usql/drivers/postgres" ) type Database struct { BuildArgs []dc.BuildArg RunOptions *dt.RunOptions Exec []string Driver string URL string ReadinessURL string DockerPort string Resource *dt.Resource DB *sql.DB Opts []metadata.ReaderOption Reader metadata.BasicReader } const ( pw = "yourStrong123_Password" ) var ( dbs = map[string]*Database{ "pgsql": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "postgres:13"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/postgres-sakila-db/postgres-sakila-schema.sql"}, {Name: "TARGET", Value: "/docker-entrypoint-initdb.d"}, {Name: "USER", Value: "root"}, }, RunOptions: &dt.RunOptions{ Name: "usql-pgsql", Cmd: []string{"-c", "log_statement=all", "-c", "log_min_duration_statement=0"}, Env: []string{"POSTGRES_PASSWORD=pw"}, }, Driver: "postgres", URL: "postgres://postgres:pw@localhost:%s/postgres?sslmode=disable", DockerPort: "5432/tcp", Opts: []metadata.ReaderOption{ infos.WithIndexes(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, interval_precision, 0)", infos.FunctionColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, interval_precision, 0)", }), infos.WithSystemSchemas([]string{"pg_catalog", "pg_toast", "information_schema"}), }, }, "mysql": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "mysql:8"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/mysql-sakila-db/mysql-sakila-schema.sql"}, {Name: "TARGET", Value: "/docker-entrypoint-initdb.d"}, {Name: "USER", Value: "root"}, }, RunOptions: &dt.RunOptions{ Name: "usql-mysql", Cmd: []string{"--general-log=1", "--general-log-file=/var/lib/mysql/mysql.log"}, Env: []string{"MYSQL_ROOT_PASSWORD=pw"}, }, Driver: "mysql", URL: "root:pw@(localhost:%s)/mysql?parseTime=true", DockerPort: "3306/tcp", Opts: []metadata.ReaderOption{ infos.WithPlaceholder(func(int) string { return "?" 
}), infos.WithCheckConstraints(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsDataType: "column_type", infos.ColumnsNumericPrecRadix: "10", infos.FunctionColumnsNumericPrecRadix: "10", infos.ConstraintIsDeferrable: "''", infos.ConstraintInitiallyDeferred: "''", infos.PrivilegesGrantor: "''", infos.ConstraintJoinCond: "AND r.referenced_table_name = f.table_name", }), infos.WithSystemSchemas([]string{"mysql", "information_schema", "performance_schema", "sys"}), infos.WithCurrentSchema("COALESCE(DATABASE(), '%')"), infos.WithUsagePrivileges(false), infos.WithSequences(false), }, }, "sqlserver": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "mcr.microsoft.com/mssql/server:2019-latest"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/sql-server-sakila-db/sql-server-sakila-schema.sql"}, {Name: "TARGET", Value: "/schema"}, {Name: "USER", Value: "mssql:0"}, }, RunOptions: &dt.RunOptions{ Name: "usql-sqlserver", Env: []string{"ACCEPT_EULA=Y", "SA_PASSWORD=" + pw}, }, Exec: []string{"/opt/mssql-tools/bin/sqlcmd", "-S", "localhost", "-U", "sa", "-P", pw, "-d", "master", "-i", "/schema/sql-server-sakila-schema.sql"}, Driver: "sqlserver", URL: "sqlserver://sa:" + url.QueryEscape(pw) + "@127.0.0.1:%s?database=sakila", ReadinessURL: "sqlserver://sa:" + url.QueryEscape(pw) + "@127.0.0.1:%s", DockerPort: "1433/tcp", Opts: []metadata.ReaderOption{ infos.WithPlaceholder(func(n int) string { return fmt.Sprintf("@p%d", n) }), infos.WithIndexes(false), infos.WithConstraints(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.FunctionsSecurityType: "''", }), infos.WithSystemSchemas([]string{ "db_accessadmin", "db_backupoperator", "db_datareader", "db_datawriter", "db_ddladmin", "db_denydatareader", "db_denydatawriter", "db_owner", "db_securityadmin", "INFORMATION_SCHEMA", "sys", }), infos.WithUsagePrivileges(false), infos.WithSequences(false), }, }, "trino": { BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "trinodb/trino:351"}, }, RunOptions: &dt.RunOptions{ Name: "usql-trino", }, Driver: "trino", URL: "http://test@localhost:%s?catalog=tpch&schema=sf1", DockerPort: "8080/tcp", Opts: []metadata.ReaderOption{ infos.WithPlaceholder(func(int) string { return "?" 
}), infos.WithIndexes(false), infos.WithConstraints(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsColumnSize: "0", infos.ColumnsNumericScale: "0", infos.ColumnsNumericPrecRadix: "0", infos.ColumnsCharOctetLength: "0", infos.FunctionColumnsColumnSize: "0", infos.FunctionColumnsNumericScale: "0", infos.FunctionColumnsNumericPrecRadix: "0", infos.FunctionColumnsCharOctetLength: "0", }), }, }, } cleanup bool ) func TestMain(m *testing.M) { var only string flag.BoolVar(&cleanup, "cleanup", true, "delete containers when finished") flag.StringVar(&only, "dbs", "", "comma separated list of dbs to test: pgsql, mysql, sqlserver, trino") flag.Parse() if only != "" { runOnly := map[string]struct{}{} for _, dbName := range strings.Split(only, ",") { dbName = strings.TrimSpace(dbName) runOnly[dbName] = struct{}{} } for dbName := range dbs { if _, ok := runOnly[dbName]; !ok { delete(dbs, dbName) } } } pool, err := dt.NewPool("") if err != nil { log.Fatalf("Could not connect to docker: %s", err) } for dbName, db := range dbs { var ok bool db.Resource, ok = pool.ContainerByName(db.RunOptions.Name) if !ok { buildOpts := &dt.BuildOptions{ ContextDir: "../../testdata/docker", BuildArgs: db.BuildArgs, } db.Resource, err = pool.BuildAndRunWithBuildOptions(buildOpts, db.RunOptions) if err != nil { log.Fatal("Could not start resource: ", err) } } state := db.Resource.Container.State.Status if state != "created" && state != "running" { log.Fatalf("Unexpected container state for %s: %s", dbName, state) } url := db.URL if db.ReadinessURL != "" { url = db.ReadinessURL } port := db.Resource.GetPort(db.DockerPort) if db.DB, err = waitForDbConnection(db.Driver, pool, url, port); err != nil { log.Fatalf("Timed out waiting for %s: %s", dbName, err) } if len(db.Exec) != 0 { exitCode, err := db.Resource.Exec(db.Exec, dt.ExecOptions{ StdIn: os.Stdin, StdOut: os.Stdout, StdErr: os.Stderr, TTY: true, }) if err != nil || exitCode != 0 { log.Fatal("Could not load schema: ", err) } } // Reconnect with actual URL if a separate URL for readiness checking was used if db.ReadinessURL != "" { if db.DB, err = waitForDbConnection(db.Driver, pool, db.URL, port); err != nil { log.Fatalf("Timed out waiting for %s: %s", dbName, err) } } db.Reader = infos.New(db.Opts...)(db.DB).(metadata.BasicReader) } code := m.Run() // You can't defer this because os.Exit doesn't care for defer if cleanup { for _, db := range dbs { if err := pool.Purge(db.Resource); err != nil { log.Fatal("Could not purge resource: ", err) } } } os.Exit(code) } func waitForDbConnection(driver string, pool *dt.Pool, url string, port string) (*sql.DB, error) { // exponential backoff-retry, because the application in the container might not be ready to accept connections yet var db *sql.DB if err := pool.Retry(func() error { var err error db, err = sql.Open(driver, fmt.Sprintf(url, port)) if err != nil { return err } return db.Ping() }); err != nil { return nil, err } return db, nil } func TestSchemas(t *testing.T) { expected := map[string]string{ "pgsql": "information_schema, pg_catalog, pg_toast, public", "mysql": "information_schema, mysql, performance_schema, sakila, sys", "sqlserver": "db_accessadmin, db_backupoperator, db_datareader, db_datawriter, db_ddladmin, db_denydatareader, db_denydatawriter, db_owner, db_securityadmin, dbo, guest, INFORMATION_SCHEMA, sys", "trino": "information_schema, sf1, sf100, sf1000, sf10000, sf100000, sf300, sf3000, sf30000, tiny", } for dbName, db := range dbs { r := db.Reader result, err := 
r.Schemas(metadata.Filter{WithSystem: true}) if err != nil { log.Fatalf("Could not read %s schemas: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Schema) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s schema names, expected:\n %v\ngot:\n %v", dbName, expected[dbName], names) } } } func TestTables(t *testing.T) { schemas := map[string]string{ "pgsql": "public", "mysql": "sakila", "sqlserver": "dbo", "trino": "sf1", } expected := map[string]string{ "pgsql": "actor, address, category, city, country, customer, film, film_actor, film_category, inventory, language, payment, payment_p2007_01, payment_p2007_02, payment_p2007_03, payment_p2007_04, payment_p2007_05, payment_p2007_06, rental, staff, store, actor_info, customer_list, film_list, nicer_but_slower_film_list, sales_by_film_category, sales_by_store, staff_list", "mysql": "actor, address, category, city, country, customer, film, film_actor, film_category, film_text, inventory, language, payment, rental, staff, store, actor_info, customer_list, film_list, nicer_but_slower_film_list, sales_by_film_category, sales_by_store, staff_list", "sqlserver": "actor, address, category, city, country, customer, film, film_actor, film_category, film_text, inventory, language, payment, rental, staff, store, customer_list, film_list, sales_by_film_category, sales_by_store, staff_list", "trino": "customer, lineitem, nation, orders, part, partsupp, region, supplier", } for dbName, db := range dbs { r := db.Reader result, err := r.Tables(metadata.Filter{Schema: schemas[dbName], Types: []string{"BASE TABLE", "TABLE", "VIEW"}}) if err != nil { log.Fatalf("Could not read %s tables: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s table names, expected:\n %v\ngot:\n %v", dbName, expected[dbName], names) } } } func TestColumns(t *testing.T) { schemas := map[string]string{ "pgsql": "public", "mysql": "sakila", "sqlserver": "dbo", "trino": "sf1", } tables := map[string]string{ "pgsql": "film%", "mysql": "film%", "sqlserver": "film%", "trino": "orders", } expectedColumns := map[string]string{ "pgsql": "film_id, title, description, release_year, language_id, original_language_id, rental_duration, rental_rate, length, replacement_cost, rating, last_update, special_features, fulltext, actor_id, film_id, last_update, film_id, category_id, last_update, fid, title, description, category, price, length, rating, actors", "mysql": "film_id, title, description, release_year, language_id, original_language_id, rental_duration, rental_rate, length, replacement_cost, rating, special_features, last_update, actor_id, film_id, last_update, film_id, category_id, last_update, FID, title, description, category, price, length, rating, actors, film_id, title, description", "sqlserver": "film_id, title, description, release_year, language_id, original_language_id, rental_duration, rental_rate, length, replacement_cost, rating, special_features, last_update, actor_id, film_id, last_update, film_id, category_id, last_update, FID, title, description, category, price, length, rating, actors, film_id, title, description", "trino": "orderkey, custkey, orderstatus, totalprice, orderdate, orderpriority, clerk, shippriority, comment", } expectedTypes := map[string]string{ "mysql": "int unsigned, varchar(255), text, year, int unsigned, int unsigned, tinyint unsigned, 
decimal(4,2), smallint unsigned, decimal(5,2), enum('G','PG','PG-13','R','NC-17'), set('Trailers','Commentaries','Deleted Scenes','Behind the Scenes'), timestamp, int unsigned, int unsigned, timestamp, int unsigned, int unsigned, timestamp, int unsigned, varchar(255), text, varchar(25), decimal(4,2), smallint unsigned, enum('G','PG','PG-13','R','NC-17'), text, int, varchar(255), text", } for dbName, db := range dbs { r := db.Reader result, err := r.Columns(metadata.Filter{Schema: schemas[dbName], Parent: tables[dbName]}) if err != nil { log.Fatalf("Could not read %s columns: %v", dbName, err) } names := []string{} types := []string{} for result.Next() { names = append(names, result.Get().Name) types = append(types, result.Get().DataType) } actualColumns := strings.Join(names, ", ") actualTypes := strings.Join(types, ", ") if expected, ok := expectedColumns[dbName]; ok && actualColumns != expected { t.Errorf("Wrong %s column names, expected:\n %v, got:\n %v", dbName, expected, names) } if expected, ok := expectedTypes[dbName]; ok && actualTypes != expected { t.Errorf("Wrong %s column types, expected:\n %v, got:\n %v", dbName, expected, types) } } } func TestFunctions(t *testing.T) { schemas := map[string]string{ "pgsql": "public", "mysql": "sakila", } expected := map[string]string{ "pgsql": "_group_concat, film_in_stock, film_not_in_stock, get_customer_balance, group_concat, inventory_held_by_customer, inventory_in_stock, last_day, last_updated, rewards_report", "mysql": "film_in_stock, film_not_in_stock, get_customer_balance, inventory_held_by_customer, inventory_in_stock, rewards_report", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := infos.New(db.Opts...)(db.DB).(metadata.FunctionReader) result, err := r.Functions(metadata.Filter{Schema: schemas[dbName]}) if err != nil { log.Fatalf("Could not read %s functions: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s function names, expected:\n %v\ngot:\n %v", dbName, expected[dbName], names) } } } func TestFunctionColumns(t *testing.T) { schemas := map[string]string{ "pgsql": "public", "mysql": "sakila", } tables := map[string]string{ "pgsql": "film%", "mysql": "film%", } expected := map[string]string{ "pgsql": "p_film_id, p_store_id, p_film_count, p_film_id, p_store_id, p_film_count", "mysql": "p_film_id, p_store_id, p_film_count, p_film_id, p_store_id, p_film_count", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := infos.New(db.Opts...)(db.DB).(metadata.FunctionColumnReader) result, err := r.FunctionColumns(metadata.Filter{Schema: schemas[dbName], Parent: tables[dbName]}) if err != nil { log.Fatalf("Could not read %s function columns: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s function column names, expected:\n %v, got:\n %v", dbName, expected[dbName], names) } } } func TestIndexes(t *testing.T) { schemas := map[string]string{ "mysql": "sakila", } expected := map[string]string{ "mysql": "actor.idx_actor_last_name, actor.PRIMARY, address.idx_fk_city_id, address.PRIMARY, category.PRIMARY, city.idx_fk_country_id, city.PRIMARY, country.PRIMARY, customer.idx_fk_address_id, customer.idx_fk_store_id, customer.idx_last_name, customer.PRIMARY, film.idx_fk_language_id, film.idx_fk_original_language_id, 
film.idx_title, film.PRIMARY, film_actor.idx_fk_film_id, film_actor.PRIMARY, film_category.fk_film_category_category, film_category.PRIMARY, film_text.idx_title_description, film_text.PRIMARY, inventory.idx_fk_film_id, inventory.idx_store_id_film_id, inventory.PRIMARY, language.PRIMARY, payment.fk_payment_rental, payment.idx_fk_customer_id, payment.idx_fk_staff_id, payment.PRIMARY, rental.idx_fk_customer_id, rental.idx_fk_inventory_id, rental.idx_fk_staff_id, rental.PRIMARY, rental.rental_date, staff.idx_fk_address_id, staff.idx_fk_store_id, staff.PRIMARY, store.idx_fk_address_id, store.idx_unique_manager, store.PRIMARY", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := infos.New(db.Opts...)(db.DB).(metadata.IndexReader) result, err := r.Indexes(metadata.Filter{Schema: schemas[dbName]}) if err != nil { log.Fatalf("Could not read %s indexes: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Table+"."+result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s index names, expected:\n %v\ngot:\n %v", dbName, expected[dbName], names) } } } func TestIndexColumns(t *testing.T) { schemas := map[string]string{ "mysql": "sakila", } tables := map[string]string{ "mysql": "idx%", } expected := map[string]string{ "mysql": "last_name, city_id, country_id, address_id, store_id, last_name, language_id, original_language_id, title, film_id, title, description, film_id, store_id, film_id, customer_id, staff_id, customer_id, inventory_id, staff_id, address_id, store_id, address_id, manager_staff_id", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := infos.New(db.Opts...)(db.DB).(metadata.IndexColumnReader) result, err := r.IndexColumns(metadata.Filter{Schema: schemas[dbName], Name: tables[dbName]}) if err != nil { log.Fatalf("Could not read %s index columns: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s index column names, expected:\n %v, got:\n %v", dbName, expected[dbName], names) } } } func TestConstraints(t *testing.T) { schemas := map[string]string{ "pgsql": "public", "mysql": "sakila", } constraints := map[string]string{ "pgsql": "film%", "mysql": "film%", } expected := map[string]string{ "pgsql": "film.2200_16417_10_not_null, film.2200_16417_12_not_null, film.2200_16417_14_not_null, film.2200_16417_1_not_null, film.2200_16417_2_not_null, film.2200_16417_5_not_null, film.2200_16417_7_not_null, film.2200_16417_8_not_null, film.film_language_id_fkey, film.film_original_language_id_fkey, film.film_pkey, film_actor.2200_16429_1_not_null, film_actor.2200_16429_2_not_null, film_actor.2200_16429_3_not_null, film_actor.film_actor_actor_id_fkey, film_actor.film_actor_film_id_fkey, film_actor.film_actor_pkey, film_category.2200_16433_1_not_null, film_category.2200_16433_2_not_null, film_category.2200_16433_3_not_null, film_category.film_category_category_id_fkey, film_category.film_category_film_id_fkey, film_category.film_category_pkey", "mysql": "film.fk_film_language, film.fk_film_language_original, film.PRIMARY, film_actor.fk_film_actor_actor, film_actor.fk_film_actor_film, film_actor.PRIMARY, film_category.fk_film_category_category, film_category.fk_film_category_film, film_category.PRIMARY, film_text.PRIMARY", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := 
infos.New(db.Opts...)(db.DB).(metadata.ConstraintReader) result, err := r.Constraints(metadata.Filter{Schema: schemas[dbName], Parent: constraints[dbName]}) if err != nil { log.Fatalf("Could not read %s constraints: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Table+"."+result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s constraint names, expected:\n %v\ngot:\n %v", dbName, expected[dbName], names) } } } func TestConstraintColumns(t *testing.T) { schemas := map[string]string{ "pgsql": "public", "mysql": "sakila", } constraints := map[string]string{ "pgsql": "film%", "mysql": "film%", } expected := map[string]string{ "pgsql": "actor_id, category_id, film_id, film_id, language_id, original_language_id, film_id, film_id, actor_id, film_id, actor_id, actor_id, film_id, film_id, category_id, film_id, category_id, film_id, film_id, category_id, language_id, language_id", "mysql": "", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := infos.New(db.Opts...)(db.DB).(metadata.ConstraintColumnReader) result, err := r.ConstraintColumns(metadata.Filter{Schema: schemas[dbName], Name: constraints[dbName]}) if err != nil { log.Fatalf("Could not read %s constraint columns: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s constraint column names, expected:\n %v, got:\n %v", dbName, expected[dbName], names) } } } func TestReverseConstraints(t *testing.T) { schemas := map[string]string{ "pgsql": "public", "mysql": "sakila", } constraints := map[string]string{ "pgsql": "film%", "mysql": "film%", } expected := map[string]string{ "pgsql": "film_actor.film_actor_film_id_fkey, film_category.film_category_film_id_fkey, inventory.inventory_film_id_fkey", "mysql": "film_actor.fk_film_actor_film, film_category.fk_film_category_film, inventory.fk_inventory_film", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := infos.New(db.Opts...)(db.DB).(metadata.ConstraintReader) result, err := r.Constraints(metadata.Filter{Schema: schemas[dbName], Reference: constraints[dbName]}) if err != nil { log.Fatalf("Could not read %s constraints: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Table+"."+result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s reverse constraint names, expected:\n %v\ngot:\n %v", dbName, expected[dbName], names) } } } func TestSequences(t *testing.T) { schemas := map[string]string{ "pgsql": "public", } expected := map[string]string{ "pgsql": "actor_actor_id_seq, address_address_id_seq, category_category_id_seq, city_city_id_seq, country_country_id_seq, customer_customer_id_seq, film_film_id_seq, inventory_inventory_id_seq, language_language_id_seq, payment_payment_id_seq, rental_rental_id_seq, staff_staff_id_seq, store_store_id_seq", } for dbName, db := range dbs { if schemas[dbName] == "" { continue } r := infos.New(db.Opts...)(db.DB).(metadata.SequenceReader) result, err := r.Sequences(metadata.Filter{Schema: schemas[dbName]}) if err != nil { log.Fatalf("Could not read %s sequences: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected[dbName] { t.Errorf("Wrong %s sequence names, expected:\n %v\ngot:\n %v", 
dbName, expected[dbName], names) } } } func TestPrivilegeSummaries_NonExistent(t *testing.T) { type test struct { Name string Db *Database Schema string } tests := map[string]test{ "pgsql": {Db: dbs["pgsql"], Schema: "public"}, "mysql": {Db: dbs["mysql"], Schema: "sakila"}, "sqlserver": {Db: dbs["sqlserver"], Schema: "dbo"}, } for testName, test := range tests { if test.Db != nil { t.Run(testName, func(t *testing.T) { table := "privtest_table" // Read privileges r := infos.New(test.Db.Opts...)(test.Db.DB).(metadata.PrivilegeSummaryReader) result, err := r.PrivilegeSummaries(metadata.Filter{Schema: test.Schema, Name: table}) if err != nil { t.Fatalf("Could not read privileges: %v", err) } // Check result if result.Len() != 0 { t.Errorf("Wrong result count, expected:\n %d, got:\n %d", 0, result.Len()) } }) } } } func TestPrivilegeSummaries(t *testing.T) { type test struct { Db *Database Schema string User string Create string CreateUserStmt string DropUserStmt string Grants []string WantTable metadata.ObjectPrivileges WantColumn metadata.ColumnPrivileges } setDefaults := func(t test) test { if t.User == "" { t.User = "privtest_user" } if t.Create == "" { t.Create = "TABLE" } if t.CreateUserStmt == "" { t.CreateUserStmt = "CREATE USER %s" } if t.DropUserStmt == "" { t.DropUserStmt = "DROP USER %s" } if t.Grants == nil { t.Grants = []string{} } if t.WantTable == nil { t.WantTable = metadata.ObjectPrivileges{} } if t.WantColumn == nil { t.WantColumn = metadata.ColumnPrivileges{} } return t } postgresDefaultTable := func() metadata.ObjectPrivileges { return metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "UPDATE"}, metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "DELETE"}, metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "TRUNCATE"}, metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "REFERENCES"}, metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "TRIGGER"}, } } postgresDefaultColumn := func(columns []string) metadata.ColumnPrivileges { p := metadata.ColumnPrivileges{} for _, col := range columns { p = append(p, metadata.ColumnPrivilege{Column: col, Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, metadata.ColumnPrivilege{Column: col, Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: col, Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "UPDATE"}, metadata.ColumnPrivilege{Column: col, Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "REFERENCES"}) } return p } tests := map[string]test{ "pgsql-no-grants": setDefaults(test{ Db: dbs["pgsql"], Schema: "public", Grants: []string{}, WantTable: postgresDefaultTable(), WantColumn: postgresDefaultColumn([]string{"col1", "col2"}), }), "pgsql-sequence": setDefaults(test{ Db: dbs["pgsql"], Schema: "public", Create: "SEQUENCE", Grants: []string{"USAGE"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: 
"USAGE"}, metadata.ObjectPrivilege{Grantee: "postgres", Grantor: "postgres", IsGrantable: true, PrivilegeType: "USAGE"}, }, }), "pgsql-view": setDefaults(test{ Db: dbs["pgsql"], Schema: "public", Create: "VIEW", Grants: []string{"SELECT", "INSERT*"}, WantTable: append(postgresDefaultTable(), metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, ), WantColumn: append( postgresDefaultColumn([]string{"col1", "col2"}), metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, ), }), "pgsql-table": setDefaults(test{ Db: dbs["pgsql"], Schema: "public", Grants: []string{"SELECT", "INSERT*"}, WantTable: append(postgresDefaultTable(), metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, ), WantColumn: append( postgresDefaultColumn([]string{"col1", "col2"}), metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, ), }), "pgsql-column": setDefaults(test{ Db: dbs["pgsql"], Schema: "public", Grants: []string{"SELECT(col1)", "INSERT(col2)*"}, WantTable: postgresDefaultTable(), WantColumn: append( postgresDefaultColumn([]string{"col1", "col2"}), metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: true, PrivilegeType: "INSERT"}, ), }), "pgsql-table-column": setDefaults(test{ Db: dbs["pgsql"], Schema: "public", Grants: []string{"SELECT", "INSERT(col1)"}, WantTable: append(postgresDefaultTable(), metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, ), WantColumn: append( postgresDefaultColumn([]string{"col1", "col2"}), metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "INSERT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "privtest_user", Grantor: "postgres", IsGrantable: false, PrivilegeType: "SELECT"}, ), }), "mysql-no-grants": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", User: "'privtest_user'@'%'", Grants: []string{}, WantTable: 
metadata.ObjectPrivileges{}, }), "mysql-view": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", Create: "VIEW", User: "'privtest_user'@'%'", Grants: []string{"SELECT", "INSERT"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "INSERT"}, }, }), "mysql-view-grantable": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", Create: "VIEW", User: "'privtest_user'@'%'", Grants: []string{"SELECT*", "INSERT*"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: true, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: true, PrivilegeType: "INSERT"}, }, }), "mysql-table": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", User: "'privtest_user'@'%'", Grants: []string{"SELECT", "INSERT"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "INSERT"}, }, }), "mysql-table-grantable": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", User: "'privtest_user'@'%'", Grants: []string{"SELECT*", "INSERT*"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: true, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: true, PrivilegeType: "INSERT"}, }, }), "mysql-column": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", User: "'privtest_user'@'%'", Grants: []string{"SELECT(col1)", "INSERT(col2)"}, WantColumn: metadata.ColumnPrivileges{ metadata.ColumnPrivilege{Column: "col1", Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "INSERT"}, }, }), "mysql-column-grantable": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", User: "'privtest_user'@'%'", Grants: []string{"SELECT(col1)*", "INSERT(col2)*"}, WantColumn: metadata.ColumnPrivileges{ metadata.ColumnPrivilege{Column: "col1", Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: true, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: true, PrivilegeType: "INSERT"}, }, }), "mysql-table-column": setDefaults(test{ Db: dbs["mysql"], Schema: "sakila", User: "'privtest_user'@'%'", Grants: []string{"SELECT", "INSERT(col1)"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "SELECT"}, }, WantColumn: metadata.ColumnPrivileges{ metadata.ColumnPrivilege{Column: "col1", Grantee: "'privtest_user'@'%'", Grantor: "", IsGrantable: false, PrivilegeType: "INSERT"}, }, }), "sqlserver-no-grants": setDefaults(test{ Db: dbs["sqlserver"], Schema: "dbo", CreateUserStmt: "CREATE LOGIN %[1]s WITH PASSWORD = 'yourStrong123_Password'; CREATE USER %[1]s FOR LOGIN %[1]s", DropUserStmt: "DROP USER %[1]s; DROP LOGIN %[1]s", Grants: []string{}, WantTable: metadata.ObjectPrivileges{}, }), "sqlserver-view": setDefaults(test{ Db: dbs["sqlserver"], Schema: "dbo", Create: "VIEW", 
CreateUserStmt: "CREATE LOGIN %[1]s WITH PASSWORD = 'yourStrong123_Password'; CREATE USER %[1]s FOR LOGIN %[1]s", DropUserStmt: "DROP USER %[1]s; DROP LOGIN %[1]s", Grants: []string{"SELECT", "INSERT*"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "dbo", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "dbo", IsGrantable: true, PrivilegeType: "INSERT"}, }, }), "sqlserver-table": setDefaults(test{ Db: dbs["sqlserver"], Schema: "dbo", CreateUserStmt: "CREATE LOGIN %[1]s WITH PASSWORD = 'yourStrong123_Password'; CREATE USER %[1]s FOR LOGIN %[1]s", DropUserStmt: "DROP USER %[1]s; DROP LOGIN %[1]s", Grants: []string{"SELECT", "INSERT*"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "dbo", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "dbo", IsGrantable: true, PrivilegeType: "INSERT"}, }, }), "sqlserver-column": setDefaults(test{ Db: dbs["sqlserver"], Schema: "dbo", CreateUserStmt: "CREATE LOGIN %[1]s WITH PASSWORD = 'yourStrong123_Password'; CREATE USER %[1]s FOR LOGIN %[1]s", DropUserStmt: "DROP USER %[1]s; DROP LOGIN %[1]s", Grants: []string{"SELECT(col1)", "UPDATE(col2)*"}, WantColumn: metadata.ColumnPrivileges{ metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "dbo", IsGrantable: false, PrivilegeType: "SELECT"}, metadata.ColumnPrivilege{Column: "col2", Grantee: "privtest_user", Grantor: "dbo", IsGrantable: true, PrivilegeType: "UPDATE"}, }, }), "sqlserver-table-column": setDefaults(test{ Db: dbs["sqlserver"], Schema: "dbo", CreateUserStmt: "CREATE LOGIN %[1]s WITH PASSWORD = 'yourStrong123_Password'; CREATE USER %[1]s FOR LOGIN %[1]s", DropUserStmt: "DROP USER %[1]s; DROP LOGIN %[1]s", Grants: []string{"SELECT", "UPDATE(col1)"}, WantTable: metadata.ObjectPrivileges{ metadata.ObjectPrivilege{Grantee: "privtest_user", Grantor: "dbo", IsGrantable: false, PrivilegeType: "SELECT"}, }, WantColumn: metadata.ColumnPrivileges{ metadata.ColumnPrivilege{Column: "col1", Grantee: "privtest_user", Grantor: "dbo", IsGrantable: false, PrivilegeType: "UPDATE"}, }, }), } for testName, test := range tests { if test.Db != nil { t.Run(testName, func(t *testing.T) { // Create user, table and grants const name = "privtest" var query string var err error query = fmt.Sprintf(test.CreateUserStmt, test.User) _, err = test.Db.DB.Exec(query) if err != nil { t.Fatalf("Could not CREATE USER:\n%s\n%s", query, err) } defer test.Db.DB.Exec(fmt.Sprintf(test.DropUserStmt, test.User)) switch test.Create { case "TABLE": query = fmt.Sprintf("CREATE TABLE %s.%s (col1 int, col2 varchar(255))", test.Schema, name) _, err = test.Db.DB.Exec(query) if err != nil { t.Fatalf("Could not CREATE TABLE:\n%s\n%s", query, err) } defer test.Db.DB.Exec(fmt.Sprintf("DROP TABLE %s.%s", test.Schema, name)) case "VIEW": query = fmt.Sprintf("CREATE TABLE %s.%s_table (col1 int, col2 varchar(255))", test.Schema, name) _, err = test.Db.DB.Exec(query) if err != nil { t.Fatalf("Could not CREATE TABLE:\n%s\n%s", query, err) } defer test.Db.DB.Exec(fmt.Sprintf("DROP TABLE %s.%s_table", test.Schema, name)) query = fmt.Sprintf("CREATE VIEW %s.%s AS SELECT * FROM %[1]s.%[2]s_table", test.Schema, name) _, err = test.Db.DB.Exec(query) if err != nil { t.Fatalf("Could not CREATE VIEW:\n%s\n%s", query, err) } defer test.Db.DB.Exec(fmt.Sprintf("DROP VIEW %s.%s", test.Schema, name)) case "SEQUENCE": query = 
fmt.Sprintf("CREATE SEQUENCE %s.%s", test.Schema, name) _, err = test.Db.DB.Exec(query) if err != nil { t.Fatalf("Could not CREATE SEQUENCE:\n%s\n%s", query, err) } defer test.Db.DB.Exec(fmt.Sprintf("DROP SEQUENCE %s.%s", test.Schema, name)) } for _, grant := range test.Grants { isGrantable := false if grant[len(grant)-1] == '*' { isGrantable = true grant = grant[:len(grant)-1] } query = fmt.Sprintf("GRANT %s ON %s.%s TO %s", grant, test.Schema, name, test.User) if isGrantable { query += " WITH GRANT OPTION" } _, err = test.Db.DB.Exec(query) if err != nil { t.Fatalf("Could not GRANT %s:\n%s\n%s", grant, query, err) } } // Read privileges r := infos.New(test.Db.Opts...)(test.Db.DB).(metadata.PrivilegeSummaryReader) types := []string{"TABLE", "BASE TABLE", "SYSTEM TABLE", "SYNONYM", "LOCAL TEMPORARY", "GLOBAL TEMPORARY", "VIEW", "SYSTEM VIEW", "MATERIALIZED VIEW", "SEQUENCE"} result, err := r.PrivilegeSummaries(metadata.Filter{Schema: test.Schema, Name: name, Types: types}) if err != nil { t.Fatalf("Could not read privileges: %v", err) } // Check result if result.Len() != 1 { t.Fatalf("Wrong result count\nWant:\t%d\nGot:\t%d\n", 1, result.Len()) } result.Next() if result.Get().Schema != test.Schema { t.Errorf("Wrong schema!\nWant:\t%s\nGot:\t%s\n", test.Schema, result.Get().Schema) } if result.Get().Name != name { t.Errorf("Wrong table!\nWant:\t%s\nGot:\t%s\n", name, result.Get().Name) } want := "" switch test.Create { case "TABLE": want = "BASE TABLE" default: want = test.Create } if result.Get().ObjectType != want { t.Errorf("Wrong Type!\nWant:\t%s\nGot:\t%s\n", want, result.Get().ObjectType) } gotTablePrivileges := result.Get().ObjectPrivileges sort.Sort(gotTablePrivileges) sort.Sort(test.WantTable) if diff := cmp.Diff(test.WantTable, gotTablePrivileges); diff != "" { t.Errorf("Wrong object privileges!\n(-expected, +got):\n%s", diff) } gotColumnPrivileges := result.Get().ColumnPrivileges sort.Sort(gotColumnPrivileges) sort.Sort(test.WantColumn) if diff := cmp.Diff(test.WantColumn, gotColumnPrivileges); diff != "" { t.Errorf("Wrong column privileges!\n(-expected, +got):\n%s", diff) } }) } } } usql-0.19.19/drivers/metadata/metadata.go000066400000000000000000000526201476173253300202440ustar00rootroot00000000000000package metadata import ( "strings" "github.com/xo/dburl" "github.com/xo/usql/text" ) // ExtendedReader of all database metadata in a structured format. type ExtendedReader interface { CatalogReader SchemaReader TableReader ColumnReader ColumnStatReader IndexReader IndexColumnReader TriggerReader ConstraintReader ConstraintColumnReader FunctionReader FunctionColumnReader SequenceReader PrivilegeSummaryReader } // BasicReader of common database metadata like schemas, tables and columns. type BasicReader interface { SchemaReader TableReader ColumnReader } // CatalogReader lists database schemas. type CatalogReader interface { Reader Catalogs(Filter) (*CatalogSet, error) } // SchemaReader lists database schemas. type SchemaReader interface { Reader Schemas(Filter) (*SchemaSet, error) } // TableReader lists database tables. type TableReader interface { Reader Tables(Filter) (*TableSet, error) } // ColumnReader lists table columns. type ColumnReader interface { Reader Columns(Filter) (*ColumnSet, error) } // ColumnStatsReader lists table column statistics. type ColumnStatReader interface { Reader ColumnStats(Filter) (*ColumnStatSet, error) } // IndexReader lists table indexes. 
// IndexReader lists table indexes.
type IndexReader interface {
	Reader
	Indexes(Filter) (*IndexSet, error)
}

// IndexColumnReader lists index columns.
type IndexColumnReader interface {
	Reader
	IndexColumns(Filter) (*IndexColumnSet, error)
}

// TriggerReader lists table triggers.
type TriggerReader interface {
	Reader
	Triggers(Filter) (*TriggerSet, error)
}

// ConstraintReader lists table constraints.
type ConstraintReader interface {
	Reader
	Constraints(Filter) (*ConstraintSet, error)
}

// ConstraintColumnReader lists constraint columns.
type ConstraintColumnReader interface {
	Reader
	ConstraintColumns(Filter) (*ConstraintColumnSet, error)
}

// FunctionReader lists database functions.
type FunctionReader interface {
	Reader
	Functions(Filter) (*FunctionSet, error)
}

// FunctionColumnReader lists function parameters.
type FunctionColumnReader interface {
	Reader
	FunctionColumns(Filter) (*FunctionColumnSet, error)
}

// SequenceReader lists sequences.
type SequenceReader interface {
	Reader
	Sequences(Filter) (*SequenceSet, error)
}

// PrivilegeSummaryReader lists summaries of privileges granted on tables, views and sequences.
type PrivilegeSummaryReader interface {
	Reader
	PrivilegeSummaries(Filter) (*PrivilegeSummarySet, error)
}

// Reader of any database metadata in a structured format.
type Reader interface{}

// Filter objects returned by Readers
type Filter struct {
	// Catalog name pattern that objects must belong to;
	// use Name to filter catalogs by name
	Catalog string
	// Schema name pattern that objects must belong to;
	// use Name to filter schemas by name
	Schema string
	// Parent name pattern that objects must belong to;
	// does not apply to schema and catalog containing matching objects
	Parent string
	// Reference name pattern of other objects referencing this one
	Reference string
	// Name pattern that object name must match
	Name string
	// Types of the object
	Types []string
	// WithSystem objects
	WithSystem bool
	// OnlyVisible objects
	OnlyVisible bool
}

// Writer of database metadata in a human readable format.
type Writer interface {
	// DescribeFunctions \df, \dfa, \dfn, \dft, \dfw, etc.
	DescribeFunctions(*dburl.URL, string, string, bool, bool) error
	// DescribeTableDetails \d foo
	DescribeTableDetails(*dburl.URL, string, bool, bool) error
	// ListAllDbs \l
	ListAllDbs(*dburl.URL, string, bool) error
	// ListTables \dt, \dv, \dm, etc.
ListTables(*dburl.URL, string, string, bool, bool) error // ListSchemas \dn ListSchemas(*dburl.URL, string, bool, bool) error // ListIndexes \di ListIndexes(*dburl.URL, string, bool, bool) error // ShowStats \ss ShowStats(*dburl.URL, string, string, bool, int) error // ListPrivilegeSummaries \dp ListPrivilegeSummaries(*dburl.URL, string, bool) error } type CatalogSet struct { resultSet } func NewCatalogSet(v []Catalog) *CatalogSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &CatalogSet{ resultSet: resultSet{ results: r, columns: []string{"Catalog"}, }, } } func NewCatalogSetWithColumns(v []Result, cols []string) *CatalogSet { return &CatalogSet{ resultSet: resultSet{ results: v, columns: cols, }, } } type CatalogProvider interface { GetCatalog() Catalog } func (s CatalogSet) Get() Catalog { r := s.results[s.current-1] return r.(CatalogProvider).GetCatalog() } type Catalog struct { Catalog string } func (s Catalog) Values() []interface{} { return []interface{}{s.Catalog} } func (s Catalog) GetCatalog() Catalog { return s } type SchemaSet struct { resultSet } func NewSchemaSet(v []Schema) *SchemaSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &SchemaSet{ resultSet: resultSet{ results: r, columns: []string{"Schema", "Catalog"}, }, } } func (s SchemaSet) Get() *Schema { return s.results[s.current-1].(*Schema) } type Schema struct { Schema string Catalog string } func (s Schema) Values() []interface{} { return []interface{}{s.Schema, s.Catalog} } type TableSet struct { resultSet } func NewTableSet(v []Table) *TableSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &TableSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Name", "Type", "Rows", "Size", "Comment", }, }, } } func (t TableSet) Get() *Table { return t.results[t.current-1].(*Table) } type Table struct { Catalog string Schema string Name string Type string Rows int64 Size string Comment string } func (t Table) Values() []interface{} { return []interface{}{ t.Catalog, t.Schema, t.Name, t.Type, t.Rows, t.Size, t.Comment, } } type ColumnSet struct { resultSet } func NewColumnSet(v []Column) *ColumnSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &ColumnSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Table", "Name", "Type", "Nullable", "Default", "Size", "Decimal Digits", "Precision Radix", "Octet Length", }, }, } } func (c ColumnSet) Get() *Column { return c.results[c.current-1].(*Column) } type Column struct { Catalog string Schema string Table string Name string OrdinalPosition int DataType string // ScanType reflect.Type Default string ColumnSize int DecimalDigits int NumPrecRadix int CharOctetLength int IsNullable Bool } type Bool string var ( UNKNOWN Bool = "" YES Bool = "YES" NO Bool = "NO" ) func (c Column) Values() []interface{} { return []interface{}{ c.Catalog, c.Schema, c.Table, c.Name, c.DataType, c.IsNullable, c.Default, c.ColumnSize, c.DecimalDigits, c.NumPrecRadix, c.CharOctetLength, } } type ColumnStatSet struct { resultSet } func NewColumnStatSet(v []ColumnStat) *ColumnStatSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &ColumnStatSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Table", "Name", "Average width", "Nulls fraction", "Distinct values", "Minimum value", "Maximum value", "Mean value", "Top N common values", "Top N values freqs", }, }, } } func (c ColumnStatSet) Get() *ColumnStat { return 
c.results[c.current-1].(*ColumnStat) } type ColumnStat struct { Catalog string Schema string Table string Name string AvgWidth int NullFrac float64 NumDistinct int64 Min string Max string Mean string TopN []string TopNFreqs []float64 } func (c ColumnStat) Values() []interface{} { return []interface{}{ c.Catalog, c.Schema, c.Table, c.Name, c.AvgWidth, c.NullFrac, c.NumDistinct, c.Min, c.Max, c.Mean, c.TopN, c.TopNFreqs, } } type IndexSet struct { resultSet } func NewIndexSet(v []Index) *IndexSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &IndexSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Name", "Table", "Is primary", "Is unique", "Type", }, }, } } func (i IndexSet) Get() *Index { return i.results[i.current-1].(*Index) } type Index struct { Catalog string Schema string Table string Name string IsPrimary Bool IsUnique Bool Type string Columns string } func (i Index) Values() []interface{} { return []interface{}{ i.Catalog, i.Schema, i.Name, i.Table, i.IsPrimary, i.IsUnique, i.Type, } } type IndexColumnSet struct { resultSet } func NewIndexColumnSet(v []IndexColumn) *IndexColumnSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &IndexColumnSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Table", "Index name", "Name", "Data type", }, }, } } func (c IndexColumnSet) Get() *IndexColumn { return c.results[c.current-1].(*IndexColumn) } type IndexColumn struct { Catalog string Schema string Table string IndexName string Name string DataType string OrdinalPosition int } func (c IndexColumn) Values() []interface{} { return []interface{}{ c.Catalog, c.Schema, c.Table, c.IndexName, c.Name, c.DataType, } } type ConstraintSet struct { resultSet } func NewConstraintSet(v []Constraint) *ConstraintSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &ConstraintSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Table", "Name", "Type", "Is deferrable", "Initially deferred", "Foreign catalog", "Foreign schema", "Foreign table", "Foreign name", "Match type", "Update rule", "Delete rule", "Check Clause", }, }, } } func (i ConstraintSet) Get() *Constraint { return i.results[i.current-1].(*Constraint) } type Constraint struct { Catalog string Schema string Table string Name string Type string IsDeferrable Bool IsInitiallyDeferred Bool ForeignCatalog string ForeignSchema string ForeignTable string ForeignName string MatchType string UpdateRule string DeleteRule string CheckClause string } func (i Constraint) Values() []interface{} { return []interface{}{ i.Catalog, i.Schema, i.Table, i.Name, i.Type, i.IsDeferrable, i.IsInitiallyDeferred, i.ForeignCatalog, i.ForeignSchema, i.ForeignTable, i.ForeignName, i.MatchType, i.UpdateRule, i.DeleteRule, } } type ConstraintColumnSet struct { resultSet } func NewConstraintColumnSet(v []ConstraintColumn) *ConstraintColumnSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &ConstraintColumnSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Table", "Constraint", "Name", "Foreign Catalog", "Foreign Schema", "Foreign Table", "Foreign Constraint", "Foreign Name", }, }, } } func (c ConstraintColumnSet) Get() *ConstraintColumn { return c.results[c.current-1].(*ConstraintColumn) } type ConstraintColumn struct { Catalog string Schema string Table string Constraint string Name string OrdinalPosition int ForeignCatalog string ForeignSchema string ForeignTable string 
ForeignConstraint string ForeignName string } func (c ConstraintColumn) Values() []interface{} { return []interface{}{ c.Catalog, c.Schema, c.Table, c.Constraint, c.Name, c.ForeignCatalog, c.ForeignSchema, c.ForeignTable, c.ForeignConstraint, c.ForeignName, } } type FunctionSet struct { resultSet } func NewFunctionSet(v []Function) *FunctionSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &FunctionSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Name", "Result data type", "Argument data types", "Type", "Volatility", "Security", "Language", "Source code", }, }, } } func (f FunctionSet) Get() *Function { return f.results[f.current-1].(*Function) } type Function struct { Catalog string Schema string Name string ResultType string ArgTypes string Type string Volatility string Security string Language string Source string SpecificName string } func (f Function) Values() []interface{} { return []interface{}{ f.Catalog, f.Schema, f.Name, f.ResultType, f.ArgTypes, f.Type, f.Volatility, f.Security, f.Language, f.Source, } } type FunctionColumnSet struct { resultSet } func NewFunctionColumnSet(v []FunctionColumn) *FunctionColumnSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &FunctionColumnSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Function name", "Name", "Type", "Data type", "Size", "Decimal Digits", "Precision Radix", "Octet Length", }, }, } } func (c FunctionColumnSet) Get() *FunctionColumn { return c.results[c.current-1].(*FunctionColumn) } type FunctionColumn struct { Catalog string Schema string Table string Name string FunctionName string OrdinalPosition int Type string DataType string // ScanType reflect.Type ColumnSize int DecimalDigits int NumPrecRadix int CharOctetLength int } func (c FunctionColumn) Values() []interface{} { return []interface{}{ c.Catalog, c.Schema, c.FunctionName, c.Name, c.Type, c.DataType, c.ColumnSize, c.DecimalDigits, c.NumPrecRadix, c.CharOctetLength, } } type SequenceSet struct { resultSet } func NewSequenceSet(v []Sequence) *SequenceSet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &SequenceSet{ resultSet: resultSet{ results: r, columns: []string{ "Type", "Start", "Min", "Max", "Increment", "Cycles?", }, }, } } func (s SequenceSet) Get() *Sequence { return s.results[s.current-1].(*Sequence) } type Sequence struct { Catalog string Schema string Name string DataType string Start string Min string Max string Increment string Cycles Bool } func (s Sequence) Values() []interface{} { return []interface{}{ s.DataType, s.Start, s.Min, s.Max, s.Increment, s.Cycles, } } type PrivilegeSummarySet struct { resultSet } func NewPrivilegeSummarySet(v []PrivilegeSummary) *PrivilegeSummarySet { r := make([]Result, len(v)) for i := range v { r[i] = &v[i] } return &PrivilegeSummarySet{ resultSet: resultSet{ results: r, columns: []string{ "Schema", "Name", "Type", "Access privileges", "Column privileges", }, }, } } func (s PrivilegeSummarySet) Get() *PrivilegeSummary { return s.results[s.current-1].(*PrivilegeSummary) } // PrivilegeSummary summarizes the privileges granted on a database object type PrivilegeSummary struct { Catalog string Schema string Name string ObjectType string ObjectPrivileges ObjectPrivileges ColumnPrivileges ColumnPrivileges } func (s PrivilegeSummary) Values() []interface{} { return []interface{}{ s.Catalog, s.Schema, s.Name, s.ObjectType, s.ObjectPrivileges, s.ColumnPrivileges, } } // ObjectPrivilege represents a 
// privilege granted on a database object.
type ObjectPrivilege struct {
	Grantee       string
	Grantor       string
	PrivilegeType string
	IsGrantable   bool
}

// ColumnPrivilege represents a privilege granted on a column.
type ColumnPrivilege struct {
	Column        string
	Grantee       string
	Grantor       string
	PrivilegeType string
	IsGrantable   bool
}

// ObjectPrivileges represents privileges granted on a database object.
// The privileges are assumed to be sorted; otherwise the
// String() method will produce incorrectly grouped output.
type ObjectPrivileges []ObjectPrivilege

// ColumnPrivileges represents privileges granted on a column.
// The privileges are assumed to be sorted; otherwise the
// String() method will produce incorrectly grouped output.
type ColumnPrivileges []ColumnPrivilege

func (p ObjectPrivileges) Len() int      { return len(p) }
func (p ObjectPrivileges) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func (p ObjectPrivileges) Less(i, j int) bool {
	switch {
	case p[i].Grantee != p[j].Grantee:
		return p[i].Grantee < p[j].Grantee
	case p[i].Grantor != p[j].Grantor:
		return p[i].Grantor < p[j].Grantor
	}
	return p[i].PrivilegeType < p[j].PrivilegeType
}

// String returns a string representation of ObjectPrivileges.
// Assumes the ObjectPrivileges to be sorted.
func (p ObjectPrivileges) String() string {
	if len(p) == 0 {
		return ""
	}
	lines := []string{}
	types := []string{}
	for i := range p {
		switch {
		// Is last privilege or next privilege has new grantee or grantor; finalize line
		case i == len(p)-1 || p[i].Grantee != p[i+1].Grantee || p[i].Grantor != p[i+1].Grantor:
			types = append(types, typeStr(p[i].PrivilegeType, p[i].IsGrantable))
			lines = append(lines, lineStr(p[i].Grantee, p[i].Grantor, types))
			types = types[:0]
		default:
			types = append(types, typeStr(p[i].PrivilegeType, p[i].IsGrantable))
		}
	}
	return strings.Join(lines, "\n")
}

func (p ColumnPrivileges) Len() int      { return len(p) }
func (p ColumnPrivileges) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func (p ColumnPrivileges) Less(i, j int) bool {
	switch {
	case p[i].Column != p[j].Column:
		return p[i].Column < p[j].Column
	case p[i].Grantee != p[j].Grantee:
		return p[i].Grantee < p[j].Grantee
	case p[i].Grantor != p[j].Grantor:
		return p[i].Grantor < p[j].Grantor
	}
	return p[i].PrivilegeType < p[j].PrivilegeType
}

// String returns a string representation of ColumnPrivileges.
// Assumes the ColumnPrivileges to be sorted.
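//
// Editorial illustration (not from the original source): with sorted input,
// a hypothetical set of column privileges renders roughly as
//
//	col1:
//	 alice=SELECT*,UPDATE/bob
//	 carol=SELECT/bob
//
// one block per column, one line per grantee/grantor pair, a trailing "*"
// marking a grantable privilege and the name after "/" naming the grantor
// (see the typeStr and lineStr helpers below for the exact formatting).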
func (p ColumnPrivileges) String() string { if len(p) == 0 { return "" } colBlocks := []string{} lines := []string{} types := []string{} for i := range p { switch { // Is last privilege or next privilege has new column; finalize column block case i == len(p)-1 || p[i].Column != p[i+1].Column: types = append(types, typeStr(p[i].PrivilegeType, p[i].IsGrantable)) lines = append(lines, " "+lineStr(p[i].Grantee, p[i].Grantor, types)) colBlocks = append(colBlocks, p[i].Column+":\n"+strings.Join(lines, "\n")) lines = lines[:0] types = types[:0] // Next privilege has new grantee or grantor; finalize line case p[i].Grantee != p[i+1].Grantee || p[i].Grantor != p[i+1].Grantor: types = append(types, typeStr(p[i].PrivilegeType, p[i].IsGrantable)) lines = append(lines, " "+lineStr(p[i].Grantee, p[i].Grantor, types)) types = types[:0] default: types = append(types, typeStr(p[i].PrivilegeType, p[i].IsGrantable)) } } return strings.Join(colBlocks, "\n") } // typeStr appends an asterisk suffix to grantable privileges func typeStr(privilege string, grantable bool) string { if grantable { return privilege + "*" } else { return privilege } } // lineStr compiles grantee, grantor and privilege types into a line of output func lineStr(grantee, grantor string, types []string) string { if grantor != "" { return grantee + "=" + strings.Join(types, ",") + "/" + grantor } else { return grantee + "=" + strings.Join(types, ",") } } type resultSet struct { results []Result columns []string current int filter func(Result) bool scanValues func(Result) []interface{} } type Result interface { Values() []interface{} } func (r *resultSet) SetFilter(f func(Result) bool) { r.filter = f } func (r *resultSet) SetColumns(c []string) { r.columns = c } func (r *resultSet) SetScanValues(s func(Result) []interface{}) { r.scanValues = s } func (r *resultSet) Len() int { if r.filter == nil { return len(r.results) } len := 0 for _, rec := range r.results { if r.filter(rec) { len++ } } return len } func (r *resultSet) Reset() { r.current = 0 } func (r *resultSet) Next() bool { r.current++ if r.filter != nil { for r.current <= len(r.results) && !r.filter(r.results[r.current-1]) { r.current++ } } return r.current <= len(r.results) } func (r resultSet) Columns() ([]string, error) { return r.columns, nil } func (r resultSet) Scan(dest ...interface{}) error { var v []interface{} if r.scanValues == nil { v = r.results[r.current-1].Values() } else { v = r.scanValues(r.results[r.current-1]) } if len(v) != len(dest) { return text.ErrWrongNumberOfArguments } for i, d := range dest { p := d.(*interface{}) *p = v[i] } return nil } func (r resultSet) Close() error { return nil } func (r resultSet) Err() error { return nil } func (r resultSet) NextResultSet() bool { return false } type Trigger struct { Catalog string Schema string Table string Name string Definition string } func (t Trigger) Values() []interface{} { return []interface{}{ t.Catalog, t.Schema, t.Table, t.Name, t.Definition, } } type TriggerSet struct { resultSet } func NewTriggerSet(t []Trigger) *TriggerSet { r := make([]Result, len(t)) for i := range t { r[i] = &t[i] } return &TriggerSet{ resultSet: resultSet{ results: r, columns: []string{ "Catalog", "Schema", "Table", "Name", "Definition", }, }, } } func (t TriggerSet) Get() *Trigger { return t.results[t.current-1].(*Trigger) } usql-0.19.19/drivers/metadata/metadata_test.go000066400000000000000000000145431476173253300213050ustar00rootroot00000000000000package metadata import ( "testing" "github.com/google/go-cmp/cmp" ) func 
TestAccessPrivileges_String(t *testing.T) { tests := []struct { name string ps ObjectPrivileges want string }{ { name: "multi", ps: ObjectPrivileges{ {Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT", IsGrantable: true}, {Grantee: "user1", Grantor: "user1", PrivilegeType: "SELECT"}, {Grantee: "user2", Grantor: "user1", PrivilegeType: "INSERT"}, {Grantee: "user2", Grantor: "user1", PrivilegeType: "SELECT", IsGrantable: true}, {Grantee: "user3", Grantor: "user1", PrivilegeType: "SELECT", IsGrantable: true}, {Grantee: "user3", Grantor: "user2", PrivilegeType: "UPDATE"}, }, want: "user1=INSERT*,SELECT/user1\n" + "user2=INSERT,SELECT*/user1\n" + "user3=SELECT*/user1\n" + "user3=UPDATE/user2", }, { name: "one", ps: ObjectPrivileges{ {Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT"}, }, want: "user1=INSERT/user1", }, { name: "empty", ps: ObjectPrivileges{}, want: "", }, { name: "empty-grantor", ps: ObjectPrivileges{ {Grantee: "user1", Grantor: "", PrivilegeType: "INSERT", IsGrantable: true}, {Grantee: "user1", Grantor: "", PrivilegeType: "SELECT"}, {Grantee: "user2", Grantor: "", PrivilegeType: "INSERT"}, {Grantee: "user2", Grantor: "", PrivilegeType: "SELECT", IsGrantable: true}, {Grantee: "user3", Grantor: "", PrivilegeType: "UPDATE"}, }, want: "user1=INSERT*,SELECT\n" + "user2=INSERT,SELECT*\n" + "user3=UPDATE", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := tt.ps.String() if diff := cmp.Diff(tt.want, got); diff != "" { t.Errorf("Wrong AccessPrivileges.String(): (-expected, +got):\n%s", diff) } }) } } func TestColumnPrivileges_String(t *testing.T) { tests := []struct { name string ps ColumnPrivileges want string }{ { name: "multi", ps: ColumnPrivileges{ {Column: "col1", Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT", IsGrantable: true}, {Column: "col1", Grantee: "user1", Grantor: "user1", PrivilegeType: "SELECT"}, {Column: "col1", Grantee: "user2", Grantor: "user1", PrivilegeType: "INSERT"}, {Column: "col1", Grantee: "user2", Grantor: "user1", PrivilegeType: "SELECT", IsGrantable: true}, {Column: "col1", Grantee: "user3", Grantor: "user1", PrivilegeType: "SELECT", IsGrantable: true}, {Column: "col1", Grantee: "user3", Grantor: "user2", PrivilegeType: "UPDATE"}, {Column: "col2", Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT", IsGrantable: true}, {Column: "col2", Grantee: "user1", Grantor: "user1", PrivilegeType: "SELECT"}, {Column: "col2", Grantee: "user2", Grantor: "user1", PrivilegeType: "INSERT"}, {Column: "col2", Grantee: "user2", Grantor: "user1", PrivilegeType: "SELECT", IsGrantable: true}, {Column: "col2", Grantee: "user3", Grantor: "user2", PrivilegeType: "UPDATE"}, }, want: "col1:\n" + " user1=INSERT*,SELECT/user1\n" + " user2=INSERT,SELECT*/user1\n" + " user3=SELECT*/user1\n" + " user3=UPDATE/user2\n" + "col2:\n" + " user1=INSERT*,SELECT/user1\n" + " user2=INSERT,SELECT*/user1\n" + " user3=UPDATE/user2", }, { name: "one-multi", ps: ColumnPrivileges{ {Column: "col2", Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT", IsGrantable: true}, {Column: "col3", Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT", IsGrantable: true}, {Column: "col3", Grantee: "user1", Grantor: "user1", PrivilegeType: "SELECT"}, {Column: "col3", Grantee: "user2", Grantor: "user1", PrivilegeType: "INSERT"}, {Column: "col3", Grantee: "user2", Grantor: "user1", PrivilegeType: "SELECT", IsGrantable: true}, {Column: "col3", Grantee: "user3", Grantor: "user2", PrivilegeType: "UPDATE"}, }, want: "col2:\n" + " 
user1=INSERT*/user1\n" + "col3:\n" + " user1=INSERT*,SELECT/user1\n" + " user2=INSERT,SELECT*/user1\n" + " user3=UPDATE/user2", }, { name: "multi-one", ps: ColumnPrivileges{ {Column: "col1", Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT", IsGrantable: true}, {Column: "col1", Grantee: "user1", Grantor: "user1", PrivilegeType: "SELECT"}, {Column: "col1", Grantee: "user2", Grantor: "user1", PrivilegeType: "INSERT"}, {Column: "col1", Grantee: "user2", Grantor: "user1", PrivilegeType: "SELECT", IsGrantable: true}, {Column: "col1", Grantee: "user3", Grantor: "user2", PrivilegeType: "UPDATE"}, {Column: "col2", Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT", IsGrantable: true}, }, want: "col1:\n" + " user1=INSERT*,SELECT/user1\n" + " user2=INSERT,SELECT*/user1\n" + " user3=UPDATE/user2\n" + "col2:\n" + " user1=INSERT*/user1", }, { name: "one", ps: ColumnPrivileges{ {Column: "col1", Grantee: "user1", Grantor: "user1", PrivilegeType: "INSERT"}, }, want: "col1:\n user1=INSERT/user1", }, { name: "empty", ps: ColumnPrivileges{}, want: "", }, { name: "empty-grantor", ps: ColumnPrivileges{ {Column: "col1", Grantee: "user1", Grantor: "", PrivilegeType: "INSERT", IsGrantable: true}, {Column: "col1", Grantee: "user1", Grantor: "", PrivilegeType: "SELECT"}, {Column: "col1", Grantee: "user2", Grantor: "", PrivilegeType: "INSERT"}, {Column: "col1", Grantee: "user2", Grantor: "", PrivilegeType: "SELECT", IsGrantable: true}, {Column: "col1", Grantee: "user3", Grantor: "", PrivilegeType: "UPDATE"}, {Column: "col2", Grantee: "user1", Grantor: "", PrivilegeType: "INSERT", IsGrantable: true}, {Column: "col2", Grantee: "user1", Grantor: "", PrivilegeType: "SELECT"}, {Column: "col2", Grantee: "user2", Grantor: "", PrivilegeType: "INSERT"}, {Column: "col2", Grantee: "user2", Grantor: "", PrivilegeType: "SELECT", IsGrantable: true}, {Column: "col2", Grantee: "user3", Grantor: "", PrivilegeType: "UPDATE"}, }, want: "col1:\n" + " user1=INSERT*,SELECT\n" + " user2=INSERT,SELECT*\n" + " user3=UPDATE\n" + "col2:\n" + " user1=INSERT*,SELECT\n" + " user2=INSERT,SELECT*\n" + " user3=UPDATE", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := tt.ps.String() if diff := cmp.Diff(tt.want, got); diff != "" { t.Errorf("Wrong ColumnPrivileges.String(): (-expected, +got):\n%s", diff) } }) } } usql-0.19.19/drivers/metadata/mysql/000077500000000000000000000000001476173253300172755ustar00rootroot00000000000000usql-0.19.19/drivers/metadata/mysql/metadata.go000066400000000000000000000044571476173253300214160ustar00rootroot00000000000000package mysql import ( "time" "github.com/gohxs/readline" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/completer" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" ) var ( // NewReader for MySQL databases NewReader = infos.New( infos.WithPlaceholder(func(int) string { return "?" 
}), infos.WithSequences(false), infos.WithCheckConstraints(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsDataType: "column_type", infos.ColumnsNumericPrecRadix: "10", infos.FunctionColumnsNumericPrecRadix: "10", infos.ConstraintIsDeferrable: "''", infos.ConstraintInitiallyDeferred: "''", infos.PrivilegesGrantor: "''", infos.ConstraintJoinCond: "AND r.referenced_table_name = f.table_name", }), infos.WithSystemSchemas([]string{"mysql", "information_schema", "performance_schema", "sys"}), infos.WithCurrentSchema("COALESCE(DATABASE(), '%')"), infos.WithUsagePrivileges(false), ) // NewCompleter for MySQL databases NewCompleter = func(db drivers.DB, opts ...completer.Option) readline.AutoCompleter { readerOpts := []metadata.ReaderOption{ // this needs to be relatively low, since autocomplete is very interactive metadata.WithTimeout(3 * time.Second), metadata.WithLimit(1000), } reader := NewReader(db, readerOpts...) opts = append([]completer.Option{ completer.WithReader(reader), completer.WithDB(db), completer.WithSQLStartCommands(append(completer.CommonSqlStartCommands, "USE")), completer.WithBeforeComplete(complete(reader)), }, opts...) return completer.NewDefaultCompleter(opts...) } ) func complete(reader metadata.Reader) completer.CompleteFunc { return func(previousWords []string, text []rune) [][]rune { if completer.TailMatches(completer.IGNORE_CASE, previousWords, `USE`) { return completeWithSchemas(reader, text) } return nil } } func completeWithSchemas(reader metadata.Reader, text []rune) [][]rune { schemaNames := []string{} schemas, err := reader.(metadata.SchemaReader).Schemas(metadata.Filter{WithSystem: true}) if err != nil { return nil } for schemas.Next() { schemaNames = append(schemaNames, schemas.Get().Schema) } return completer.CompleteFromList(text, schemaNames...) 
} usql-0.19.19/drivers/metadata/oracle/000077500000000000000000000000001476173253300173755ustar00rootroot00000000000000usql-0.19.19/drivers/metadata/oracle/metadata.go000066400000000000000000000304541476173253300215120ustar00rootroot00000000000000// Package oracle provides a metadata reader package oracle import ( "database/sql" "fmt" "strings" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" ) type metaReader struct { metadata.LoggingReader systemSchemas string } var _ metadata.BasicReader = &metaReader{} var _ metadata.IndexReader = &metaReader{} var _ metadata.IndexColumnReader = &metaReader{} func NewReader() func(drivers.DB, ...metadata.ReaderOption) metadata.Reader { return func(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { r := &metaReader{ LoggingReader: metadata.NewLoggingReader(db, opts...), systemSchemas: "'CTXSYS', 'FLOWS_FILES', 'MDSYS', 'OUTLN', 'SYS', 'SYSTEM', 'XDB', 'XS$NULL'", } return r } } func (r metaReader) Catalogs(metadata.Filter) (*metadata.CatalogSet, error) { qstr := `SELECT UPPER(Value) AS catalog FROM v$parameter o WHERE name = 'db_name' UNION ALL SELECT db_link AS catalog FROM dba_db_links ORDER BY catalog ` rows, closeRows, err := r.Query(qstr) if err != nil { if err == sql.ErrNoRows { return metadata.NewCatalogSet([]metadata.Catalog{}), nil } return nil, err } defer closeRows() results := []metadata.Catalog{} for rows.Next() { rec := metadata.Catalog{} err = rows.Scan(&rec.Catalog) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewCatalogSet(results), nil } func (r metaReader) Schemas(f metadata.Filter) (*metadata.SchemaSet, error) { qstr := `SELECT username FROM all_users ` conds, vals := r.conditions(f, formats{ name: "username LIKE :%d", notSchemas: "username NOT IN (%s)", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } qstr += ` ORDER BY username` rows, closeRows, err := r.Query(qstr, vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewSchemaSet([]metadata.Schema{}), nil } return nil, err } defer closeRows() results := []metadata.Schema{} for rows.Next() { rec := metadata.Schema{} err = rows.Scan(&rec.Schema) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewSchemaSet(results), nil } // Tables from selected catalog (or all, if empty), matching schemas, names and types func (r metaReader) Tables(f metadata.Filter) (*metadata.TableSet, error) { qstr := `SELECT o.owner AS table_schem, o.object_name AS table_name, o.object_type AS table_type FROM all_objects o ` conds, vals := r.conditions(f, formats{ schema: "o.owner LIKE %s", notSchemas: "o.owner NOT IN (%s)", name: "o.object_name LIKE :%d", types: "o.object_type IN (%s)", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } addSynonyms := false for _, t := range f.Types { if t == "SYNONYM" { addSynonyms = true } } if addSynonyms { qstr += ` UNION ALL SELECT s.owner AS table_schem, s.synonym_name AS table_name, 'SYNONYM' AS table_type FROM all_synonyms s ` conds, seqVals := r.conditions(f, formats{ schema: "s.owner LIKE %s", notSchemas: "s.owner NOT IN (%s)", name: "s.synonym_name LIKE :%d", }) vals = append(vals, seqVals...) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } } qstr += ` ORDER BY table_schem, table_name, table_type` rows, closeRows, err := r.Query(qstr, vals...) 
if err != nil { if err == sql.ErrNoRows { return metadata.NewTableSet([]metadata.Table{}), nil } return nil, err } defer closeRows() results := []metadata.Table{} for rows.Next() { rec := metadata.Table{} err = rows.Scan(&rec.Schema, &rec.Name, &rec.Type) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewTableSet(results), nil } func (r metaReader) Columns(f metadata.Filter) (*metadata.ColumnSet, error) { qstr := `SELECT c.owner, c.table_name, c.column_name, c.column_id AS ordinal_position, c.data_type, CASE c.nullable WHEN 'Y' THEN 'YES' ELSE 'NO' END AS nullable, COALESCE(c.data_length, c.data_precision, 0), COALESCE(c.data_scale, 0), CASE c.data_type WHEN 'FLOAT' THEN 2 WHEN 'NUMBER' THEN 10 ELSE 0 END AS num_prec_radix, COALESCE(c.char_col_decl_length, 0) as char_octet_length FROM all_tab_columns c ` conds, vals := r.conditions(f, formats{ schema: "c.owner LIKE %s", notSchemas: "c.owner NOT IN (%s)", parent: "c.table_name LIKE :%d", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } qstr += ` ORDER BY c.owner, c.table_name, c.column_id` rows, closeRows, err := r.Query(qstr, vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewColumnSet([]metadata.Column{}), nil } return nil, err } defer closeRows() results := []metadata.Column{} for rows.Next() { rec := metadata.Column{} targets := []interface{}{ &rec.Schema, &rec.Table, &rec.Name, &rec.OrdinalPosition, &rec.DataType, &rec.IsNullable, &rec.ColumnSize, &rec.DecimalDigits, &rec.NumPrecRadix, &rec.CharOctetLength, } err = rows.Scan(targets...) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewColumnSet(results), nil } func (r metaReader) Functions(f metadata.Filter) (*metadata.FunctionSet, error) { qstr := `SELECT decode (b.object_type,'PACKAGE',CONCAT(CONCAT(b.object_name,'.'), a.object_name) ,b.object_name) as specific_name, b.owner as procedure_schem, decode (b.object_type,'PACKAGE',CONCAT(CONCAT(b.object_name,'.'), a.object_name) ,b.object_name) as procedure_name, decode (b.object_type,'PACKAGE',decode(a.position,0,2,1,1,0), decode(b.object_type,'PROCEDURE',1,'FUNCTION',2,0)) as procedure_type FROM all_arguments a JOIN all_objects b ON b.object_id = a.object_id AND a.sequence = 1 ` conds, vals := r.conditions(f, formats{ schema: "b.owner LIKE %s", notSchemas: "b.owner NOT IN (%s)", name: "b.object_name LIKE :%d", types: "b.object_type IN (%s)", }) conds = append(conds, "(b.object_type = 'PROCEDURE' OR b.object_type = 'FUNCTION' OR b.object_type = 'PACKAGE')") qstr += " WHERE " + strings.Join(conds, " AND ") qstr += ` ORDER BY procedure_schem, procedure_name, procedure_type` rows, closeRows, err := r.Query(qstr, vals...) 
if err != nil { if err == sql.ErrNoRows { return metadata.NewFunctionSet([]metadata.Function{}), nil } return nil, err } defer closeRows() results := []metadata.Function{} for rows.Next() { rec := metadata.Function{} err = rows.Scan( &rec.SpecificName, &rec.Schema, &rec.Name, &rec.Type, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewFunctionSet(results), nil } func (r metaReader) FunctionColumns(f metadata.Filter) (*metadata.FunctionColumnSet, error) { qstr := `SELECT a.owner as procedure_schem, decode (b.object_type,'PACKAGE',CONCAT(CONCAT(b.object_name,'.'),a.object_name), b.object_name) as procedure_name, decode(a.position,0,'RETURN_VALUE',a.argument_name) as column_name, a.position as ordinal_position, decode(a.position,0,5,decode(a.in_out,'IN',1,'IN/OUT',2,'OUT',4)) as column_type, a.data_type as type_name, COALESCE(a.data_length, a.data_precision, 0) as column_size, COALESCE(a.data_scale, 0) as decimal_digits, COALESCE(a.radix, 0) as num_prec_radix FROM all_objects b JOIN all_arguments a ON b.object_id = a.object_id AND a.data_level = 0 ` conds, vals := r.conditions(f, formats{ schema: "a.owner LIKE %s", notSchemas: "a.owner NOT IN (%s)", parent: "b.object_name LIKE :%d", }) conds = append(conds, "b.object_type = 'PROCEDURE' OR b.object_type = 'FUNCTION'") qstr += " WHERE " + strings.Join(conds, " AND ") qstr += ` ORDER BY procedure_schem, procedure_name, ordinal_position` rows, closeRows, err := r.Query(qstr, vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewFunctionColumnSet([]metadata.FunctionColumn{}), nil } return nil, err } defer closeRows() results := []metadata.FunctionColumn{} for rows.Next() { rec := metadata.FunctionColumn{} err = rows.Scan( &rec.Schema, &rec.FunctionName, &rec.Name, &rec.OrdinalPosition, &rec.Type, &rec.DataType, &rec.ColumnSize, &rec.DecimalDigits, &rec.NumPrecRadix, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewFunctionColumnSet(results), nil } func (r metaReader) Indexes(f metadata.Filter) (*metadata.IndexSet, error) { qstr := `SELECT o.owner, o.table_name, o.index_name, decode(o.uniqueness,'UNIQUE','NO','YES') FROM all_indexes o ` conds, vals := r.conditions(f, formats{ schema: "o.owner LIKE %s", notSchemas: "o.owner NOT IN (%s)", parent: "o.table_name LIKE :%d", name: "o.index_name LIKE :%d", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } qstr += ` ORDER BY o.owner, o.table_name, o.index_name` rows, closeRows, err := r.Query(qstr, vals...) 
if err != nil { if err == sql.ErrNoRows { return metadata.NewIndexSet([]metadata.Index{}), nil } return nil, err } defer closeRows() results := []metadata.Index{} for rows.Next() { rec := metadata.Index{} err = rows.Scan(&rec.Schema, &rec.Table, &rec.Name, &rec.IsUnique) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexSet(results), nil } func (r metaReader) IndexColumns(f metadata.Filter) (*metadata.IndexColumnSet, error) { qstr := `SELECT o.owner, o.table_name, o.index_name, b.column_name, b.column_position FROM all_indexes o JOIN all_ind_columns b ON o.owner = b.index_owner AND o.index_name = b.index_name ` conds, vals := r.conditions(f, formats{ schema: "o.owner LIKE %s", notSchemas: "o.owner NOT IN (%s)", parent: "o.table_name LIKE :%d", name: "o.index_name LIKE :%d", }) if len(conds) != 0 { qstr += " WHERE " + strings.Join(conds, " AND ") } qstr += ` ORDER BY o.owner, o.table_name, o.index_name, b.column_position` rows, closeRows, err := r.Query(qstr, vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewIndexColumnSet([]metadata.IndexColumn{}), nil } return nil, err } defer closeRows() results := []metadata.IndexColumn{} for rows.Next() { rec := metadata.IndexColumn{} err = rows.Scan(&rec.Schema, &rec.Table, &rec.IndexName, &rec.Name, &rec.OrdinalPosition) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexColumnSet(results), nil } func (r metaReader) conditions(filter metadata.Filter, formats formats) ([]string, []interface{}) { baseParam := 1 conds := []string{} vals := []interface{}{} if filter.Schema != "" && formats.schema != "" { vals = append(vals, strings.ToUpper(filter.Schema)) conds = append(conds, fmt.Sprintf(formats.schema, fmt.Sprintf(":%d", baseParam))) baseParam++ } if !filter.WithSystem && formats.notSchemas != "" { conds = append(conds, fmt.Sprintf(formats.notSchemas, r.systemSchemas)) } if filter.OnlyVisible && formats.schema != "" { conds = append(conds, fmt.Sprintf(formats.schema, "user")) } if filter.Parent != "" && formats.parent != "" { vals = append(vals, strings.ToUpper(filter.Parent)) conds = append(conds, fmt.Sprintf(formats.parent, baseParam)) baseParam++ } if filter.Name != "" && formats.name != "" { vals = append(vals, strings.ToUpper(filter.Name)) conds = append(conds, fmt.Sprintf(formats.name, baseParam)) baseParam++ } if len(filter.Types) != 0 && formats.types != "" { pholders := []string{} for _, t := range filter.Types { vals = append(vals, strings.ToUpper(t)) pholders = append(pholders, fmt.Sprintf(":%d", baseParam)) baseParam++ } if len(pholders) != 0 { conds = append(conds, fmt.Sprintf(formats.types, strings.Join(pholders, ", "))) } } return conds, vals } type formats struct { schema string notSchemas string parent string name string types string } usql-0.19.19/drivers/metadata/postgres/000077500000000000000000000000001476173253300177765ustar00rootroot00000000000000usql-0.19.19/drivers/metadata/postgres/metadata.go000066400000000000000000000331241476173253300221100ustar00rootroot00000000000000// Package postgres provides a metadata reader package postgres import ( "database/sql" "fmt" "strings" "github.com/lib/pq" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" ) type metaReader struct { metadata.LoggingReader limit int } var _ metadata.CatalogReader = &metaReader{} var _ 
metadata.TableReader = &metaReader{} var _ metadata.ColumnStatReader = &metaReader{} var _ metadata.IndexReader = &metaReader{} var _ metadata.IndexColumnReader = &metaReader{} var _ metadata.TriggerReader = &metaReader{} func NewReader() func(drivers.DB, ...metadata.ReaderOption) metadata.Reader { return func(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { newIS := infos.New( infos.WithIndexes(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, interval_precision, 0)", infos.FunctionColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, interval_precision, 0)", }), infos.WithSystemSchemas([]string{"pg_catalog", "pg_toast", "information_schema"}), infos.WithCurrentSchema("CURRENT_SCHEMA"), infos.WithDataTypeFormatter(dataTypeFormatter)) return metadata.NewPluginReader( newIS(db, opts...), &metaReader{ LoggingReader: metadata.NewLoggingReader(db, opts...), }, ) } } func dataTypeFormatter(col metadata.Column) string { switch col.DataType { case "bit", "character": return fmt.Sprintf("%s(%d)", col.DataType, col.ColumnSize) case "bit varying", "character varying": if col.ColumnSize != 0 { return fmt.Sprintf("%s(%d)", col.DataType, col.ColumnSize) } else { return col.DataType } case "numeric": if col.ColumnSize != 0 { return fmt.Sprintf("numeric(%d,%d)", col.ColumnSize, col.DecimalDigits) } else { return col.DataType } case "time without time zone": return fmt.Sprintf("time(%d) without time zone", col.ColumnSize) case "time with time zone": return fmt.Sprintf("time(%d) with time zone", col.ColumnSize) case "timestamp without time zone": return fmt.Sprintf("timestamp(%d) without time zone", col.ColumnSize) case "timestamp with time zone": return fmt.Sprintf("timestamp(%d) with time zone", col.ColumnSize) default: return col.DataType } } func (r *metaReader) SetLimit(l int) { r.limit = l } type Catalog struct { metadata.Catalog Owner string Encoding string Collate string Ctype string AccessPrivileges string } func (s Catalog) Values() []interface{} { return []interface{}{s.Catalog.Catalog, s.Owner, s.Encoding, s.Collate, s.Ctype, s.AccessPrivileges} } func (s Catalog) GetCatalog() metadata.Catalog { return s.Catalog } var ( catalogsColumnName = []string{"Catalog", "Owner", "Encoding", "Collate", "Ctype", "Access privileges"} ) func (r metaReader) Catalogs(metadata.Filter) (*metadata.CatalogSet, error) { qstr := `SELECT d.datname as "Name", pg_catalog.pg_get_userbyid(d.datdba) as "Owner", pg_catalog.pg_encoding_to_char(d.encoding) as "Encoding", d.datcollate as "Collate", d.datctype as "Ctype", COALESCE(pg_catalog.array_to_string(d.datacl, E'\n'),'') AS "Access privileges" FROM pg_catalog.pg_database d` rows, closeRows, err := r.query(qstr, []string{}, "1") if err != nil { return nil, err } defer closeRows() var results []metadata.Result for rows.Next() { rec := Catalog{ Catalog: metadata.Catalog{}, } err = rows.Scan(&rec.Catalog.Catalog, &rec.Owner, &rec.Encoding, &rec.Collate, &rec.Ctype, &rec.AccessPrivileges) if err != nil { return nil, err } results = append(results, &rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewCatalogSetWithColumns(results, catalogsColumnName), nil } func (r metaReader) Tables(f metadata.Filter) (*metadata.TableSet, error) { qstr := `SELECT n.nspname as "Schema", c.relname as "Name", CASE c.relkind WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' WHEN 'm' THEN 'materialized view' WHEN 'i' 
THEN 'index' WHEN 'S' THEN 'sequence' WHEN 's' THEN 'special' WHEN 'f' THEN 'foreign table' WHEN 'p' THEN 'partitioned table' WHEN 'I' THEN 'partitioned index' ELSE 'unknown' END as "Type", COALESCE((c.reltuples / NULLIF(c.relpages, 0)) * (pg_catalog.pg_relation_size(c.oid) / current_setting('block_size')::int), 0)::bigint as "Rows", pg_catalog.pg_size_pretty(pg_catalog.pg_table_size(c.oid)) as "Size", COALESCE(pg_catalog.obj_description(c.oid, 'pg_class'), '') as "Description" FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace ` conds := []string{"n.nspname !~ '^pg_toast' AND c.relkind != 'c'"} vals := []interface{}{} if f.OnlyVisible { conds = append(conds, "pg_catalog.pg_table_is_visible(c.oid)") } if !f.WithSystem { conds = append(conds, "n.nspname NOT IN ('pg_catalog', 'information_schema')") } if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, fmt.Sprintf("n.nspname LIKE $%d", len(vals))) } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, fmt.Sprintf("c.relname LIKE $%d", len(vals))) } if len(f.Types) != 0 { tableTypes := map[string][]rune{ "TABLE": {'r', 'p', 's', 'f'}, "VIEW": {'v'}, "MATERIALIZED VIEW": {'m'}, "SEQUENCE": {'S'}, } pholders := []string{"''"} for _, t := range f.Types { for _, k := range tableTypes[t] { vals = append(vals, string(k)) pholders = append(pholders, fmt.Sprintf("$%d", len(vals))) } } conds = append(conds, fmt.Sprintf("c.relkind IN (%s)", strings.Join(pholders, ", "))) } rows, closeRows, err := r.query(qstr, conds, "1, 3, 2", vals...) if err != nil { if err == sql.ErrNoRows { return metadata.NewTableSet([]metadata.Table{}), nil } return nil, err } defer closeRows() results := []metadata.Table{} for rows.Next() { rec := metadata.Table{} err = rows.Scan(&rec.Schema, &rec.Name, &rec.Type, &rec.Rows, &rec.Size, &rec.Comment) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewTableSet(results), nil } func (r metaReader) ColumnStats(f metadata.Filter) (*metadata.ColumnStatSet, error) { tables, err := r.Tables(metadata.Filter{Schema: f.Schema, Name: f.Parent, WithSystem: true}) if err != nil { return nil, err } rowNum := int64(0) if tables.Next() { rowNum = tables.Get().Rows } qstr := ` SELECT n.nspname, c.relname, a.attname, COALESCE(s.avg_width, 0), COALESCE(s.null_frac, 0.0), COALESCE(CASE WHEN n_distinct >= 0 THEN n_distinct ELSE (-n_distinct * $1) END::bigint, 0) AS n_distinct, COALESCE((histogram_bounds::text::text[])[1], ''), COALESCE((histogram_bounds::text::text[])[array_length(histogram_bounds::text::text[], 1)], ''), most_common_vals::text::text[], most_common_freqs::text::text[] FROM pg_catalog.pg_namespace n JOIN pg_catalog.pg_class c ON c.relnamespace = n.oid JOIN pg_catalog.pg_attribute a ON a.attrelid = c.oid AND a.attnum > 0 LEFT JOIN pg_catalog.pg_stats s ON n.nspname = s.schemaname AND c.relname = s.tablename AND a.attname = s.attname ` conds := []string{} vals := []interface{}{rowNum} if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, fmt.Sprintf("n.nspname LIKE $%d", len(vals))) } if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, fmt.Sprintf("c.relname LIKE $%d", len(vals))) } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, fmt.Sprintf("a.attname LIKE $%d", len(vals))) } rows, closeRows, err := r.query(qstr, conds, "a.attnum", vals...) 
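// Editor's note — illustrative sketch, not part of the original source. The Tables method above
// maps usql table-type names onto pg_class.relkind letters before building "$N" placeholders.
// Assuming a hypothetical call such as:
//
//	r.Tables(metadata.Filter{Types: []string{"VIEW", "MATERIALIZED VIEW"}})
//
// the generated condition would be roughly c.relkind IN ('', $1, $2) with vals = ["v", "m"];
// the leading empty-string placeholder keeps the IN list non-empty even when a requested type
// has no matching relkind.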
if err != nil { return nil, err } defer closeRows() results := []metadata.ColumnStat{} for rows.Next() { rec := metadata.ColumnStat{} err = rows.Scan( &rec.Schema, &rec.Table, &rec.Name, &rec.AvgWidth, &rec.NullFrac, &rec.NumDistinct, &rec.Min, &rec.Max, pq.Array(&rec.TopN), pq.Array(&rec.TopNFreqs), ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewColumnStatSet(results), nil } func (r metaReader) Indexes(f metadata.Filter) (*metadata.IndexSet, error) { qstr := ` SELECT 'postgres' as "Catalog", n.nspname as "Schema", c2.relname as "Table", c.relname as "Name", CASE i.indisprimary WHEN TRUE THEN 'YES' ELSE 'NO' END, CASE i.indisunique WHEN TRUE THEN 'YES' ELSE 'NO' END, COALESCE(am.amname, CASE c.relkind WHEN 'i' THEN 'index' WHEN 'I' THEN 'partitioned index' END ) as "Type" FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace LEFT JOIN pg_catalog.pg_index i ON i.indexrelid = c.oid LEFT JOIN pg_catalog.pg_class c2 ON i.indrelid = c2.oid LEFT JOIN pg_am am ON am.oid=c.relam` conds := []string{ "c.relkind IN ('i','I','')", "n.nspname !~ '^pg_toast'", } if f.OnlyVisible { conds = append(conds, "pg_catalog.pg_table_is_visible(c.oid)") } vals := []interface{}{} if !f.WithSystem { conds = append(conds, "n.nspname NOT IN ('pg_catalog', 'information_schema')") } if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, fmt.Sprintf("n.nspname LIKE $%d", len(vals))) } if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, fmt.Sprintf("c2.relname LIKE $%d", len(vals))) } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, fmt.Sprintf("c.relname LIKE $%d", len(vals))) } rows, closeRows, err := r.query(qstr, conds, "1, 2, 4", vals...) 
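// Editor's note — illustrative sketch, not part of the original source. ColumnStats above relies
// on github.com/lib/pq (imported at the top of this file) to scan the text[] columns
// most_common_vals and most_common_freqs into Go slices, along the lines of:
//
//	var topN []string
//	err := row.Scan(pq.Array(&topN)) // pq.Array adapts a *[]string to a Postgres array value
//
// where row is a hypothetical *sql.Row selecting a single text[] value; the real code scans the
// full record with pq.Array(&rec.TopN) and pq.Array(&rec.TopNFreqs).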
if err != nil { return nil, err } defer closeRows() results := []metadata.Index{} for rows.Next() { rec := metadata.Index{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Table, &rec.Name, &rec.IsUnique, &rec.IsPrimary, &rec.Type) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexSet(results), nil } func (r metaReader) IndexColumns(f metadata.Filter) (*metadata.IndexColumnSet, error) { qstr := ` SELECT 'postgres' as "Catalog", n.nspname as "Schema", c2.relname as "Table", c.relname as "IndexName", a.attname AS "Name", pg_catalog.format_type(a.atttypid, a.atttypmod) AS "DataType", a.attnum AS "OrdinalPosition" FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace JOIN pg_catalog.pg_index i ON i.indexrelid = c.oid JOIN pg_catalog.pg_class c2 ON i.indrelid = c2.oid JOIN pg_catalog.pg_attribute a ON c.oid = a.attrelid ` conds := []string{ "c.relkind IN ('i','I','')", "n.nspname <> 'pg_catalog'", "n.nspname <> 'information_schema'", "n.nspname !~ '^pg_toast'", "a.attnum > 0", "NOT a.attisdropped", } if f.OnlyVisible { conds = append(conds, "pg_catalog.pg_table_is_visible(c.oid)") } vals := []interface{}{} if !f.WithSystem { conds = append(conds, "n.nspname NOT IN ('pg_catalog', 'pg_toast', 'information_schema')") } if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, fmt.Sprintf("n.nspname LIKE $%d", len(vals))) } if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, fmt.Sprintf("c2.relname LIKE $%d", len(vals))) } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, fmt.Sprintf("c.relname LIKE $%d", len(vals))) } rows, closeRows, err := r.query(qstr, conds, "1, 2, 3, 4, 7", vals...) if err != nil { return nil, err } defer closeRows() results := []metadata.IndexColumn{} for rows.Next() { rec := metadata.IndexColumn{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Table, &rec.IndexName, &rec.Name, &rec.DataType, &rec.OrdinalPosition) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexColumnSet(results), nil } func (r metaReader) Triggers(f metadata.Filter) (*metadata.TriggerSet, error) { qstr := `SELECT n.nspname, c.relname, t.tgname, pg_catalog.pg_get_triggerdef(t.oid, true) FROM pg_catalog.pg_trigger t JOIN pg_catalog.pg_class c ON c.oid = t.tgrelid LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace` conds := []string{`( NOT t.tgisinternal OR (t.tgisinternal AND t.tgenabled = 'D') OR EXISTS (SELECT 1 FROM pg_catalog.pg_depend WHERE objid = t.oid AND refclassid = 'pg_catalog.pg_trigger'::pg_catalog.regclass) )`} vals := []interface{}{} if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, fmt.Sprintf("n.nspname LIKE $%d", len(vals))) } if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, fmt.Sprintf("c.relname LIKE $%d", len(vals))) } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, fmt.Sprintf("t.tgname LIKE $%d", len(vals))) } rows, closeRows, err := r.query(qstr, conds, "t.tgname", vals...) 
if err != nil { return nil, err } defer closeRows() results := []metadata.Trigger{} for rows.Next() { rec := metadata.Trigger{} err = rows.Scan( &rec.Schema, &rec.Table, &rec.Name, &rec.Definition, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewTriggerSet(results), nil } func (r metaReader) query(qstr string, conds []string, order string, vals ...interface{}) (*sql.Rows, func(), error) { if len(conds) != 0 { qstr += "\nWHERE " + strings.Join(conds, " AND ") } if order != "" { qstr += "\nORDER BY " + order } if r.limit != 0 { qstr += fmt.Sprintf("\nLIMIT %d", r.limit) } return r.Query(qstr, vals...) } usql-0.19.19/drivers/metadata/postgres/metadata_test.go000066400000000000000000000213531476173253300231500ustar00rootroot00000000000000package postgres_test import ( "database/sql" "flag" "fmt" "log" "os" "strings" "testing" dt "github.com/ory/dockertest/v3" dc "github.com/ory/dockertest/v3/docker" "github.com/xo/usql/drivers/metadata" "github.com/xo/usql/drivers/metadata/postgres" _ "github.com/xo/usql/drivers/postgres" ) type Database struct { BuildArgs []dc.BuildArg RunOptions *dt.RunOptions Exec []string Driver string URL string DockerPort string Resource *dt.Resource DB *sql.DB Opts []metadata.ReaderOption Reader metadata.BasicReader } var dbName string = "postgres" var db = Database{ BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "postgres:13"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/postgres-sakila-db/postgres-sakila-schema.sql"}, {Name: "TARGET", Value: "/docker-entrypoint-initdb.d"}, {Name: "USER", Value: "root"}, }, RunOptions: &dt.RunOptions{ Name: "usql-pgsql", Cmd: []string{"-c", "log_statement=all", "-c", "log_min_duration_statement=0"}, Env: []string{"POSTGRES_PASSWORD=pw"}, }, Driver: "postgres", URL: "postgres://postgres:pw@localhost:%s/postgres?sslmode=disable", DockerPort: "5432/tcp", } func TestMain(m *testing.M) { cleanup := true flag.BoolVar(&cleanup, "cleanup", true, "delete containers when finished") flag.Parse() pool, err := dt.NewPool("") if err != nil { log.Fatalf("Could not connect to docker: %s", err) } var ok bool db.Resource, ok = pool.ContainerByName(db.RunOptions.Name) if !ok { buildOpts := &dt.BuildOptions{ ContextDir: "../../testdata/docker", BuildArgs: db.BuildArgs, } db.Resource, err = pool.BuildAndRunWithBuildOptions(buildOpts, db.RunOptions) if err != nil { log.Fatal("Could not start resource: ", err) } } // exponential backoff-retry, because the application in the container might not be ready to accept connections yet if err := pool.Retry(func() error { hostPort := db.Resource.GetPort(db.DockerPort) var err error db.DB, err = sql.Open(db.Driver, fmt.Sprintf(db.URL, hostPort)) if err != nil { return err } return db.DB.Ping() }); err != nil { log.Fatal("Timed out waiting for db: ", err) } db.Reader = postgres.NewReader()(db.DB).(metadata.BasicReader) if len(db.Exec) != 0 { exitCode, err := db.Resource.Exec(db.Exec, dt.ExecOptions{ StdIn: os.Stdin, StdOut: os.Stdout, StdErr: os.Stderr, TTY: true, }) if err != nil || exitCode != 0 { log.Fatal("Could not load schema: ", err) } } code := m.Run() // You can't defer this because os.Exit doesn't care for defer if cleanup { if err := pool.Purge(db.Resource); err != nil { log.Fatal("Could not purge resource: ", err) } } os.Exit(code) } func TestTriggers(t *testing.T) { schema := "public" expected := "film_fulltext_trigger, last_updated" parent := "film" r := 
postgres.NewReader()(db.DB).(metadata.TriggerReader) result, err := r.Triggers(metadata.Filter{Schema: schema, Parent: parent}) if err != nil { log.Fatalf("Could not read %s triggers: %v", dbName, err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") if actual != expected { t.Errorf("Wrong %s trigger names, expected:\n %v\ngot:\n %v", dbName, expected, names) } } func TestColumns(t *testing.T) { // Only testing postgres specific datatype formatting. // The rest of the functionality is covered by informationschema/metadata_test.go:TestColumns type test struct { typeDef string want string } schema := "public" table := "test_dtypes" tests := []test{ {typeDef: "bit", want: "bit(1)"}, {typeDef: "bit(1)", want: "bit(1)"}, {typeDef: "bit varying", want: "bit varying"}, {typeDef: "bit varying(2)", want: "bit varying(2)"}, {typeDef: "character", want: "character(1)"}, {typeDef: "character(3)", want: "character(3)"}, {typeDef: "character varying", want: "character varying"}, {typeDef: "character varying(4)", want: "character varying(4)"}, {typeDef: "numeric", want: "numeric"}, {typeDef: "numeric(1,0)", want: "numeric(1,0)"}, {typeDef: "time", want: "time(6) without time zone"}, {typeDef: "time(4)", want: "time(4) without time zone"}, {typeDef: "time(6)", want: "time(6) without time zone"}, {typeDef: "time with time zone", want: "time(6) with time zone"}, {typeDef: "time(3) with time zone", want: "time(3) with time zone"}, {typeDef: "timestamp", want: "timestamp(6) without time zone"}, {typeDef: "timestamp(2)", want: "timestamp(2) without time zone"}, {typeDef: "timestamp with time zone", want: "timestamp(6) with time zone"}, {typeDef: "timestamp(1) with time zone", want: "timestamp(1) with time zone"}, {typeDef: "bigint", want: "bigint"}, {typeDef: "bigserial", want: "bigint"}, {typeDef: "boolean", want: "boolean"}, {typeDef: "box", want: "box"}, {typeDef: "bytea", want: "bytea"}, {typeDef: "cidr", want: "cidr"}, {typeDef: "circle", want: "circle"}, {typeDef: "date", want: "date"}, {typeDef: "double precision", want: "double precision"}, {typeDef: "inet", want: "inet"}, {typeDef: "integer", want: "integer"}, {typeDef: "json", want: "json"}, {typeDef: "jsonb", want: "jsonb"}, {typeDef: "line", want: "line"}, {typeDef: "lseg", want: "lseg"}, {typeDef: "macaddr", want: "macaddr"}, {typeDef: "macaddr8", want: "macaddr8"}, {typeDef: "money", want: "money"}, {typeDef: "path", want: "path"}, {typeDef: "pg_lsn", want: "pg_lsn"}, {typeDef: "pg_snapshot", want: "pg_snapshot"}, {typeDef: "point", want: "point"}, {typeDef: "polygon", want: "polygon"}, {typeDef: "real", want: "real"}, {typeDef: "smallint", want: "smallint"}, {typeDef: "smallserial", want: "smallint"}, {typeDef: "serial", want: "integer"}, {typeDef: "text", want: "text"}, {typeDef: "tsvector", want: "tsvector"}, {typeDef: "txid_snapshot", want: "txid_snapshot"}, {typeDef: "uuid", want: "uuid"}, {typeDef: "xml", want: "xml"}, } // Create table colExpressions := []string{} for i, test := range tests { colExpressions = append(colExpressions, fmt.Sprintf("column_%d %s", i, test.typeDef)) } query := fmt.Sprintf("CREATE TABLE %s.%s (%s)", schema, table, strings.Join(colExpressions, ", ")) db.DB.Exec(query) defer db.DB.Exec(fmt.Sprintf("DROP TABLE %s.%s", schema, table)) // Read data types r := postgres.NewReader()(db.DB).(metadata.ColumnReader) result, err := r.Columns(metadata.Filter{Schema: schema, Parent: table}) if err != nil { log.Fatalf("Could not read %s columns: %v", 
dbName, err) } actualTypes := []string{} for result.Next() { actualTypes = append(actualTypes, result.Get().DataType) } // Compare for i, test := range tests { if actualTypes[i] != test.want { t.Errorf("Wrong %s column data type, expected:\n %s, got:\n %s", dbName, test.want, actualTypes[i]) } } } func TestIndexes(t *testing.T) { schema := "public" table := "tmp_table" tests := []struct { indexType string want string }{ { indexType: "btree", want: "btree", }, { indexType: "hash", want: "hash", }, } columns := []string{} for _, v := range tests { columns = append(columns, fmt.Sprintf("column_%s integer", v.indexType)) } indexes := []string{} for _, v := range tests { indexes = append(indexes, fmt.Sprintf("CREATE INDEX %s_index ON %s.%s USING %[1]s (column_%[1]s)", v.indexType, schema, table)) } query := ` CREATE TABLE %s.%s (%s); -- Index creation SQL %s ` db.DB.Exec(fmt.Sprintf(query, schema, table, strings.Join(columns, ", "), strings.Join(indexes, ";"))) defer db.DB.Exec(fmt.Sprintf("DROP TABLE %s.%s", schema, table)) r := postgres.NewReader()(db.DB).(metadata.IndexReader) t.Run("Get info about access method for a specific index.", func(t *testing.T) { accessMethods := []string{} for _, v := range tests { result, err := r.Indexes(metadata.Filter{Name: fmt.Sprintf("%s_index", v.indexType)}) if err != nil { log.Fatalf("Could not get index information: %s", err) } for result.Next() { accessMethods = append(accessMethods, result.Get().Type) } } for i, test := range tests { if accessMethods[i] != test.want { t.Errorf("Wrong %s index access method, expected:\n %s, got:\n %s", dbName, test.want, accessMethods[i]) } } }) t.Run("Get info about index access method for all table indexes.", func(t *testing.T) { result, err := r.Indexes(metadata.Filter{Schema: schema, Parent: table}) if err != nil { log.Fatalf("Could not get index information: %s", err) } accessMethods := []string{} for result.Next() { accessMethods = append(accessMethods, result.Get().Type) } for i, test := range tests { if accessMethods[i] != test.want { t.Errorf("Wrong %s index access method, expected:\n %s, got:\n %s", dbName, test.want, accessMethods[i]) } } }) } usql-0.19.19/drivers/metadata/reader.go000066400000000000000000000144351476173253300177300ustar00rootroot00000000000000package metadata import ( "context" "database/sql" "time" "github.com/xo/usql/text" ) // PluginReader allows a Reader to be easily composed from other readers type PluginReader struct { catalogs func(Filter) (*CatalogSet, error) schemas func(Filter) (*SchemaSet, error) tables func(Filter) (*TableSet, error) columns func(Filter) (*ColumnSet, error) columnStats func(Filter) (*ColumnStatSet, error) indexes func(Filter) (*IndexSet, error) indexColumns func(Filter) (*IndexColumnSet, error) triggers func(Filter) (*TriggerSet, error) constraints func(Filter) (*ConstraintSet, error) constraintColumns func(Filter) (*ConstraintColumnSet, error) functions func(Filter) (*FunctionSet, error) functionColumns func(Filter) (*FunctionColumnSet, error) sequences func(Filter) (*SequenceSet, error) privilegeSummaries func(Filter) (*PrivilegeSummarySet, error) } var _ ExtendedReader = &PluginReader{} // NewPluginReader composes a Reader from other readers func NewPluginReader(readers ...Reader) Reader { p := PluginReader{} for _, i := range readers { if r, ok := i.(CatalogReader); ok { p.catalogs = r.Catalogs } if r, ok := i.(SchemaReader); ok { p.schemas = r.Schemas } if r, ok := i.(TableReader); ok { p.tables = r.Tables } if r, ok := i.(ColumnReader); ok { p.columns = 
r.Columns } if r, ok := i.(ColumnStatReader); ok { p.columnStats = r.ColumnStats } if r, ok := i.(IndexReader); ok { p.indexes = r.Indexes } if r, ok := i.(IndexColumnReader); ok { p.indexColumns = r.IndexColumns } if r, ok := i.(TriggerReader); ok { p.triggers = r.Triggers } if r, ok := i.(ConstraintReader); ok { p.constraints = r.Constraints } if r, ok := i.(ConstraintColumnReader); ok { p.constraintColumns = r.ConstraintColumns } if r, ok := i.(FunctionReader); ok { p.functions = r.Functions } if r, ok := i.(FunctionColumnReader); ok { p.functionColumns = r.FunctionColumns } if r, ok := i.(SequenceReader); ok { p.sequences = r.Sequences } if r, ok := i.(PrivilegeSummaryReader); ok { p.privilegeSummaries = r.PrivilegeSummaries } } return &p } func (p PluginReader) Catalogs(f Filter) (*CatalogSet, error) { if p.catalogs == nil { return nil, text.ErrNotSupported } return p.catalogs(f) } func (p PluginReader) Schemas(f Filter) (*SchemaSet, error) { if p.schemas == nil { return nil, text.ErrNotSupported } return p.schemas(f) } func (p PluginReader) Tables(f Filter) (*TableSet, error) { if p.tables == nil { return nil, text.ErrNotSupported } return p.tables(f) } func (p PluginReader) Columns(f Filter) (*ColumnSet, error) { if p.columns == nil { return nil, text.ErrNotSupported } return p.columns(f) } func (p PluginReader) ColumnStats(f Filter) (*ColumnStatSet, error) { if p.columnStats == nil { return nil, text.ErrNotSupported } return p.columnStats(f) } func (p PluginReader) Indexes(f Filter) (*IndexSet, error) { if p.indexes == nil { return nil, text.ErrNotSupported } return p.indexes(f) } func (p PluginReader) IndexColumns(f Filter) (*IndexColumnSet, error) { if p.indexColumns == nil { return nil, text.ErrNotSupported } return p.indexColumns(f) } func (p PluginReader) Triggers(f Filter) (*TriggerSet, error) { if p.triggers == nil { return nil, text.ErrNotSupported } return p.triggers(f) } func (p PluginReader) Constraints(f Filter) (*ConstraintSet, error) { if p.constraints == nil { return nil, text.ErrNotSupported } return p.constraints(f) } func (p PluginReader) ConstraintColumns(f Filter) (*ConstraintColumnSet, error) { if p.constraintColumns == nil { return nil, text.ErrNotSupported } return p.constraintColumns(f) } func (p PluginReader) Functions(f Filter) (*FunctionSet, error) { if p.functions == nil { return nil, text.ErrNotSupported } return p.functions(f) } func (p PluginReader) FunctionColumns(f Filter) (*FunctionColumnSet, error) { if p.functionColumns == nil { return nil, text.ErrNotSupported } return p.functionColumns(f) } func (p PluginReader) Sequences(f Filter) (*SequenceSet, error) { if p.sequences == nil { return nil, text.ErrNotSupported } return p.sequences(f) } func (p PluginReader) PrivilegeSummaries(f Filter) (*PrivilegeSummarySet, error) { if p.privilegeSummaries == nil { return nil, text.ErrNotSupported } return p.privilegeSummaries(f) } type LoggingReader struct { db DB logger logger dryRun bool timeout time.Duration } type logger interface { Println(...interface{}) } func NewLoggingReader(db DB, opts ...ReaderOption) LoggingReader { r := LoggingReader{ db: db, } for _, o := range opts { o(&r) } return r } // ReaderOption to configure the reader type ReaderOption func(Reader) // WithLogger used to log queries before executing them func WithLogger(l logger) ReaderOption { return func(r Reader) { r.(loggerSetter).setLogger(l) } } // WithDryRun allows to avoid running any queries func WithDryRun(d bool) ReaderOption { return func(r Reader) { 
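// Editor's note — illustrative sketch, not part of the original source. The ReaderOption values
// defined here are typically passed through a driver's NewMetadataReader; a hypothetical caller
// might wire them up as:
//
//	r := postgres.NewReader()(db,
//		metadata.WithLogger(log.New(os.Stderr, "SQL: ", 0)), // echo generated queries
//		metadata.WithTimeout(5*time.Second),                 // per-query context timeout
//		metadata.WithDryRun(true),                           // log only; Query returns sql.ErrNoRows
//	)
//
// Options a given reader cannot honor (for example WithLimit on a reader without SetLimit) are
// ignored by the type assertion with the ok form.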
r.(loggerSetter).setDryRun(d) } } // WithTimeout for a single query func WithTimeout(t time.Duration) ReaderOption { return func(r Reader) { r.(loggerSetter).setTimeout(t) } } // WithLimit for a single query, if the reader supports it func WithLimit(l int) ReaderOption { return func(r Reader) { if rl, ok := r.(limiter); ok { rl.SetLimit(l) } } } type loggerSetter interface { setLogger(logger) setDryRun(bool) setTimeout(t time.Duration) } type limiter interface { SetLimit(l int) } func (r *LoggingReader) setLogger(l logger) { r.logger = l } func (r *LoggingReader) setDryRun(d bool) { r.dryRun = d } func (r *LoggingReader) setTimeout(t time.Duration) { r.timeout = t } func (r LoggingReader) Query(q string, v ...interface{}) (*sql.Rows, CloseFunc, error) { if r.logger != nil { r.logger.Println(q) r.logger.Println(v) } if r.dryRun { return nil, nil, sql.ErrNoRows } if r.timeout != 0 { ctx, cancel := context.WithTimeout(context.Background(), r.timeout) rows, err := r.db.QueryContext(ctx, q, v...) return rows, func() { cancel(); rows.Close() }, err } rows, err := r.db.Query(q, v...) return rows, func() { rows.Close() }, err } // CloseFunc should be called when result wont be processed anymore type CloseFunc func() usql-0.19.19/drivers/metadata/writer.go000066400000000000000000000537611476173253300200070ustar00rootroot00000000000000package metadata import ( "context" "database/sql" "fmt" "io" "strings" "github.com/xo/dburl" "github.com/xo/tblfmt" "github.com/xo/usql/env" "github.com/xo/usql/text" ) // DB is the common interface for database operations, compatible with // database/sql.DB and database/sql.Tx. type DB interface { Exec(string, ...interface{}) (sql.Result, error) ExecContext(context.Context, string, ...interface{}) (sql.Result, error) Query(string, ...interface{}) (*sql.Rows, error) QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) QueryRow(string, ...interface{}) *sql.Row QueryRowContext(context.Context, string, ...interface{}) *sql.Row Prepare(string) (*sql.Stmt, error) PrepareContext(context.Context, string) (*sql.Stmt, error) } // DefaultWriter using an existing db introspector type DefaultWriter struct { r Reader db DB w io.Writer tableTypes map[rune][]string funcTypes map[rune][]string systemSchemas map[string]struct{} // custom functions for easier overloading listAllDbs func(string, bool) error } func NewDefaultWriter(r Reader, opts ...WriterOption) func(db DB, w io.Writer) Writer { defaultWriter := &DefaultWriter{ r: r, tableTypes: map[rune][]string{ 't': {"TABLE", "BASE TABLE", "SYSTEM TABLE", "SYNONYM", "LOCAL TEMPORARY", "GLOBAL TEMPORARY"}, 'v': {"VIEW", "SYSTEM VIEW"}, 'm': {"MATERIALIZED VIEW"}, 's': {"SEQUENCE"}, }, funcTypes: map[rune][]string{ 'a': {"AGGREGATE"}, 'n': {"FUNCTION"}, 'p': {"PROCEDURE", "PACKAGE"}, 't': {"TRIGGER"}, 'w': {"WINDOW"}, }, systemSchemas: map[string]struct{}{ "information_schema": {}, }, } for _, o := range opts { o(defaultWriter) } return func(db DB, w io.Writer) Writer { defaultWriter.db = db defaultWriter.w = w return defaultWriter } } // WriterOption to configure the DefaultWriter type WriterOption func(*DefaultWriter) // WithSystemSchemas that are ignored unless showSystem is true func WithSystemSchemas(schemas []string) WriterOption { return func(w *DefaultWriter) { w.systemSchemas = make(map[string]struct{}, len(schemas)) for _, s := range schemas { w.systemSchemas[s] = struct{}{} } } } // WithListAllDbs that lists all catalogs func WithListAllDbs(f func(string, bool) error) WriterOption { return func(w 
*DefaultWriter) { w.listAllDbs = f } } // DescribeFunctions matching pattern func (w DefaultWriter) DescribeFunctions(u *dburl.URL, funcTypes, pattern string, verbose, showSystem bool) error { r, ok := w.r.(FunctionReader) if !ok { return fmt.Errorf(text.NotSupportedByDriver, `\df`, u.Driver) } types := []string{} for k, v := range w.funcTypes { if strings.ContainsRune(funcTypes, k) { types = append(types, v...) } } sp, tp, err := parsePattern(pattern) if err != nil { return fmt.Errorf("failed to parse search pattern: %w", err) } res, err := r.Functions(Filter{Schema: sp, Name: tp, Types: types, WithSystem: showSystem}) if err != nil { return fmt.Errorf("failed to list functions: %w", err) } defer res.Close() if !showSystem { // in case the reader doesn't implement WithSystem res.SetFilter(func(r Result) bool { _, ok := w.systemSchemas[r.(*Function).Schema] return !ok }) } if _, ok := w.r.(FunctionColumnReader); ok { for res.Next() { f := res.Get() f.ArgTypes, err = w.getFunctionColumns(f.Catalog, f.Schema, f.SpecificName) if err != nil { return fmt.Errorf("failed to get columns of function %s.%s: %w", f.Schema, f.SpecificName, err) } } res.Reset() } columns := []string{"Schema", "Name", "Result data type", "Argument data types", "Type"} if verbose { columns = append(columns, "Volatility", "Security", "Language", "Source code") } res.SetColumns(columns) res.SetScanValues(func(r Result) []interface{} { f := r.(*Function) v := []interface{}{f.Schema, f.Name, f.ResultType, f.ArgTypes, f.Type} if verbose { v = append(v, f.Volatility, f.Security, f.Language, f.Source) } return v }) params := env.Pall() params["title"] = "List of functions" return tblfmt.EncodeAll(w.w, res, params) } func (w DefaultWriter) getFunctionColumns(c, s, f string) (string, error) { r := w.r.(FunctionColumnReader) cols, err := r.FunctionColumns(Filter{Catalog: c, Schema: s, Parent: f}) if err != nil { return "", err } args := []string{} for cols.Next() { c := cols.Get() // skip result params if c.OrdinalPosition == 0 { continue } typ := "" if c.Type != "IN" && c.Type != "" { typ = c.Type + " " } name := c.Name if name != "" { name += " " } args = append(args, fmt.Sprintf("%s%s%s", typ, name, c.DataType)) } return strings.Join(args, ", "), nil } // DescribeTableDetails matching pattern func (w DefaultWriter) DescribeTableDetails(u *dburl.URL, pattern string, verbose, showSystem bool) error { sp, tp, err := parsePattern(pattern) if err != nil { return fmt.Errorf("failed to parse search pattern: %w", err) } found := 0 tr, isTR := w.r.(TableReader) _, isCR := w.r.(ColumnReader) if isTR && isCR { res, err := tr.Tables(Filter{Schema: sp, Name: tp, WithSystem: showSystem}) if err != nil { return fmt.Errorf("failed to list tables: %w", err) } defer res.Close() if !showSystem { // in case the reader doesn't implement WithSystem res.SetFilter(func(r Result) bool { _, ok := w.systemSchemas[r.(*Table).Schema] return !ok }) } for res.Next() { t := res.Get() err = w.describeTableDetails(t.Type, t.Schema, t.Name, verbose, showSystem) if err != nil { return fmt.Errorf("failed to describe %s %s.%s: %w", t.Type, t.Schema, t.Name, err) } found++ } } if _, ok := w.r.(SequenceReader); ok { foundSeq, err := w.describeSequences(sp, tp, verbose, showSystem) if err != nil { return fmt.Errorf("failed to describe sequences: %w", err) } found += foundSeq } ir, isIR := w.r.(IndexReader) _, isICR := w.r.(IndexColumnReader) if isIR && isICR { res, err := ir.Indexes(Filter{Schema: sp, Name: tp, WithSystem: showSystem}) if err != nil && err != 
text.ErrNotSupported { return fmt.Errorf("failed to list indexes for table %s: %w", tp, err) } if res != nil { defer res.Close() if !showSystem { // in case the reader doesn't implement WithSystem res.SetFilter(func(r Result) bool { _, ok := w.systemSchemas[r.(*Index).Schema] return !ok }) } for res.Next() { i := res.Get() err = w.describeIndex(i) if err != nil { return fmt.Errorf("failed to describe index %s from table %s.%s: %w", i.Name, i.Schema, i.Table, err) } found++ } } } if found == 0 { fmt.Fprintf(w.w, text.RelationNotFound, pattern) fmt.Fprintln(w.w) } return nil } func (w DefaultWriter) describeTableDetails(typ, sp, tp string, verbose, showSystem bool) error { r := w.r.(ColumnReader) res, err := r.Columns(Filter{Schema: sp, Parent: tp, WithSystem: showSystem}) if err != nil { return fmt.Errorf("failed to list columns for table %s: %w", tp, err) } defer res.Close() columns := []string{"Name", "Type", "Nullable", "Default"} if verbose { columns = append(columns, "Size", "Decimal Digits", "Radix", "Octet Length") } res.SetColumns(columns) res.SetScanValues(func(r Result) []interface{} { f := r.(*Column) v := []interface{}{f.Name, f.DataType, f.IsNullable, f.Default} if verbose { v = append(v, f.ColumnSize, f.DecimalDigits, f.NumPrecRadix, f.CharOctetLength) } return v }) params := env.Pall() params["title"] = fmt.Sprintf("%s %s\n", typ, qualifiedIdentifier(sp, tp)) return w.encodeWithSummary(res, params, w.tableDetailsSummary(sp, tp)) } func (w DefaultWriter) encodeWithSummary(res tblfmt.ResultSet, params map[string]string, summary func(io.Writer, int) (int, error)) error { newEnc, opts := tblfmt.FromMap(params) opts = append(opts, tblfmt.WithSummary( map[int]func(io.Writer, int) (int, error){ -1: summary, }, )) enc, err := newEnc(res, opts...) 
if err != nil { return err } return enc.EncodeAll(w.w) } func (w DefaultWriter) tableDetailsSummary(sp, tp string) func(io.Writer, int) (int, error) { return func(out io.Writer, _ int) (int, error) { err := w.describeTableIndexes(out, sp, tp) if err != nil { return 0, err } err = w.describeTableConstraints( out, Filter{Schema: sp, Parent: tp}, func(r Result) bool { c := r.(*Constraint) return c.Type == "CHECK" && c.CheckClause != "" && !strings.HasSuffix(c.CheckClause, " IS NOT NULL") }, "Check constraints:", func(out io.Writer, c *Constraint) error { _, err := fmt.Fprintf(out, " \"%s\" %s (%s)\n", c.Name, c.Type, c.CheckClause) return err }, ) if err != nil { return 0, err } err = w.describeTableConstraints( out, Filter{Schema: sp, Parent: tp}, func(r Result) bool { return r.(*Constraint).Type == "FOREIGN KEY" }, "Foreign-key constraints:", func(out io.Writer, c *Constraint) error { columns, foreignColumns, err := w.getConstraintColumns(c.Catalog, c.Schema, c.Table, c.Name) if err != nil { return err } _, err = fmt.Fprintf(out, " \"%s\" %s (%s) REFERENCES %s(%s) ON UPDATE %s ON DELETE %s\n", c.Name, c.Type, columns, c.ForeignTable, foreignColumns, c.UpdateRule, c.DeleteRule) return err }, ) if err != nil { return 0, err } err = w.describeTableConstraints( out, Filter{Schema: sp, Reference: tp}, func(r Result) bool { return r.(*Constraint).Type == "FOREIGN KEY" }, "Referenced by:", func(out io.Writer, c *Constraint) error { columns, foreignColumns, err := w.getConstraintColumns(c.Catalog, c.Schema, c.Table, c.Name) if err != nil { return err } _, err = fmt.Fprintf(out, " TABLE \"%s\" CONSTRAINT \"%s\" %s (%s) REFERENCES %s(%s) ON UPDATE %s ON DELETE %s\n", c.Table, c.Name, c.Type, columns, c.ForeignTable, foreignColumns, c.UpdateRule, c.DeleteRule) return err }, ) err = w.describeTableTriggers(out, sp, tp) if err != nil { return 0, err } return 0, err } } func (w DefaultWriter) describeTableTriggers(out io.Writer, sp, tp string) error { r, ok := w.r.(TriggerReader) if !ok { return nil } res, err := r.Triggers(Filter{Schema: sp, Parent: tp}) if err != nil && err != text.ErrNotSupported { return fmt.Errorf("failed to list triggers for table %s: %w", tp, err) } if res == nil { return nil } defer res.Close() if res.Len() == 0 { return nil } fmt.Fprintln(out, "Triggers:") for res.Next() { t := res.Get() fmt.Fprintf(out, " \"%s\" %s\n", t.Name, t.Definition) } return nil } func (w DefaultWriter) describeTableIndexes(out io.Writer, sp, tp string) error { r, ok := w.r.(IndexReader) if !ok { return nil } res, err := r.Indexes(Filter{Schema: sp, Parent: tp}) if err != nil && err != text.ErrNotSupported { return fmt.Errorf("failed to list indexes for table %s: %w", tp, err) } if res == nil { return nil } defer res.Close() if res.Len() == 0 { return nil } fmt.Fprintln(out, "Indexes:") for res.Next() { i := res.Get() primary := "" unique := "" if i.IsPrimary == YES { primary = "PRIMARY_KEY, " } if i.IsUnique == YES { unique = "UNIQUE, " } i.Columns, err = w.getIndexColumns(i.Catalog, i.Schema, i.Table, i.Name) if err != nil { return fmt.Errorf("failed to get columns of index %s: %w", i.Name, err) } fmt.Fprintf(out, " \"%s\" %s%s%s (%s)\n", i.Name, primary, unique, i.Type, i.Columns) } return nil } func (w DefaultWriter) getIndexColumns(c, s, t, i string) (string, error) { r := w.r.(IndexColumnReader) cols, err := r.IndexColumns(Filter{Catalog: c, Schema: s, Parent: t, Name: i}) if err != nil { return "", err } result := []string{} for cols.Next() { result = append(result, cols.Get().Name) } return 
strings.Join(result, ", "), nil } func (w DefaultWriter) describeTableConstraints(out io.Writer, filter Filter, postFilter func(r Result) bool, label string, printer func(io.Writer, *Constraint) error) error { r, ok := w.r.(ConstraintReader) if !ok { return nil } res, err := r.Constraints(filter) if err != nil && err != text.ErrNotSupported { return fmt.Errorf("failed to list constraints: %w", err) } if res == nil { return nil } defer res.Close() res.SetFilter(postFilter) if res.Len() == 0 { return nil } fmt.Fprintln(out, label) for res.Next() { c := res.Get() err := printer(out, c) if err != nil { return err } } return nil } func (w DefaultWriter) getConstraintColumns(c, s, t, n string) (string, string, error) { r := w.r.(ConstraintColumnReader) cols, err := r.ConstraintColumns(Filter{Catalog: c, Schema: s, Parent: t, Name: n}) if err != nil { return "", "", err } columns := []string{} foreignColumns := []string{} for cols.Next() { columns = append(columns, cols.Get().Name) foreignColumns = append(foreignColumns, cols.Get().ForeignName) } return strings.Join(columns, ", "), strings.Join(foreignColumns, ", "), nil } func (w DefaultWriter) describeSequences(sp, tp string, verbose, showSystem bool) (int, error) { r := w.r.(SequenceReader) res, err := r.Sequences(Filter{Schema: sp, Name: tp, WithSystem: showSystem}) if err != nil && err != text.ErrNotSupported { return 0, err } if res == nil { return 0, nil } defer res.Close() found := 0 for res.Next() { s := res.Get() // wrap current record into a separate recordSet rows := NewSequenceSet([]Sequence{*s}) params := env.Pall() params["footer"] = "off" params["title"] = fmt.Sprintf("Sequence \"%s.%s\"\n", s.Schema, s.Name) err = tblfmt.EncodeAll(w.w, rows, params) if err != nil { return 0, err } // TODO footer should say which table this sequence belongs to found++ } return found, nil } func (w DefaultWriter) describeIndex(i *Index) error { r := w.r.(IndexColumnReader) res, err := r.IndexColumns(Filter{Schema: i.Schema, Parent: i.Table, Name: i.Name}) if err != nil { return fmt.Errorf("failed to get index columns: %w", err) } defer res.Close() if res.Len() == 0 { return nil } res.SetColumns([]string{"Name", "Type"}) res.SetScanValues(func(r Result) []interface{} { f := r.(*IndexColumn) return []interface{}{f.Name, f.DataType} }) params := env.Pall() params["title"] = fmt.Sprintf("Index %s\n", qualifiedIdentifier(i.Schema, i.Name)) return w.encodeWithSummary(res, params, func(out io.Writer, _ int) (int, error) { primary := "" if i.IsPrimary == YES { primary = "primary key, " } _, err := fmt.Fprintf(out, "%s%s, for table %s", primary, i.Type, i.Table) return 0, err }) } // ListAllDbs matching pattern func (w DefaultWriter) ListAllDbs(u *dburl.URL, pattern string, verbose bool) error { if w.listAllDbs != nil { return w.listAllDbs(pattern, verbose) } r, ok := w.r.(CatalogReader) if !ok { return fmt.Errorf(text.NotSupportedByDriver, `\l`, u.Driver) } res, err := r.Catalogs(Filter{Name: pattern}) if err != nil { return fmt.Errorf("failed to list catalogs: %w", err) } defer res.Close() params := env.Pall() params["title"] = "List of databases" return tblfmt.EncodeAll(w.w, res, params) } // ListTables matching pattern func (w DefaultWriter) ListTables(u *dburl.URL, tableTypes, pattern string, verbose, showSystem bool) error { r, ok := w.r.(TableReader) if !ok { return fmt.Errorf(text.NotSupportedByDriver, `\dt`, u.Driver) } types := []string{} for k, v := range w.tableTypes { if strings.ContainsRune(tableTypes, k) { types = append(types, v...) 
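// Editor's note — illustrative sketch, not part of the original source. The tableTypes map
// declared in NewDefaultWriter turns psql-style type letters into the type names understood by
// the readers, so a hypothetical invocation whose tableTypes string contains 't', 'v', and 'm'
// (a \dtvm-style command) expands to the union of those entries, roughly:
//
//	types = {"TABLE", "BASE TABLE", "SYSTEM TABLE", "SYNONYM", "LOCAL TEMPORARY",
//	         "GLOBAL TEMPORARY", "VIEW", "SYSTEM VIEW", "MATERIALIZED VIEW"}
//
// before being handed to TableReader.Tables via metadata.Filter.Types.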
} } sp, tp, err := parsePattern(pattern) if err != nil { return fmt.Errorf("failed to parse search pattern: %w", err) } res, err := r.Tables(Filter{Schema: sp, Name: tp, Types: types, WithSystem: showSystem}) if err != nil { return fmt.Errorf("failed to list tables: %w", err) } defer res.Close() if !showSystem { // in case the reader doesn't implement WithSystem res.SetFilter(func(r Result) bool { _, ok := w.systemSchemas[r.(*Table).Schema] return !ok }) } if res.Len() == 0 { fmt.Fprintf(w.w, text.RelationNotFound, pattern) fmt.Fprintln(w.w) return nil } columns := []string{"Schema", "Name", "Type"} if verbose { columns = append(columns, "Rows", "Size", "Comment") } res.SetColumns(columns) res.SetScanValues(func(r Result) []interface{} { f := r.(*Table) v := []interface{}{f.Schema, f.Name, f.Type} if verbose { v = append(v, f.Rows, f.Size, f.Comment) } return v }) params := env.Pall() params["title"] = "List of relations" return tblfmt.EncodeAll(w.w, res, params) } // ListSchemas matching pattern func (w DefaultWriter) ListSchemas(u *dburl.URL, pattern string, verbose, showSystem bool) error { r, ok := w.r.(SchemaReader) if !ok { return fmt.Errorf(text.NotSupportedByDriver, `\d`, u.Driver) } res, err := r.Schemas(Filter{Name: pattern, WithSystem: showSystem}) if err != nil { return fmt.Errorf("failed to list schemas: %w", err) } defer res.Close() if !showSystem { // in case the reader doesn't implement WithSystem res.SetFilter(func(r Result) bool { _, ok := w.systemSchemas[r.(*Schema).Schema] return !ok }) } params := env.Pall() params["title"] = "List of schemas" return tblfmt.EncodeAll(w.w, res, params) } // ListIndexes matching pattern func (w DefaultWriter) ListIndexes(u *dburl.URL, pattern string, verbose, showSystem bool) error { r, ok := w.r.(IndexReader) if !ok { return fmt.Errorf(text.NotSupportedByDriver, `\di`, u.Driver) } sp, tp, err := parsePattern(pattern) if err != nil { return fmt.Errorf("failed to parse search pattern: %w", err) } res, err := r.Indexes(Filter{Schema: sp, Name: tp, WithSystem: showSystem}) if err != nil { return fmt.Errorf("failed to list indexes: %w", err) } defer res.Close() if !showSystem { // in case the reader doesn't implement WithSystem res.SetFilter(func(r Result) bool { _, ok := w.systemSchemas[r.(*Index).Schema] return !ok }) } if res.Len() == 0 { fmt.Fprintf(w.w, text.RelationNotFound, pattern) fmt.Fprintln(w.w) return nil } columns := []string{"Schema", "Name", "Type", "Table"} if verbose { columns = append(columns, "Primary?", "Unique?") } res.SetColumns(columns) res.SetScanValues(func(r Result) []interface{} { f := r.(*Index) v := []interface{}{f.Schema, f.Name, f.Type, f.Table} if verbose { v = append(v, f.IsPrimary, f.IsUnique) } return v }) params := env.Pall() params["title"] = "List of indexes" return tblfmt.EncodeAll(w.w, res, params) } // ShowStats of columns for tables matching pattern func (w DefaultWriter) ShowStats(u *dburl.URL, statTypes, pattern string, verbose bool, k int) error { r, ok := w.r.(ColumnStatReader) if !ok { return fmt.Errorf(text.NotSupportedByDriver, `\ss`, u.Driver) } sp, tp, err := parsePattern(pattern) if err != nil { return fmt.Errorf("failed to parse search pattern: %w", err) } rows := int64(0) tr, ok := w.r.(TableReader) if ok { tables, err := tr.Tables(Filter{Schema: sp, Name: tp}) if err != nil { return fmt.Errorf("failed to get table entry: %w", err) } defer tables.Close() if tables.Next() { rows = tables.Get().Rows } } types := []string{"basic"} if verbose { types = append(types, "extended") } res, err := 
r.ColumnStats(Filter{Schema: sp, Parent: tp, Types: types}) if err != nil { return fmt.Errorf("failed to get column stats: %w", err) } defer res.Close() if res.Len() == 0 { fmt.Fprintf(w.w, text.RelationNotFound, pattern) fmt.Fprintln(w.w) return nil } columns := []string{"Schema", "Table", "Name", "Average width", "Nulls fraction", "Distinct values", "Dist. fraction"} if verbose { columns = append(columns, "Minimum value", "Maximum value", "Mean value", "Top N common values", "Top N values freqs") } res.SetColumns(columns) res.SetScanValues(func(r Result) []interface{} { f := r.(*ColumnStat) freqs := []string{} for _, freq := range f.TopNFreqs { freqs = append(freqs, fmt.Sprintf("%.4f", freq)) } n := k if n > len(freqs) { n = len(freqs) } distFrac := 1.0 if rows != 0 && f.NumDistinct != rows { distFrac = float64(f.NumDistinct) / float64(rows) } v := []interface{}{ f.Schema, f.Table, f.Name, f.AvgWidth, f.NullFrac, f.NumDistinct, fmt.Sprintf("%.4f", distFrac), } if verbose { v = append(v, f.Min, f.Max, f.Mean, strings.Join(f.TopN[:n], ", "), strings.Join(freqs[:n], ", "), ) } return v }) params := env.Pall() params["title"] = "Column stats" return tblfmt.EncodeAll(w.w, res, params) } // ListPrivilegeSummaries matching pattern func (w DefaultWriter) ListPrivilegeSummaries(u *dburl.URL, pattern string, showSystem bool) error { r, ok := w.r.(PrivilegeSummaryReader) if !ok { return fmt.Errorf(text.NotSupportedByDriver, `\dp`, u.Driver) } sp, tp, err := parsePattern(pattern) if err != nil { return fmt.Errorf("failed to parse search pattern: %w", err) } // filter for tables, views and sequences const tableTypes = "tvms" types := []string{} for k, v := range w.tableTypes { if strings.ContainsRune(tableTypes, k) { types = append(types, v...) } } res, err := r.PrivilegeSummaries(Filter{Schema: sp, Name: tp, WithSystem: showSystem, Types: types}) if err != nil { return fmt.Errorf("failed to list table privileges: %w", err) } defer res.Close() if !showSystem { // in case the reader doesn't implement WithSystem res.SetFilter(func(r Result) bool { _, ok := w.systemSchemas[r.(*PrivilegeSummary).Schema] return !ok }) } res.SetScanValues(func(r Result) []interface{} { f := r.(*PrivilegeSummary) v := []interface{}{ f.Schema, f.Name, f.ObjectType, f.ObjectPrivileges, f.ColumnPrivileges, } return v }) params := env.Pall() params["title"] = "Access privileges" return tblfmt.EncodeAll(w.w, res, params) } func parsePattern(pattern string) (string, string, error) { // TODO do proper escaping, quoting etc if strings.ContainsRune(pattern, '.') { parts := strings.SplitN(pattern, ".", 2) return strings.ReplaceAll(parts[0], "*", "%"), strings.ReplaceAll(parts[1], "*", "%"), nil } return "", strings.ReplaceAll(pattern, "*", "%"), nil } func qualifiedIdentifier(schema, name string) string { if schema == "" { return fmt.Sprintf("\"%s\"", name) } return fmt.Sprintf("\"%s.%s\"", schema, name) } usql-0.19.19/drivers/moderncsqlite/000077500000000000000000000000001476173253300172215ustar00rootroot00000000000000usql-0.19.19/drivers/moderncsqlite/moderncsqlite.go000066400000000000000000000024151476173253300224230ustar00rootroot00000000000000// Package moderncsqlite defines and registers usql's ModernC SQLite3 driver. // Transpilation of SQLite3 to Go. 
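// Editor's note — illustrative sketch, not part of the original source, referring back to
// parsePattern in drivers/metadata/writer.go above: the pattern syntax accepted by the
// \d-style commands splits on the first '.' and maps '*' to the SQL LIKE wildcard, e.g.:
//
//	parsePattern("public.film*") // -> schema "public", name "film%"
//	parsePattern("film")         // -> schema "",       name "film"
//
// Quoting and escaping are not yet handled (see the TODO in that function).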
// // See: https://gitlab.com/cznic/sqlite package moderncsqlite import ( "context" "database/sql" "io" "strconv" "github.com/xo/dburl" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/sqlite3/sqshared" "modernc.org/sqlite" // DRIVER ) func init() { drivers.Register("moderncsqlite", drivers.Driver{ AllowMultilineComments: true, Open: func(_ context.Context, u *dburl.URL, stdout, stderr func() io.Writer) (func(string, string) (*sql.DB, error), error) { return func(_ string, params string) (*sql.DB, error) { return sql.Open("sqlite", params) }, nil }, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext(ctx, `SELECT sqlite_version()`).Scan(&ver) if err != nil { return "", err } return "ModernC SQLite " + ver, nil }, Err: func(err error) (string, string) { if e, ok := err.(*sqlite.Error); ok { return strconv.Itoa(e.Code()), e.Error() } return "", err.Error() }, ConvertBytes: sqshared.ConvertBytes, NewMetadataReader: sqshared.NewMetadataReader, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), }) } usql-0.19.19/drivers/mymysql/000077500000000000000000000000001476173253300160635ustar00rootroot00000000000000usql-0.19.19/drivers/mymysql/mymysql.go000066400000000000000000000023021476173253300201220ustar00rootroot00000000000000// Package mymysql defines and registers usql's MySQL MyMySQL driver. // // See: https://github.com/ziutek/mymysql package mymysql import ( "io" "strconv" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" mymeta "github.com/xo/usql/drivers/metadata/mysql" _ "github.com/ziutek/mymysql/godrv" // DRIVER "github.com/ziutek/mymysql/mysql" ) func init() { drivers.Register("mymysql", drivers.Driver{ AllowMultilineComments: true, AllowHashComments: true, LexerName: "mysql", UseColumnTypes: true, Err: func(err error) (string, string) { if e, ok := err.(*mysql.Error); ok { return strconv.Itoa(int(e.Code)), string(e.Msg) } return "", err.Error() }, IsPasswordErr: func(err error) bool { if e, ok := err.(*mysql.Error); ok { return e.Code == mysql.ER_ACCESS_DENIED_ERROR } return false }, NewMetadataReader: mymeta.NewReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(mymeta.NewReader(db, opts...))(db, w) }, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), NewCompleter: mymeta.NewCompleter, }) } usql-0.19.19/drivers/mysql/000077500000000000000000000000001476173253300155155ustar00rootroot00000000000000usql-0.19.19/drivers/mysql/mysql.go000066400000000000000000000026151476173253300172150ustar00rootroot00000000000000// Package mysql defines and registers usql's MySQL driver. 
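// Editor's note — illustrative sketch, not part of the original source. The drivers in this
// section all wire bulk copy support through drivers.CopyWithInsert, differing only in the
// placeholder style their database expects, e.g.:
//
//	Copy: drivers.CopyWithInsert(func(int) string { return "?" })                    // MySQL-style
//	Copy: drivers.CopyWithInsert(func(n int) string { return fmt.Sprintf(":%d", n) }) // Oracle-style
//
// The placeholder callback maps a parameter index to the token embedded in the generated INSERT
// (presumably 1-based, matching Oracle's :1 binds).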
// // Alias: memsql, SingleStore MemSQL // Alias: vitess, Vitess Database // Alias: tidb, TiDB // // See: https://github.com/go-sql-driver/mysql // Group: base package mysql import ( "io" "strconv" "github.com/go-sql-driver/mysql" // DRIVER "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" mymeta "github.com/xo/usql/drivers/metadata/mysql" ) func init() { drivers.Register("mysql", drivers.Driver{ AllowMultilineComments: true, AllowHashComments: true, LexerName: "mysql", UseColumnTypes: true, ForceParams: drivers.ForceQueryParameters([]string{ "parseTime", "true", "loc", "Local", "sql_mode", "ansi", }), Err: func(err error) (string, string) { if e, ok := err.(*mysql.MySQLError); ok { return strconv.Itoa(int(e.Number)), e.Message } return "", err.Error() }, IsPasswordErr: func(err error) bool { if e, ok := err.(*mysql.MySQLError); ok { return e.Number == 1045 } return false }, NewMetadataReader: mymeta.NewReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(mymeta.NewReader(db, opts...))(db, w) }, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), NewCompleter: mymeta.NewCompleter, }, "memsql", "vitess", "tidb") } usql-0.19.19/drivers/netezza/000077500000000000000000000000001476173253300160305ustar00rootroot00000000000000usql-0.19.19/drivers/netezza/netezza.go000066400000000000000000000041611476173253300200410ustar00rootroot00000000000000// Package netezza defines and registers usql's Netezza driver. // // See: https://github.com/IBM/nzgo package netezza import ( "context" "io" "log" "github.com/IBM/nzgo/v12" // DRIVER: nzgo "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" ) func init() { nzgo.Debug = log.New(io.Discard, "", 0) nzgo.Info = log.New(io.Discard, "", 0) nzgo.Fatal = log.New(io.Discard, "", 0) newReader := infos.New( infos.WithPlaceholder(func(int) string { return "?" 
}), infos.WithIndexes(false), infos.WithConstraints(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, interval_precision, 0)", infos.FunctionColumnsColumnSize: "COALESCE(character_maximum_length, numeric_precision, datetime_precision, interval_precision, 0)", }), infos.WithSystemSchemas([]string{"DEFINITION_SCHEMA", "INFORMATION_SCHEMA"}), infos.WithCurrentSchema("CURRENT_SCHEMA"), ) drivers.Register("nzgo", drivers.Driver{ Name: "nz", AllowDollar: true, AllowMultilineComments: true, LexerName: "postgres", Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext(ctx, `SELECT version()`).Scan(&ver) if err != nil { return "", err } return "Netezza " + ver, nil }, ChangePassword: func(db drivers.DB, user, newpw, _ string) error { _, err := db.Exec(`ALTER USER ` + user + ` PASSWORD '` + newpw + `'`) return err }, Err: func(err error) (string, string) { if e, ok := err.(*nzgo.Error); ok { return string(e.Code), e.Message } return "", err.Error() }, IsPasswordErr: func(err error) bool { if e, ok := err.(*nzgo.Error); ok { return e.Code.Name() == "invalid_password" } return false }, NewMetadataReader: newReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(newReader(db, opts...))(db, w) }, }) } usql-0.19.19/drivers/odbc/000077500000000000000000000000001476173253300152575ustar00rootroot00000000000000usql-0.19.19/drivers/odbc/odbc.go000066400000000000000000000023511476173253300165160ustar00rootroot00000000000000// Package odbc defines and registers usql's ODBC driver. Requires CGO. Uses // respective platform's standard ODBC packages. // // See: https://github.com/alexbrainman/odbc // Group: all package odbc import ( "regexp" "strings" "github.com/alexbrainman/odbc" // DRIVER "github.com/xo/dburl" "github.com/xo/usql/drivers" ) func init() { endRE := regexp.MustCompile(`;?\s*$`) endAnchorRE := regexp.MustCompile(`(?i)\send\s*;\s*$`) drivers.Register("odbc", drivers.Driver{ LexerName: "tsql", Process: func(u *dburl.URL, prefix string, sqlstr string) (string, string, bool, error) { // trim last ; but only when not END; if s := strings.ToLower(u.Query().Get("usql_trim")); s != "" && s != "off" && s != "0" && s != "false" { if !endAnchorRE.MatchString(sqlstr) { sqlstr = endRE.ReplaceAllString(sqlstr, "") } } typ, q := drivers.QueryExecType(prefix, sqlstr) return typ, sqlstr, q, nil }, IsPasswordErr: func(err error) bool { if e, ok := err.(*odbc.Error); ok { msg := strings.ToLower(e.Error()) return strings.Contains(msg, "failed") && (strings.Contains(msg, "login") || strings.Contains(msg, "authentication") || strings.Contains(msg, "password")) } return false }, }) } usql-0.19.19/drivers/oracle/000077500000000000000000000000001476173253300156155ustar00rootroot00000000000000usql-0.19.19/drivers/oracle/oracle.go000066400000000000000000000021131476173253300174060ustar00rootroot00000000000000// Package oracle defines and registers usql's Oracle Database driver. 
// // See: https://github.com/sijms/go-ora // Group: base package oracle import ( "errors" "fmt" "strings" _ "github.com/sijms/go-ora/v2" // DRIVER "github.com/xo/usql/drivers/oracle/orshared" ) func init() { orshared.Register( "oracle", // unwrap error func(err error) (string, string) { if e := errors.Unwrap(err); e != nil { err = e } code, msg := "", err.Error() if e, ok := err.(interface { Code() int }); ok { code = fmt.Sprintf("ORA-%05d", e.Code()) } if e, ok := err.(interface { Message() string }); ok { msg = e.Message() } if i := strings.LastIndex(msg, "ORA-"); msg == "" && i != -1 { msg = msg[i:] if j := strings.Index(msg, ":"); j != -1 { msg = msg[j+1:] if code == "" { code = msg[i:j] } } } return code, strings.TrimSpace(msg) }, // is password error func(err error) bool { if e := errors.Unwrap(err); e != nil { err = e } return strings.Contains(err.Error(), "empty password") }, ) } usql-0.19.19/drivers/oracle/orshared/000077500000000000000000000000001476173253300174245ustar00rootroot00000000000000usql-0.19.19/drivers/oracle/orshared/orshared.go000066400000000000000000000045051476173253300215660ustar00rootroot00000000000000// Package orshared contains a shared driver implementation for the // Oracle Database. Used by the Oracle and Godror drivers. package orshared import ( "context" "fmt" "io" "regexp" "strings" "github.com/xo/dburl" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" orameta "github.com/xo/usql/drivers/metadata/oracle" "github.com/xo/usql/env" ) // Register registers an oracle driver. func Register(name string, err func(error) (string, string), isPasswordErr func(error) bool) { endRE := regexp.MustCompile(`;?\s*$`) endAnchorRE := regexp.MustCompile(`(?i)\send\s*;\s*$`) drivers.Register(name, drivers.Driver{ AllowMultilineComments: true, LowerColumnNames: true, ForceParams: func(u *dburl.URL) { // if the service name is not specified, use the environment // variable if present if strings.TrimPrefix(u.Path, "/") == "" { if n, ok := env.Getenv("ORACLE_SID", "ORASID"); ok && n != "" { u.Path = "/" + n if u.Host == "" { u.Host = "localhost" } } } }, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string if err := db.QueryRowContext(ctx, `SELECT version FROM v$instance`).Scan(&ver); err != nil { return "", err } return "Oracle Database " + ver, nil }, User: func(ctx context.Context, db drivers.DB) (string, error) { var user string if err := db.QueryRowContext(ctx, `SELECT user FROM dual`).Scan(&user); err != nil { return "", err } return user, nil }, ChangePassword: func(db drivers.DB, user, newpw, _ string) error { _, err := db.Exec(`ALTER USER ` + user + ` IDENTIFIED BY ` + newpw) return err }, Err: err, IsPasswordErr: isPasswordErr, Process: func(_ *dburl.URL, prefix string, sqlstr string) (string, string, bool, error) { if !endAnchorRE.MatchString(sqlstr) { // trim last ; but only when not END; sqlstr = endRE.ReplaceAllString(sqlstr, "") } typ, q := drivers.QueryExecType(prefix, sqlstr) return typ, sqlstr, q, nil }, NewMetadataReader: orameta.NewReader(), NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(orameta.NewReader()(db, opts...))(db, w) }, Copy: drivers.CopyWithInsert(func(n int) string { return fmt.Sprintf(":%d", n) }), }) } 
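// Illustrative sketch (not part of the tar'd sources): the Process hook that
// orshared.Register installs above trims a single trailing ";" from a
// statement unless the statement ends with "END;", so PL/SQL blocks keep their
// terminator. The regular expressions below mirror endRE and endAnchorRE from
// Register; the package, file, and inputs here are hypothetical.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	endRE := regexp.MustCompile(`;?\s*$`)
	endAnchorRE := regexp.MustCompile(`(?i)\send\s*;\s*$`)
	trim := func(sqlstr string) string {
		if !endAnchorRE.MatchString(sqlstr) {
			sqlstr = endRE.ReplaceAllString(sqlstr, "")
		}
		return sqlstr
	}
	fmt.Println(trim("SELECT * FROM dual;")) // prints "SELECT * FROM dual" -- trailing semicolon stripped
	fmt.Println(trim("BEGIN NULL; END;"))    // prints "BEGIN NULL; END;" -- left intact
}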
usql-0.19.19/drivers/ots/000077500000000000000000000000001476173253300151555ustar00rootroot00000000000000usql-0.19.19/drivers/ots/ots.go000066400000000000000000000004771476173253300163210ustar00rootroot00000000000000// Package ots defines and registers usql's Alibaba Tablestore driver. // // See: https://github.com/aliyun/aliyun-tablestore-go-sql-driver package ots import ( _ "github.com/aliyun/aliyun-tablestore-go-sql-driver" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("ots", drivers.Driver{}) } usql-0.19.19/drivers/pgx/000077500000000000000000000000001476173253300151465ustar00rootroot00000000000000usql-0.19.19/drivers/pgx/pgx.go000066400000000000000000000111331476173253300162720ustar00rootroot00000000000000// Package pgx defines and registers usql's PostgreSQL PGX driver. // // See: https://github.com/jackc/pgx package pgx import ( "context" "database/sql" "errors" "fmt" "io" "strings" "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgconn" "github.com/jackc/pgx/v5/stdlib" // DRIVER "github.com/xo/dburl" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" pgmeta "github.com/xo/usql/drivers/metadata/postgres" "github.com/xo/usql/text" ) func init() { drivers.Register("pgx", drivers.Driver{ AllowDollar: true, AllowMultilineComments: true, LexerName: "postgres", Open: func(ctx context.Context, u *dburl.URL, stdout, stderr func() io.Writer) (func(string, string) (*sql.DB, error), error) { return func(_, dsn string) (*sql.DB, error) { config, err := pgx.ParseConfig(dsn) if err != nil { return nil, err } config.OnNotice = func(_ *pgconn.PgConn, notice *pgconn.Notice) { out := stderr() fmt.Fprintln(out, notice.Severity+": ", notice.Message) if notice.Hint != "" { fmt.Fprintln(out, "HINT: ", notice.Hint) } } config.OnNotification = func(_ *pgconn.PgConn, notification *pgconn.Notification) { var payload string if notification.Payload != "" { payload = fmt.Sprintf(text.NotificationPayload, notification.Payload) } fmt.Fprintln(stdout(), fmt.Sprintf(text.NotificationReceived, notification.Channel, payload, notification.PID)) } // NOTE: as opposed to the github.com/lib/pq driver, this // NOTE: driver has a "prefer" mode that is enabled by default. // NOTE: as such there is no logic here to try to reconnect as // NOTE: in the postgres driver. 
return stdlib.OpenDB(*config), nil }, nil }, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext(ctx, `SHOW server_version`).Scan(&ver) if err != nil { return "", err } return "PostgreSQL " + ver, nil }, ChangePassword: func(db drivers.DB, user, newpw, _ string) error { _, err := db.Exec(`ALTER USER ` + user + ` PASSWORD '` + newpw + `'`) return err }, Err: func(err error) (string, string) { var e *pgconn.PgError if errors.As(err, &e) { return e.Code, e.Message } return "", err.Error() }, IsPasswordErr: func(err error) bool { var e *pgconn.PgError if errors.As(err, &e) { return e.Code == "28P01" } return false }, NewMetadataReader: pgmeta.NewReader(), NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(pgmeta.NewReader()(db, opts...))(db, w) }, Copy: func(ctx context.Context, db *sql.DB, rows *sql.Rows, table string) (int64, error) { conn, err := db.Conn(context.Background()) if err != nil { return 0, fmt.Errorf("failed to get a connection from pool: %w", err) } leftParen := strings.IndexRune(table, '(') colQuery := "SELECT * FROM " + table + " WHERE 1=0" if leftParen != -1 { // pgx's CopyFrom needs a slice of column names and splitting them by a comma is unreliable // so evaluate the possible expressions against the target table colQuery = "SELECT " + table[leftParen+1:len(table)-1] + " FROM " + table[:leftParen] + " WHERE 1=0" table = table[:leftParen] } colStmt, err := db.PrepareContext(ctx, colQuery) if err != nil { return 0, fmt.Errorf("failed to prepare query to determine target table columns: %w", err) } colRows, err := colStmt.QueryContext(ctx) if err != nil { return 0, fmt.Errorf("failed to execute query to determine target table columns: %w", err) } columns, err := colRows.Columns() if err != nil { return 0, fmt.Errorf("failed to fetch target table columns: %w", err) } clen := len(columns) crows := ©Rows{ rows: rows, values: make([]interface{}, clen), } for i := 0; i < clen; i++ { crows.values[i] = new(interface{}) } var n int64 err = conn.Raw(func(driverConn interface{}) error { conn := driverConn.(*stdlib.Conn).Conn() n, err = conn.CopyFrom(ctx, pgx.Identifier(strings.SplitN(table, ".", 2)), columns, crows) return err }) return n, err }, }) } type copyRows struct { rows *sql.Rows values []interface{} } func (r *copyRows) Next() bool { return r.rows.Next() } func (r *copyRows) Values() ([]interface{}, error) { err := r.rows.Scan(r.values...) actuals := make([]interface{}, len(r.values)) for i, v := range r.values { actuals[i] = *(v.(*interface{})) } return actuals, err } func (r *copyRows) Err() error { return r.rows.Err() } usql-0.19.19/drivers/postgres/000077500000000000000000000000001476173253300162165ustar00rootroot00000000000000usql-0.19.19/drivers/postgres/postgres.go000066400000000000000000000133031476173253300204130ustar00rootroot00000000000000// Package postgres defines and registers usql's PostgreSQL driver. 
// // Alias: cockroachdb, CockroachDB // Alias: redshift, Amazon Redshift // // See: https://github.com/lib/pq // Group: base package postgres import ( "context" "database/sql" "errors" "fmt" "io" "strings" "github.com/lib/pq" // DRIVER "github.com/xo/dburl" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" pgmeta "github.com/xo/usql/drivers/metadata/postgres" "github.com/xo/usql/env" "github.com/xo/usql/text" ) func init() { openConn := func(stdout, stderr func() io.Writer, dsn string) (*sql.DB, error) { conn, err := pq.NewConnector(dsn) if err != nil { return nil, err } noticeConn := pq.ConnectorWithNoticeHandler(conn, func(notice *pq.Error) { out := stderr() fmt.Fprintln(out, notice.Severity+": ", notice.Message) if notice.Hint != "" { fmt.Fprintln(out, "HINT: ", notice.Hint) } }) notificationConn := pq.ConnectorWithNotificationHandler(noticeConn, func(notification *pq.Notification) { var payload string if notification.Extra != "" { payload = fmt.Sprintf(text.NotificationPayload, notification.Extra) } fmt.Fprintln(stdout(), fmt.Sprintf(text.NotificationReceived, notification.Channel, payload, notification.BePid)) }) return sql.OpenDB(notificationConn), nil } drivers.Register("postgres", drivers.Driver{ Name: "pq", AllowDollar: true, AllowMultilineComments: true, LexerName: "postgres", ForceParams: func(u *dburl.URL) { if u.Scheme == "cockroachdb" { drivers.ForceQueryParameters([]string{"sslmode", "disable"})(u) } }, Open: func(ctx context.Context, u *dburl.URL, stdout, stderr func() io.Writer) (func(string, string) (*sql.DB, error), error) { return func(_, dsn string) (*sql.DB, error) { conn, err := openConn(stdout, stderr, dsn) if err != nil { return nil, err } // special retry handling case, since there's no lib/pq retry mode if env.Get("SSLMODE") == "retry" && !u.Query().Has("sslmode") { switch err = conn.PingContext(ctx); { case errors.Is(err, pq.ErrSSLNotSupported): s := "sslmode=disable " + dsn conn, err = openConn(stdout, stderr, s) if err != nil { return nil, err } u.DSN = s case err != nil: return nil, err } } return conn, nil }, nil }, Version: func(ctx context.Context, db drivers.DB) (string, error) { // numeric version // SHOW server_version_num; var ver string err := db.QueryRowContext(ctx, `SHOW server_version`).Scan(&ver) if err != nil { return "", err } return "PostgreSQL " + ver, nil }, ChangePassword: func(db drivers.DB, user, newpw, _ string) error { _, err := db.Exec(`ALTER USER ` + user + ` PASSWORD '` + newpw + `'`) return err }, Err: func(err error) (string, string) { if e, ok := err.(*pq.Error); ok { return string(e.Code), e.Message } return "", err.Error() }, IsPasswordErr: func(err error) bool { if e, ok := err.(*pq.Error); ok { return e.Code.Name() == "invalid_password" } return false }, NewMetadataReader: pgmeta.NewReader(), NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(pgmeta.NewReader()(db, opts...))(db, w) }, Copy: func(ctx context.Context, db *sql.DB, rows *sql.Rows, table string) (int64, error) { columns, err := rows.Columns() if err != nil { return 0, fmt.Errorf("failed to fetch source rows columns: %w", err) } clen := len(columns) query := table if !strings.HasPrefix(strings.ToLower(query), "insert into") { leftParen := strings.IndexRune(table, '(') colQuery := "SELECT * FROM " + table + " WHERE 1=0" if leftParen != -1 { colQuery = "SELECT " + table[leftParen+1:len(table)-1] + " FROM " + table[:leftParen] + " WHERE 1=0" table = table[:leftParen] } 
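// Illustrative example (not from the sources): a copy target written as
//
//	film(title, length)
//
// is rewritten by the branch above so that
//
//	colQuery = "SELECT title, length FROM film WHERE 1=0"
//	table    = "film"
//
// i.e. the zero-row probe query exists only to learn the exact target column
// names that are then handed to pq.CopyIn / pq.CopyInSchema below.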
colStmt, err := db.PrepareContext(ctx, colQuery) if err != nil { return 0, fmt.Errorf("failed to prepare query to determine target table columns: %w", err) } defer colStmt.Close() colRows, err := colStmt.QueryContext(ctx) if err != nil { return 0, fmt.Errorf("failed to execute query to determine target table columns: %w", err) } columns, err := colRows.Columns() if err != nil { return 0, fmt.Errorf("failed to fetch target table columns: %w", err) } if schemaSep := strings.Index(table, "."); schemaSep >= 0 { query = pq.CopyInSchema(table[:schemaSep], table[schemaSep+1:], columns...) } else { query = pq.CopyIn(table, columns...) } } tx, err := db.BeginTx(ctx, nil) if err != nil { return 0, fmt.Errorf("failed to begin transaction: %w", err) } stmt, err := tx.PrepareContext(ctx, query) if err != nil { return 0, fmt.Errorf("failed to prepare insert query: %w", err) } defer stmt.Close() values := make([]interface{}, clen) for i := 0; i < clen; i++ { values[i] = new(interface{}) } var n int64 for rows.Next() { err = rows.Scan(values...) if err != nil { return n, fmt.Errorf("failed to scan row: %w", err) } _, err := stmt.ExecContext(ctx, values...) if err != nil { return n, fmt.Errorf("failed to exec copy: %w", err) } } res, err := stmt.ExecContext(ctx) if err != nil { return n, fmt.Errorf("failed to final exec copy: %w", err) } rn, err := res.RowsAffected() if err != nil { return n, fmt.Errorf("failed to check rows affected: %w", err) } n += rn err = tx.Commit() if err != nil { return n, fmt.Errorf("failed to commit transaction: %w", err) } return n, rows.Err() }, }, "cockroachdb", "redshift") } usql-0.19.19/drivers/presto/000077500000000000000000000000001476173253300156645ustar00rootroot00000000000000usql-0.19.19/drivers/presto/presto.go000066400000000000000000000012541476173253300175310ustar00rootroot00000000000000// Package presto defines and registers usql's Presto driver. // // See: https://github.com/prestodb/presto-go-client package presto import ( "context" _ "github.com/prestodb/presto-go-client/presto" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("presto", drivers.Driver{ AllowMultilineComments: true, Process: drivers.StripTrailingSemicolon, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext( ctx, `SELECT node_version FROM system.runtime.nodes LIMIT 1`, ).Scan(&ver) if err != nil { return "", err } return "Presto " + ver, nil }, }) } usql-0.19.19/drivers/ql/000077500000000000000000000000001476173253300147645ustar00rootroot00000000000000usql-0.19.19/drivers/ql/ql.go000066400000000000000000000007151476173253300157320ustar00rootroot00000000000000// Package ql defines and registers usql's Cznic QL driver. // // See: https://gitlab.com/cznic/ql package ql import ( "github.com/xo/usql/drivers" "modernc.org/ql" // DRIVER ) func init() { ql.RegisterDriver() // ql.RegisterMemDriver() drivers.Register("ql", drivers.Driver{ AllowMultilineComments: true, AllowCComments: true, BatchQueryPrefixes: map[string]string{ "BEGIN TRANSACTION": "COMMIT", }, BatchAsTransaction: true, }) } usql-0.19.19/drivers/qtype.go000066400000000000000000000420071476173253300160440ustar00rootroot00000000000000package drivers import ( "strings" ) // queryMap is the map of SQL prefixes use as queries. 
var queryMap = map[string]bool{ "WITH": true, "PRAGMA": true, "EXPLAIN": true, // show the execution plan of a statement "DESCRIBE": true, // describe (mysql) "DESC": true, // describe (mysql) "FETCH": true, // retrieve rows from a query using a cursor "SELECT": true, // retrieve rows from a table or view "SHOW": true, // show the value of a run-time parameter "ADMIN SHOW": true, "VALUES": true, // compute a set of rows "LIST": true, // list permissions, roles, users (cassandra) "EXEC": true, // execute a stored procedure that returns rows (not postgres) "TABLE": true, // shortcut for select * from
(postgresql) "CALL": true, "FROM": true, } // execMap is the map of SQL prefixes to execute. // // Unless noted, these are extracted from the PostgreSQL docs. // // Note: originally extracted via a script, but maintained by hand as the // documentation for any new queries introduced by PostgreSQL need to be // manually scrutinized for variations. var execMap = map[string]bool{ // cassandra "ALTER KEYSPACE": true, // alter a keyspace "CREATE KEYSPACE": true, // create a keyspace "DROP KEYSPACE": true, // drop a keyspace "BEGIN BATCH": true, // begin batch "APPLY BATCH": true, // apply batch // sqlserver "CREATE LOGIN": true, // create login "CREATE PROCEDURE": true, // create procedure "DROP LOGIN": true, // drop login "DROP PROCEDURE": true, // drop procedure // ql "BEGIN TRANSACTION": true, // begin batch // postgresql "ABORT": true, // abort the current transaction "ALTER AGGREGATE": true, // change the definition of an aggregate function "ALTER COLLATION": true, // change the definition of a collation "ALTER CONVERSION": true, // change the definition of a conversion "ALTER DATABASE": true, // change a database "ALTER DEFAULT PRIVILEGES": true, // define default access privileges "ALTER DOMAIN": true, // change the definition of a domain "ALTER EVENT TRIGGER": true, // change the definition of an event trigger "ALTER EXTENSION": true, // change the definition of an extension "ALTER FOREIGN DATA WRAPPER": true, // change the definition of a foreign-data wrapper "ALTER FOREIGN TABLE": true, // change the definition of a foreign table "ALTER FUNCTION": true, // change the definition of a function "ALTER GROUP": true, // change role name or membership "ALTER INDEX": true, // change the definition of an index "ALTER LANGUAGE": true, // change the definition of a procedural language "ALTER LARGE OBJECT": true, // change the definition of a large object "ALTER MATERIALIZED VIEW": true, // change the definition of a materialized view "ALTER OPERATOR CLASS": true, // change the definition of an operator class "ALTER OPERATOR FAMILY": true, // change the definition of an operator family "ALTER OPERATOR": true, // change the definition of an operator "ALTER POLICY": true, // change the definition of a row level security policy "ALTER ROLE": true, // change a database role "ALTER RULE": true, // change the definition of a rule "ALTER SCHEMA": true, // change the definition of a schema "ALTER SEQUENCE": true, // change the definition of a sequence generator "ALTER SERVER": true, // change the definition of a foreign server "ALTER SYSTEM": true, // change a server configuration parameter "ALTER TABLESPACE": true, // change the definition of a tablespace "ALTER TABLE": true, // change the definition of a table "ALTER TEXT SEARCH CONFIGURATION": true, // change the definition of a text search configuration "ALTER TEXT SEARCH DICTIONARY": true, // change the definition of a text search dictionary "ALTER TEXT SEARCH PARSER": true, // change the definition of a text search parser "ALTER TEXT SEARCH TEMPLATE": true, // change the definition of a text search template "ALTER TRIGGER": true, // change the definition of a trigger "ALTER TYPE": true, // change the definition of a type "ALTER USER MAPPING": true, // change the definition of a user mapping "ALTER USER": true, // change a database role "ALTER VIEW": true, // change the definition of a view "ANALYZE": true, // collect statistics about a database "BEGIN": true, // start a transaction block "CHECKPOINT": true, // force a transaction log checkpoint 
"CLOSE": true, // close a cursor "CLUSTER": true, // cluster a table according to an index "COMMENT": true, // define or change the comment of an object "COMMIT PREPARED": true, // commit a transaction that was earlier prepared for two-phase commit "COMMIT": true, // commit the current transaction "COPY": true, // copy data between a file and a table "CREATE ACCESS METHOD": true, // define a new access method "CREATE AGGREGATE": true, // define a new aggregate function "CREATE CAST": true, // define a new cast "CREATE COLLATION": true, // define a new collation "CREATE CONVERSION": true, // define a new encoding conversion "CREATE DATABASE": true, // create a new database "CREATE DOMAIN": true, // define a new domain "CREATE EVENT TRIGGER": true, // define a new event trigger "CREATE EXTENSION": true, // install an extension "CREATE FOREIGN DATA WRAPPER": true, // define a new foreign-data wrapper "CREATE FOREIGN TABLE": true, // define a new foreign table "CREATE FUNCTION": true, // define a new function "CREATE GROUP": true, // define a new database role "CREATE INDEX": true, // define a new index "CREATE LANGUAGE": true, // define a new procedural language "CREATE MATERIALIZED VIEW": true, // define a new materialized view "CREATE OPERATOR CLASS": true, // define a new operator class "CREATE OPERATOR FAMILY": true, // define a new operator family "CREATE OPERATOR": true, // define a new operator "CREATE POLICY": true, // define a new row level security policy for a table "CREATE ROLE": true, // define a new database role "CREATE RULE": true, // define a new rewrite rule "CREATE SCHEMA": true, // define a new schema "CREATE SEQUENCE": true, // define a new sequence generator "CREATE SERVER": true, // define a new foreign server "CREATE STATISTICS": true, // define extended statistics "CREATE SUBSCRIPTION": true, // define a new subscription "CREATE TABLE AS": true, // define a new table from the results of a query "CREATE TABLESPACE": true, // define a new tablespace "CREATE TABLE": true, // define a new table "CREATE TEXT SEARCH CONFIGURATION": true, // define a new text search configuration "CREATE TEXT SEARCH DICTIONARY": true, // define a new text search dictionary "CREATE TEXT SEARCH PARSER": true, // define a new text search parser "CREATE TEXT SEARCH TEMPLATE": true, // define a new text search template "CREATE TRANSFORM": true, // define a new transform "CREATE TRIGGER": true, // define a new trigger "CREATE TYPE": true, // define a new data type "CREATE USER MAPPING": true, // define a new mapping of a user to a foreign server "CREATE USER": true, // define a new database role "CREATE VIEW": true, // define a new view "DEALLOCATE ALL": true, // deallocate all prepared statements "DEALLOCATE": true, // deallocate a prepared statement "DECLARE": true, // define a cursor "DELETE": true, // delete rows of a table "DISCARD": true, // discard session state "DO": true, // execute an anonymous code block "DROP ACCESS METHOD": true, // remove an access method "DROP AGGREGATE": true, // remove an aggregate function "DROP CAST": true, // remove a cast "DROP COLLATION": true, // remove a collation "DROP CONVERSION": true, // remove a conversion "DROP DATABASE": true, // remove a database "DROP DOMAIN": true, // remove a domain "DROP EVENT TRIGGER": true, // remove an event trigger "DROP EXTENSION": true, // remove an extension "DROP FOREIGN DATA WRAPPER": true, // remove a foreign-data wrapper "DROP FOREIGN TABLE": true, // remove a foreign table "DROP FUNCTION": true, // remove a function 
"DROP GROUP": true, // remove a database role "DROP INDEX": true, // remove an index "DROP LANGUAGE": true, // remove a procedural language "DROP MATERIALIZED VIEW": true, // remove a materialized view "DROP OPERATOR CLASS": true, // remove an operator class "DROP OPERATOR FAMILY": true, // remove an operator family "DROP OPERATOR": true, // remove an operator "DROP OWNED": true, // remove database objects owned by a database role "DROP POLICY": true, // remove a row level security policy from a table "DROP PUBLICATION": true, // remove a publication "DROP ROLE": true, // remove a database role "DROP RULE": true, // remove a rewrite rule "DROP SCHEMA": true, // remove a schema "DROP SEQUENCE": true, // remove a sequence "DROP SERVER": true, // remove a foreign server descriptor "DROP STATISTICS": true, // remove extended statistics "DROP SUBSCRIPTION": true, // remove a subscription "DROP TABLESPACE": true, // remove a tablespace "DROP TABLE": true, // remove a table "DROP TEXT SEARCH CONFIGURATION": true, // remove a text search configuration "DROP TEXT SEARCH DICTIONARY": true, // remove a text search dictionary "DROP TEXT SEARCH PARSER": true, // remove a text search parser "DROP TEXT SEARCH TEMPLATE": true, // remove a text search template "DROP TRANSFORM": true, // remove a transform "DROP TRIGGER": true, // remove a trigger "DROP TYPE": true, // remove a data type "DROP USER MAPPING": true, // remove a user mapping for a foreign server "DROP USER": true, // remove a database role "DROP VIEW": true, // remove a view "END": true, // commit the current transaction "EXECUTE": true, // execute a prepared statement "GRANT": true, // define access privileges "IMPORT FOREIGN SCHEMA": true, // import table definitions from a foreign server "INSERT": true, // create new rows in a table "LISTEN": true, // listen for a notification "LOAD": true, // load a shared library file "LOCK": true, // lock a table "MOVE": true, // position a cursor "NOTIFY": true, // generate a notification "PREPARE TRANSACTION": true, // prepare the current transaction for two-phase commit "PREPARE": true, // prepare a statement for execution "REASSIGN OWNED": true, // change the ownership of database objects owned by a database role "REFRESH MATERIALIZED VIEW": true, // replace the contents of a materialized view "REINDEX": true, // rebuild indexes "RELEASE": true, // destroy a previously defined savepoint "RESET": true, // restore the value of a run-time parameter to the default value "REVOKE": true, // remove access privileges "ROLLBACK PREPARED": true, // cancel a transaction that was earlier prepared for two-phase commit "ROLLBACK TO SAVEPOINT": true, // roll back to a savepoint "ROLLBACK": true, // abort the current transaction "SAVEPOINT": true, // define a new savepoint within the current transaction "SECURITY LABEL": true, // define or change a security label applied to an object "SELECT INTO": true, // define a new table from the results of a query "SET CONSTRAINTS": true, // set constraint check timing for the current transaction "SET ROLE": true, // set the current user identifier of the current session "SET SESSION AUTHORIZATION": true, // set the session user identifier and the current user identifier of the current session "SET TRANSACTION": true, // set the characteristics of the current transaction "SET": true, // change a run-time parameter "START TRANSACTION": true, // start a transaction block "TRUNCATE": true, // empty a table or set of tables "UNLISTEN": true, // stop listening for a notification 
"UPDATE": true, // update rows of a table "VACUUM": true, // garbage-collect and optionally analyze a database // oracle "ADMINISTER KEY MANAGEMENT": true, "ALTER ANALYTIC VIEW": true, "ALTER ATTRIBUTE DIMENSION": true, "ALTER AUDIT POLICY": true, "ALTER CLUSTER": true, "ALTER DATABASE DICTIONARY": true, "ALTER DATABASE LINK": true, "ALTER DIMENSION": true, "ALTER DISKGROUP": true, "ALTER FLASHBACK ARCHIVE": true, "ALTER HEIRARCHY": true, "ALTER INMEMORY JOIN GROUP": true, "ALTER JAVA": true, "ALTER LIBRARY": true, "ALTER LOCKDOWN PROFILE": true, "ALTER MATERIALIZED VIEW LOG": true, "ALTER MATERIALIZED ZONEMAP": true, "ALTER PACKAGE": true, "ALTER PLUGGABLE DATABASE": true, "ALTER PROCEDURE": true, "ALTER PROFILE": true, "ALTER RESOURCE COST": true, "ALTER ROLLBACK SEGMENT": true, "ALTER SESSION": true, "ALTER SYNONYM": true, "ALTER TABLESPACE SET": true, "ASSOCIATE STATISTICS": true, } // createIgnore are parts of the query exec type after CREATE to ignore. var createIgnore = map[string]bool{ "DEFAULT": true, "GLOBAL": true, "LOCAL": true, "OR": true, "PROCEDURAL": true, "RECURSIVE": true, "REPLACE": true, "TEMPORARY": true, "TEMP": true, "TRUSTED": true, "UNIQUE": true, "UNLOGGED": true, } // QueryExecType is the default way to determine the "EXEC" prefix for a SQL // query and whether or not it should be Exec'd or Query'd. func QueryExecType(prefix, sqlstr string) (string, bool) { if prefix == "" { return "EXEC", false } s := strings.Split(prefix, " ") if len(s) > 0 { // check query map if _, ok := queryMap[s[0]]; ok { typ := s[0] switch { case typ == "SELECT" && len(s) >= 2 && s[1] == "INTO": return "SELECT INTO", false case typ == "PRAGMA": return typ, !strings.ContainsRune(sqlstr, '=') } return typ, true } // normalize prefixes switch s[0] { // CREATE statements have a large number of variants case "CREATE": n := []string{"CREATE"} for _, x := range s[1:] { if _, ok := createIgnore[x]; ok { continue } n = append(n, x) } s = n case "DROP": // "DROP [PROCEDURAL] LANGUAGE" => "DROP LANGUAGE" n := []string{"DROP"} for _, x := range s[1:] { if x == "PROCEDURAL" { continue } n = append(n, x) } s = n } // find longest match for i := len(s); i > 0; i-- { typ := strings.Join(s[:i], " ") if _, ok := execMap[typ]; ok { return typ, false } } } return s[0], false } usql-0.19.19/drivers/ramsql/000077500000000000000000000000001476173253300156475ustar00rootroot00000000000000usql-0.19.19/drivers/ramsql/ramsql.go000066400000000000000000000004211476173253300174720ustar00rootroot00000000000000// Package ramsql defines and registers usql's RamSQL driver. // // See: https://github.com/proullon/ramsql package ql import ( _ "github.com/proullon/ramsql/driver" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("ramsql", drivers.Driver{}) } usql-0.19.19/drivers/sapase/000077500000000000000000000000001476173253300156245ustar00rootroot00000000000000usql-0.19.19/drivers/sapase/sapase.go000066400000000000000000000024041476173253300174270ustar00rootroot00000000000000// Package sapase defines and registers usql's SAP ASE driver. 
// // See: https://github.com/thda/tds package sapase import ( "context" "errors" "strconv" "strings" "github.com/thda/tds" // DRIVER: tds "github.com/xo/usql/drivers" ) func init() { drivers.Register("tds", drivers.Driver{ AllowMultilineComments: true, RequirePreviousPassword: true, LexerName: "tsql", Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext(ctx, `SELECT @@version`).Scan(&ver) if err != nil { return "", err } return ver, nil }, ChangePassword: func(db drivers.DB, user, newpw, oldpw string) error { if user != "" { return errors.New("Cannot change password for another user") } _, err := db.Exec(`exec sp_password '` + oldpw + `', '` + newpw + `'`) return err }, Err: func(err error) (string, string) { if e, ok := err.(tds.SybError); ok { return strconv.Itoa(int(e.MsgNumber)), e.Message } msg := err.Error() if i := strings.LastIndex(msg, "tds:"); i != -1 { msg = msg[i:] } return "", msg }, IsPasswordErr: func(err error) bool { return strings.Contains(err.Error(), "Login failed") }, Process: drivers.StripTrailingSemicolon, }) } usql-0.19.19/drivers/saphana/000077500000000000000000000000001476173253300157635ustar00rootroot00000000000000usql-0.19.19/drivers/saphana/saphana.go000066400000000000000000000015331476173253300177270ustar00rootroot00000000000000// Package saphana defines and registers usql's SAP HANA driver. // // See: https://github.com/SAP/go-hdb package saphana import ( "context" "strconv" _ "github.com/SAP/go-hdb/driver" // DRIVER: hdb "github.com/xo/usql/drivers" ) func init() { drivers.Register("hdb", drivers.Driver{ AllowMultilineComments: true, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string if err := db.QueryRowContext(ctx, `SELECT version FROM m_database`).Scan(&ver); err != nil { return "", err } return "SAP HANA " + ver, nil }, Err: func(err error) (string, string) { code, msg := "", err.Error() if e, ok := err.(interface { Code() int }); ok { code = strconv.Itoa(e.Code()) } if e, ok := err.(interface { Text() string }); ok { msg = e.Text() } return code, msg }, }) } usql-0.19.19/drivers/snowflake/000077500000000000000000000000001476173253300163415ustar00rootroot00000000000000usql-0.19.19/drivers/snowflake/snowflake.go000066400000000000000000000032671476173253300206710ustar00rootroot00000000000000// Package snowflake defines and registers usql's Snowflake driver. // // See: https://github.com/snowflakedb/gosnowflake package snowflake import ( "io" "strconv" "github.com/snowflakedb/gosnowflake" // DRIVER "github.com/xo/tblfmt" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" "github.com/xo/usql/env" ) func init() { gosnowflake.GetLogger().SetOutput(io.Discard) newReader := infos.New( infos.WithPlaceholder(func(int) string { return "?" 
}), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.SequenceColumnsIncrement: "''", }), infos.WithFunctions(false), infos.WithIndexes(false), infos.WithConstraints(false), infos.WithColumnPrivileges(false), ) drivers.Register("snowflake", drivers.Driver{ AllowMultilineComments: true, Err: func(err error) (string, string) { if e, ok := err.(*gosnowflake.SnowflakeError); ok { return strconv.Itoa(e.Number), e.Message } return "", err.Error() }, NewMetadataReader: newReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { writerOpts := []metadata.WriterOption{ metadata.WithListAllDbs(func(pattern string, verbose bool) error { return listAllDbs(db, w, pattern, verbose) }), } return metadata.NewDefaultWriter(newReader(db, opts...), writerOpts...)(db, w) }, }) } func listAllDbs(db drivers.DB, w io.Writer, pattern string, verbose bool) error { rows, err := db.Query("SHOW databases") if err != nil { return err } defer rows.Close() params := env.Pall() params["title"] = "List of databases" return tblfmt.EncodeAll(w, rows, params) } usql-0.19.19/drivers/spanner/000077500000000000000000000000001476173253300160165ustar00rootroot00000000000000usql-0.19.19/drivers/spanner/spanner.go000066400000000000000000000004551476173253300200170ustar00rootroot00000000000000// Package spanner defines and registers usql's Google Spanner driver. // // See: https://github.com/googleapis/go-sql-spanner package spanner import ( _ "github.com/googleapis/go-sql-spanner" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("spanner", drivers.Driver{}) } usql-0.19.19/drivers/sqlite3/000077500000000000000000000000001476173253300157345ustar00rootroot00000000000000usql-0.19.19/drivers/sqlite3/sqlite3.go000066400000000000000000000022171476173253300176510ustar00rootroot00000000000000// Package sqlite3 defines and registers usql's SQLite3 driver. Requires CGO. // // See: https://github.com/mattn/go-sqlite3 // Group: base package sqlite3 import ( "context" "strconv" "github.com/mattn/go-sqlite3" // DRIVER "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/sqlite3/sqshared" ) func init() { drivers.Register("sqlite3", drivers.Driver{ AllowMultilineComments: true, ForceParams: drivers.ForceQueryParameters([]string{ "loc", "auto", }), Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext(ctx, `SELECT sqlite_version()`).Scan(&ver) if err != nil { return "", err } return "SQLite3 " + ver, nil }, Err: func(err error) (string, string) { if e, ok := err.(sqlite3.Error); ok { return strconv.Itoa(int(e.Code)), e.Error() } code, msg := "", err.Error() if e, ok := err.(sqlite3.ErrNo); ok { code = strconv.Itoa(int(e)) } return code, msg }, ConvertBytes: sqshared.ConvertBytes, NewMetadataReader: sqshared.NewMetadataReader, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), }) } usql-0.19.19/drivers/sqlite3/sqshared/000077500000000000000000000000001476173253300175465ustar00rootroot00000000000000usql-0.19.19/drivers/sqlite3/sqshared/reader.go000066400000000000000000000174441476173253300213510ustar00rootroot00000000000000package sqshared import ( "database/sql" "fmt" "strings" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" ) type MetadataReader struct { metadata.LoggingReader limit int } // NewMetadataReader creates the metadata reader for sqlite3 databases. 
func NewMetadataReader(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { return &MetadataReader{ LoggingReader: metadata.NewLoggingReader(db, opts...), } } var ( _ metadata.BasicReader = &MetadataReader{} _ metadata.FunctionReader = &MetadataReader{} _ metadata.FunctionColumnReader = &MetadataReader{} _ metadata.IndexReader = &MetadataReader{} _ metadata.IndexColumnReader = &MetadataReader{} ) func (r *MetadataReader) SetLimit(l int) { r.limit = l } // Columns from selected catalog (or all, if empty), matching schemas and tables func (r MetadataReader) Columns(f metadata.Filter) (*metadata.ColumnSet, error) { tables, err := r.Tables(metadata.Filter{Catalog: f.Catalog, Schema: f.Schema, Name: f.Parent}) if err != nil { return nil, err } results := []metadata.Column{} for tables.Next() { table := tables.Get() qstr := `SELECT cid, name, type, CASE WHEN "notnull" = 1 THEN 'NO' ELSE 'YES' END, COALESCE(dflt_value, '') FROM pragma_table_info(?)` rows, closeRows, err := r.query(qstr, []string{}, "name", table.Name) if err != nil { return nil, err } defer closeRows() rec := metadata.Column{ Catalog: table.Catalog, Schema: table.Schema, Table: table.Name, } for rows.Next() { err = rows.Scan( &rec.OrdinalPosition, &rec.Name, &rec.DataType, &rec.IsNullable, &rec.Default, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } } return metadata.NewColumnSet(results), nil } func (r MetadataReader) Tables(f metadata.Filter) (*metadata.TableSet, error) { qstr := `SELECT '' AS table_catalog, '' AS table_schem, table_name, table_type FROM ( SELECT name AS table_name, UPPER(type) AS table_type FROM sqlite_master WHERE name NOT LIKE 'sqlite\_%' ESCAPE '\' AND UPPER(type) IN ('TABLE', 'VIEW') UNION ALL SELECT name AS table_name, 'GLOBAL TEMPORARY' AS table_type FROM sqlite_temp_master UNION ALL SELECT name AS table_name, 'SYSTEM TABLE' AS table_type FROM sqlite_master WHERE name LIKE 'sqlite\_%' ESCAPE '\' AND UPPER(type) IN ('TABLE', 'VIEW') UNION ALL SELECT name AS table_name, 'SYSTEM TABLE' AS table_type FROM pragma_module_list )` conds := []string{} vals := []interface{}{} if f.Catalog != "" { vals = append(vals, f.Catalog) conds = append(conds, "table_catalog = ?") } if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, "table_schema LIKE ?") } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "table_name LIKE ?") } if len(f.Types) != 0 { pholders := []string{} for _, t := range f.Types { vals = append(vals, t) pholders = append(pholders, "?") } if len(pholders) != 0 { conds = append(conds, "table_type IN ("+strings.Join(pholders, ", ")+")") } } rows, closeRows, err := r.query(qstr, conds, "table_type, table_name", vals...) if err != nil { return nil, err } defer closeRows() results := []metadata.Table{} for rows.Next() { rec := metadata.Table{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Name, &rec.Type) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewTableSet(results), nil } func (r MetadataReader) Schemas(f metadata.Filter) (*metadata.SchemaSet, error) { qstr := `SELECT name AS schema_name, '' AS catalog_name FROM pragma_database_list` conds := []string{} vals := []interface{}{} if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "schema_name LIKE ?") } rows, closeRows, err := r.query(qstr, conds, "seq", vals...) 
if err != nil { return nil, err } defer closeRows() results := []metadata.Schema{} for rows.Next() { rec := metadata.Schema{} err = rows.Scan(&rec.Schema, &rec.Catalog) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewSchemaSet(results), nil } func (r MetadataReader) Functions(f metadata.Filter) (*metadata.FunctionSet, error) { qstr := `SELECT name AS specific_name, name AS routine_name, type AS routine_type FROM pragma_function_list` conds := []string{} vals := []interface{}{} if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "name LIKE ?") } if len(f.Types) != 0 { pholders := []string{} for _, t := range f.Types { vals = append(vals, t) pholders = append(pholders, "?") } if len(pholders) != 0 { conds = append(conds, "type IN ("+strings.Join(pholders, ", ")+")") } } rows, closeRows, err := r.query(qstr, conds, "name, type", vals...) if err != nil { return nil, err } defer closeRows() results := []metadata.Function{} for rows.Next() { rec := metadata.Function{} err = rows.Scan( &rec.SpecificName, &rec.Name, &rec.Type, ) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewFunctionSet(results), nil } func (r MetadataReader) FunctionColumns(metadata.Filter) (*metadata.FunctionColumnSet, error) { return &metadata.FunctionColumnSet{}, nil } func (r MetadataReader) Indexes(f metadata.Filter) (*metadata.IndexSet, error) { qstr := `SELECT m.name, i.name, CASE WHEN i."unique" = 1 THEN 'YES' ELSE 'NO' END, CASE WHEN i.origin = 'pk' THEN 'YES' ELSE 'NO' END FROM sqlite_master m JOIN pragma_index_list(m.name) i` conds := []string{"m.type = 'table'"} vals := []interface{}{} if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, "m.name LIKE ?") } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "i.name LIKE ?") } rows, closeRows, err := r.query(qstr, conds, "m.name, i.seq", vals...) if err != nil { return nil, err } defer closeRows() results := []metadata.Index{} for rows.Next() { rec := metadata.Index{} err = rows.Scan(&rec.Table, &rec.Name, &rec.IsUnique, &rec.IsPrimary) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexSet(results), nil } func (r MetadataReader) IndexColumns(f metadata.Filter) (*metadata.IndexColumnSet, error) { qstr := `SELECT m.name, i.name, ic.name, ic.seqno FROM sqlite_master m JOIN pragma_index_list(m.name) i JOIN pragma_index_xinfo(i.name) ic` conds := []string{"m.type = 'table' AND ic.cid >= 0"} vals := []interface{}{} if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, "m.name LIKE ?") } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, "i.name LIKE ?") } rows, closeRows, err := r.query(qstr, conds, "m.name, i.seq, ic.seqno", vals...) 
if err != nil { return nil, err } defer closeRows() results := []metadata.IndexColumn{} for rows.Next() { rec := metadata.IndexColumn{} err = rows.Scan(&rec.Table, &rec.IndexName, &rec.Name, &rec.OrdinalPosition) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexColumnSet(results), nil } func (r MetadataReader) query(qstr string, conds []string, order string, vals ...interface{}) (*sql.Rows, func(), error) { if len(conds) != 0 { qstr += "\nWHERE " + strings.Join(conds, " AND ") } if order != "" { qstr += "\nORDER BY " + order } if r.limit != 0 { qstr += fmt.Sprintf("\nLIMIT %d", r.limit) } return r.Query(qstr, vals...) } usql-0.19.19/drivers/sqlite3/sqshared/reader_test.go000066400000000000000000000204661476173253300224060ustar00rootroot00000000000000package sqshared import ( "bufio" "context" "database/sql" "fmt" "log" "os" "os/user" "path" "strings" "testing" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/client" "github.com/docker/docker/pkg/archive" "github.com/xo/usql/drivers/metadata" ) var ( db *sql.DB reader *MetadataReader ) func TestMain(m *testing.M) { err := createDb("testdata", "sakila.db") if err != nil { log.Fatalf("Could not prepare the database: %s", err) } db, err = sql.Open("sqlite3", "testdata/sakila.db") if err != nil { log.Fatalf("Could not open the database: %s", err) } reader = &MetadataReader{LoggingReader: metadata.NewLoggingReader(db)} code := m.Run() os.Exit(code) } func createDb(location, name string) error { ctx := context.Background() cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation()) if err != nil { return err } tar, err := archive.TarWithOptions("../metadata/testdata/docker", &archive.TarOptions{}) if err != nil { return err } baseImage := "centos:7" schemaURL := "https://raw.githubusercontent.com/jOOQ/sakila/main/sqlite-sakila-db/sqlite-sakila-schema.sql" target := "/schema" buildOptions := types.ImageBuildOptions{ Tags: []string{"usql-sqlite"}, BuildArgs: map[string]*string{ "BASE_IMAGE": &baseImage, "SCHEMA_URL": &schemaURL, "TARGET": &target, }, } res, err := cli.ImageBuild(ctx, tar, buildOptions) if err != nil { return err } defer res.Body.Close() scanner := bufio.NewScanner(res.Body) for scanner.Scan() { } cwd, err := os.Getwd() if err != nil { return err } u, err := user.Current() if err != nil { return err } resp, err := cli.ContainerCreate(ctx, &container.Config{ Image: "usql-sqlite", Cmd: []string{"bash", "-xc", "sqlite3 -batch -echo -init /schema/sqlite-sakila-schema.sql /data/" + name}, User: u.Uid + ":" + u.Gid, NetworkDisabled: true, }, &container.HostConfig{ Binds: []string{ path.Join(cwd, location) + ":/data", }, }, nil, nil, "") if err != nil { return err } err = cli.ContainerStart(ctx, resp.ID, types.ContainerStartOptions{}) if err != nil { return err } statusCh, errCh := cli.ContainerWait(ctx, resp.ID, container.WaitConditionNotRunning) select { case err := <-errCh: if err != nil { return err } case status := <-statusCh: fmt.Println(status.StatusCode, status.Error) } //out, err := cli.ContainerLogs(ctx, resp.ID, types.ContainerLogsOptions{ShowStdout: true, ShowStderr: true}) //if err != nil { // return err //} //_, err = stdcopy.StdCopy(os.Stdout, os.Stderr, out) //if err != nil { // return err //} return cli.ContainerRemove(ctx, resp.ID, types.ContainerRemoveOptions{}) } func TestSchemas(t *testing.T) { result, err := reader.Schemas(metadata.Filter{}) 
if err != nil { log.Fatalf("Could not read schemas: %v", err) } names := []string{} for result.Next() { names = append(names, result.Get().Schema) } actual := strings.Join(names, ", ") expected := "main" if actual != expected { t.Errorf("Wrong schema names, expected:\n %v\ngot:\n %v", expected, names) } } func TestTables(t *testing.T) { result, err := reader.Tables(metadata.Filter{Types: []string{"BASE TABLE", "TABLE", "VIEW"}}) if err != nil { log.Fatalf("Could not read tables: %v", err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") expected := "actor, address, category, city, country, customer, film, film_actor, film_category, film_text, inventory, language, payment, rental, staff, store, customer_list, film_list, sales_by_film_category, sales_by_store, staff_list" if actual != expected { t.Errorf("Wrong table names, expected:\n %v\ngot:\n %v", expected, names) } } func TestColumns(t *testing.T) { result, err := reader.Columns(metadata.Filter{Parent: "film%"}) if err != nil { log.Fatalf("Could not read columns: %v", err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") expected := "description, film_id, language_id, last_update, length, original_language_id, rating, release_year, rental_duration, rental_rate, replacement_cost, special_features, title, actor_id, film_id, last_update, category_id, film_id, last_update, description, film_id, title, FID, actors, category, description, length, price, rating, title" if actual != expected { t.Errorf("Wrong column names, expected:\n %v, got:\n %v", expected, names) } } func TestFunctions(t *testing.T) { result, err := reader.Functions(metadata.Filter{}) if err != nil { log.Fatalf("Could not read functions: %v", err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") expected := "abs, auth_enabled, auth_user_add, auth_user_change, auth_user_delete, authenticate, avg, changes, char, coalesce, count, count, cume_dist, current_date, current_time, current_timestamp, date, datetime, dense_rank, first_value, fts3_tokenizer, fts3_tokenizer, glob, group_concat, group_concat, hex, ifnull, instr, julianday, lag, lag, lag, last_insert_rowid, last_value, lead, lead, lead, length, like, like, likelihood, likely, load_extension, load_extension, lower, ltrim, ltrim, match, matchinfo, matchinfo, max, max, min, min, nth_value, ntile, nullif, offsets, optimize, percent_rank, printf, quote, random, randomblob, rank, replace, round, round, row_number, rtreecheck, rtreedepth, rtreenode, rtrim, rtrim, snippet, sqlite_compileoption_get, sqlite_compileoption_used, sqlite_log, sqlite_source_id, sqlite_version, strftime, substr, substr, sum, time, total, total_changes, trim, trim, typeof, unicode, unlikely, upper, zeroblob" if actual != expected { t.Errorf("Wrong function names, expected:\n %v\ngot:\n %v", expected, names) } } func TestIndexes(t *testing.T) { result, err := reader.Indexes(metadata.Filter{}) if err != nil { log.Fatalf("Could not read indexes: %v", err) } names := []string{} for result.Next() { names = append(names, result.Get().Table+"."+result.Get().Name) } actual := strings.Join(names, ", ") expected := "actor.idx_actor_last_name, actor.sqlite_autoindex_actor_1, address.idx_fk_city_id, address.sqlite_autoindex_address_1, category.sqlite_autoindex_category_1, city.idx_fk_country_id, city.sqlite_autoindex_city_1, 
country.sqlite_autoindex_country_1, customer.idx_customer_last_name, customer.idx_customer_fk_address_id, customer.idx_customer_fk_store_id, customer.sqlite_autoindex_customer_1, film.idx_fk_original_language_id, film.idx_fk_language_id, film.sqlite_autoindex_film_1, film_actor.idx_fk_film_actor_actor, film_actor.idx_fk_film_actor_film, film_actor.sqlite_autoindex_film_actor_1, film_category.idx_fk_film_category_category, film_category.idx_fk_film_category_film, film_category.sqlite_autoindex_film_category_1, film_text.sqlite_autoindex_film_text_1, inventory.idx_fk_film_id_store_id, inventory.idx_fk_film_id, inventory.sqlite_autoindex_inventory_1, language.sqlite_autoindex_language_1, payment.idx_fk_customer_id, payment.idx_fk_staff_id, payment.sqlite_autoindex_payment_1, rental.idx_rental_uq, rental.idx_rental_fk_staff_id, rental.idx_rental_fk_customer_id, rental.idx_rental_fk_inventory_id, rental.sqlite_autoindex_rental_1, staff.idx_fk_staff_address_id, staff.idx_fk_staff_store_id, staff.sqlite_autoindex_staff_1, store.idx_fk_store_address, store.idx_store_fk_manager_staff_id, store.sqlite_autoindex_store_1" if actual != expected { t.Errorf("Wrong index names, expected:\n %v\ngot:\n %v", expected, names) } } func TestIndexColumns(t *testing.T) { result, err := reader.IndexColumns(metadata.Filter{Name: "idx%"}) if err != nil { log.Fatalf("Could not read index columns: %v", err) } names := []string{} for result.Next() { names = append(names, result.Get().Name) } actual := strings.Join(names, ", ") expected := "last_name, city_id, country_id, last_name, address_id, store_id, original_language_id, language_id, actor_id, film_id, category_id, film_id, store_id, film_id, film_id, customer_id, staff_id, rental_date, inventory_id, customer_id, staff_id, customer_id, inventory_id, address_id, store_id, address_id, manager_staff_id" if actual != expected { t.Errorf("Wrong index column names, expected:\n %v, got:\n %v", expected, names) } } usql-0.19.19/drivers/sqlite3/sqshared/sqshared.go000066400000000000000000000042661476173253300217170ustar00rootroot00000000000000// Package sqshared contains shared types for the sqlite3 and moderncsqlite // drivers. package sqshared import ( "database/sql/driver" "errors" "fmt" "strings" "time" ) // ConvertBytes is the byte formatter func for sqlite3 databases. func ConvertBytes(buf []byte, tfmt string) (string, error) { // attempt to convert buf if it matches a time format, and if it // does, then return a formatted time string. s := string(buf) if s != "" && strings.TrimSpace(s) != "" { t := new(Time) if err := t.Scan(buf); err == nil { return time.Time(*t).Format(tfmt), nil } } return s, nil } // Time provides a type that will correctly scan the various timestamps // values stored by the github.com/mattn/go-sqlite3 driver for time.Time // values, as well as correctly satisfying the sql/driver/Valuer interface. type Time time.Time // Value satisfies the Valuer interface. func (t *Time) Value() (driver.Value, error) { return t, nil } // Scan satisfies the Scanner interface. func (t *Time) Scan(v interface{}) error { switch x := v.(type) { case time.Time: *t = Time(x) return nil case []byte: return t.Parse(string(x)) case string: return t.Parse(x) } return fmt.Errorf("cannot convert type %T to Time", v) } // Parse attempts to Parse string s to t. 
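// Illustrative sketch (not part of the tar'd sources): ConvertBytes above
// recognizes any layout listed in SQLiteTimestampFormats below and re-renders
// the value with the display format usql passes in; anything that does not
// parse is returned unchanged. The inputs are hypothetical; the import path
// github.com/xo/usql/drivers/sqlite3/sqshared is the one used elsewhere in
// this tree.
//
//	s, err := sqshared.ConvertBytes([]byte("2021-07-04 09:30:00"), "Jan 2, 2006 3:04 PM")
//	// s == "Jul 4, 2021 9:30 AM", err == nil
//	s, err = sqshared.ConvertBytes([]byte("not a timestamp"), "Jan 2, 2006 3:04 PM")
//	// s == "not a timestamp", err == nil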
func (t *Time) Parse(s string) error { if s == "" { return nil } for _, f := range SQLiteTimestampFormats { if z, err := time.Parse(f, s); err == nil { *t = Time(z) return nil } } return errors.New("could not parse time") } // SQLiteTimestampFormats is timestamp formats understood by both this module // and SQLite. The first format in the slice will be used when saving time // values into the database. When parsing a string from a timestamp or datetime // column, the formats are tried in order. var SQLiteTimestampFormats = []string{ // By default, store timestamps with whatever timezone they come with. // When parsed, they will be returned with the same timezone. "2006-01-02 15:04:05.999999999-07:00", "2006-01-02T15:04:05.999999999-07:00", "2006-01-02 15:04:05.999999999", "2006-01-02T15:04:05.999999999", "2006-01-02 15:04:05", "2006-01-02T15:04:05", "2006-01-02 15:04", "2006-01-02T15:04", "2006-01-02", } usql-0.19.19/drivers/sqlserver/000077500000000000000000000000001476173253300163765ustar00rootroot00000000000000usql-0.19.19/drivers/sqlserver/reader.go000066400000000000000000000146351476173253300202000ustar00rootroot00000000000000package sqlserver import ( "database/sql" "fmt" "strings" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" ) type metaReader struct { metadata.LoggingReader limit int } var _ metadata.CatalogReader = &metaReader{} var _ metadata.IndexReader = &metaReader{} var _ metadata.IndexColumnReader = &metaReader{} func NewReader(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { ir := infos.New( infos.WithPlaceholder(placeholder), infos.WithIndexes(false), infos.WithSequences(false), infos.WithConstraints(false), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.FunctionsSecurityType: "''", }), infos.WithSystemSchemas([]string{ "db_accessadmin", "db_backupoperator", "db_datareader", "db_datawriter", "db_ddladmin", "db_denydatareader", "db_denydatawriter", "db_owner", "db_securityadmin", "INFORMATION_SCHEMA", "sys", }), infos.WithCurrentSchema("schema_name()"), infos.WithDataTypeFormatter(dataTypeFormatter), infos.WithUsagePrivileges(false), )(db, opts...) 
mr := &metaReader{ LoggingReader: metadata.NewLoggingReader(db, opts...), } return metadata.NewPluginReader(ir, mr) } func dataTypeFormatter(col metadata.Column) string { switch col.DataType { case "numeric", "decimal": if col.ColumnSize == 18 && col.DecimalDigits == 0 { return col.DataType } else { return fmt.Sprintf("%s(%d,%d)", col.DataType, col.ColumnSize, col.DecimalDigits) } case "datetimeoffset", "datetime2", "time": if col.ColumnSize == 7 { return col.DataType } else { return fmt.Sprintf("%s(%d)", col.DataType, col.ColumnSize) } case "char", "nchar", "binary": if col.ColumnSize == 1 { return col.DataType } else { return fmt.Sprintf("%s(%d)", col.DataType, col.ColumnSize) } case "varchar", "nvarchar", "varbinary": if col.ColumnSize == -1 { return col.DataType + "(max)" } else if col.ColumnSize == 1 { return col.DataType } else { return fmt.Sprintf("%s(%d)", col.DataType, col.ColumnSize) } default: return col.DataType } } func (r *metaReader) SetLimit(l int) { r.limit = l } func (r metaReader) Catalogs(metadata.Filter) (*metadata.CatalogSet, error) { qstr := `SELECT name FROM sys.databases` rows, closeRows, err := r.query(qstr, []string{}, "name") if err != nil { return nil, err } defer closeRows() results := []metadata.Catalog{} for rows.Next() { rec := metadata.Catalog{} err = rows.Scan(&rec.Catalog) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewCatalogSet(results), nil } func (r metaReader) Indexes(f metadata.Filter) (*metadata.IndexSet, error) { qstr := ` SELECT db_name(), s.name, t.name, COALESCE(i.name, ''), CASE WHEN i.is_primary_key = 1 THEN 'YES' ELSE 'NO' END, CASE WHEN i.is_unique = 1 THEN 'YES' ELSE 'NO' END, i.type_desc FROM sys.schemas s JOIN sys.tables t on t.schema_id = s.schema_id JOIN sys.indexes i ON i.object_id = t.object_id ` conds := []string{} vals := []interface{}{} if f.OnlyVisible { conds = append(conds, "s.name = schema_name()") } if !f.WithSystem { conds = append(conds, "s.name NOT IN ('db_accessadmin', 'db_backupoperator', 'db_datareader', 'db_datawriter', 'db_ddladmin', 'db_denydatareader', 'db_denydatawriter', 'db_owner', 'db_securityadmin', 'INFORMATION_SCHEMA', 'sys')") } if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, fmt.Sprintf("s.name LIKE @p%d", len(vals))) } if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, fmt.Sprintf("t.name LIKE @p%d", len(vals))) } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, fmt.Sprintf("i.name LIKE @p%d", len(vals))) } rows, closeRows, err := r.query(qstr, conds, "s.name, t.name, i.name", vals...) 
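// r.query (the helper at the bottom of this file) appends the WHERE, ORDER BY, and row-limit clauses before executing the statement.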
if err != nil { return nil, err } defer closeRows() results := []metadata.Index{} for rows.Next() { rec := metadata.Index{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Table, &rec.Name, &rec.IsUnique, &rec.IsPrimary, &rec.Type) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexSet(results), nil } func (r metaReader) IndexColumns(f metadata.Filter) (*metadata.IndexColumnSet, error) { qstr := ` SELECT db_name(), s.name, t.name, COALESCE(i.name, ''), c.name, t.name, ic.key_ordinal FROM sys.schemas s JOIN sys.tables t on t.schema_id = s.schema_id JOIN sys.indexes i ON i.object_id = t.object_id JOIN sys.index_columns ic ON i.object_id = ic.object_id and i.index_id = ic.index_id JOIN sys.columns c ON ic.object_id = c.object_id and ic.column_id = c.column_id JOIN sys.types ty ON ty.user_type_id = c.user_type_id ` conds := []string{} vals := []interface{}{} if f.OnlyVisible { conds = append(conds, "s.name = schema_name()") } if !f.WithSystem { conds = append(conds, "s.name NOT IN ('db_accessadmin', 'db_backupoperator', 'db_datareader', 'db_datawriter', 'db_ddladmin', 'db_denydatareader', 'db_denydatawriter', 'db_owner', 'db_securityadmin', 'INFORMATION_SCHEMA', 'sys')") } if f.Schema != "" { vals = append(vals, f.Schema) conds = append(conds, fmt.Sprintf("s.name LIKE @p%d", len(vals))) } if f.Parent != "" { vals = append(vals, f.Parent) conds = append(conds, fmt.Sprintf("t.name LIKE @p%d", len(vals))) } if f.Name != "" { vals = append(vals, f.Name) conds = append(conds, fmt.Sprintf("i.name LIKE @p%d", len(vals))) } rows, closeRows, err := r.query(qstr, conds, "s.name, t.name, i.name, ic.index_column_id", vals...) if err != nil { return nil, err } defer closeRows() results := []metadata.IndexColumn{} for rows.Next() { rec := metadata.IndexColumn{} err = rows.Scan(&rec.Catalog, &rec.Schema, &rec.Table, &rec.IndexName, &rec.Name, &rec.DataType, &rec.OrdinalPosition) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewIndexColumnSet(results), nil } func (r metaReader) query(qstr string, conds []string, order string, vals ...interface{}) (*sql.Rows, func(), error) { if len(conds) != 0 { qstr += "\nWHERE " + strings.Join(conds, " AND ") } if order != "" { qstr += "\nORDER BY " + order } if r.limit != 0 { qstr += fmt.Sprintf("\nFETCH FIRST %d ROWS ONLY", r.limit) } return r.Query(qstr, vals...) } usql-0.19.19/drivers/sqlserver/sqlserver.go000066400000000000000000000070041476173253300207540ustar00rootroot00000000000000// Package sqlserver defines and registers usql's Microsoft SQL Server driver. // // See: https://github.com/microsoft/go-mssqldb // Group: base package sqlserver import ( "context" "database/sql" "fmt" "io" "strconv" "strings" mssql "github.com/microsoft/go-mssqldb" sqlserver "github.com/microsoft/go-mssqldb" // DRIVER "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" // needed for azuresql authentication, named pipes, and shared memory transport protocols _ "github.com/microsoft/go-mssqldb/azuread" _ "github.com/microsoft/go-mssqldb/namedpipe" _ "github.com/microsoft/go-mssqldb/sharedmemory" ) func init() { drivers.Register("sqlserver", drivers.Driver{ AllowMultilineComments: true, RequirePreviousPassword: true, LexerName: "tsql", /* // NOTE: this has been commented out, as it is not necessary. 
if // NOTE: the azuread.DriverName is changed from `azuresql`, then // NOTE: this func will be necessary as dburl will never import non // NOTE: stdlib package. as is, dburl.Open will handle the call // NOTE: to sql.Open and will pass the `azuresql` driver name Open: func(_ context.Context, u *dburl.URL, _, _ func() io.Writer) (func(string, string) (*sql.DB, error), error) { return func(_ string, params string) (*sql.DB, error) { driver := "sqlserver" switch { case u.Query().Has("fedauth"), strings.Contains(strings.ToLower(u.OriginalScheme), "azuresql"): driver = azuread.DriverName } return sql.Open(driver, params) }, nil }, */ Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver, level, edition string err := db.QueryRowContext( ctx, `SELECT SERVERPROPERTY('productversion'), SERVERPROPERTY ('productlevel'), SERVERPROPERTY ('edition')`, ).Scan(&ver, &level, &edition) if err != nil { return "", err } return "Microsoft SQL Server " + ver + ", " + level + ", " + edition, nil }, ChangePassword: func(db drivers.DB, user, newpw, oldpw string) error { _, err := db.Exec(`ALTER LOGIN ` + user + ` WITH password = '` + newpw + `' old_password = '` + oldpw + `'`) return err }, ColumnTypes: func(col *sql.ColumnType) (interface{}, error) { switch col.DatabaseTypeName() { case "UNIQUEIDENTIFIER": if nullable, ok := col.Nullable(); ok && nullable { return new(NullUniqueIdentifier), nil } return new(mssql.UniqueIdentifier), nil } return new(interface{}), nil }, Err: func(err error) (string, string) { if e, ok := err.(sqlserver.Error); ok { return strconv.Itoa(int(e.Number)), e.Message } msg := err.Error() if i := strings.LastIndex(msg, "sqlserver:"); i != -1 { msg = msg[i:] } return "", msg }, IsPasswordErr: func(err error) bool { return strings.Contains(err.Error(), "Login failed for") }, NewMetadataReader: NewReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(NewReader(db, opts...))(db, w) }, Copy: drivers.CopyWithInsert(placeholder), }) } func placeholder(n int) string { return fmt.Sprintf("@p%d", n) } type NullUniqueIdentifier struct { ID mssql.UniqueIdentifier Valid bool } func (nui *NullUniqueIdentifier) Scan(v interface{}) error { nui.Valid = false if v == nil { return nil } if err := nui.ID.Scan(v); err != nil { return err } nui.Valid = true return nil } func (nui NullUniqueIdentifier) String() string { if nui.Valid { return nui.ID.String() } return "" } usql-0.19.19/drivers/sqlserver/sqlserver_test.go000066400000000000000000000162241476173253300220170ustar00rootroot00000000000000package sqlserver_test import ( "database/sql" "flag" "fmt" "log" "net/url" "os" "strings" "testing" dt "github.com/ory/dockertest/v3" dc "github.com/ory/dockertest/v3/docker" "github.com/xo/usql/drivers/metadata" "github.com/xo/usql/drivers/sqlserver" ) type Database struct { BuildArgs []dc.BuildArg RunOptions *dt.RunOptions Exec []string Driver string URL string ReadinessURL string DockerPort string Resource *dt.Resource DB *sql.DB Opts []metadata.ReaderOption Reader metadata.BasicReader } var dbName string = "sakila" const pw = "yourStrong123_Password" var db = Database{ BuildArgs: []dc.BuildArg{ {Name: "BASE_IMAGE", Value: "mcr.microsoft.com/mssql/server:2019-latest"}, {Name: "SCHEMA_URL", Value: "https://raw.githubusercontent.com/jOOQ/sakila/main/sql-server-sakila-db/sql-server-sakila-schema.sql"}, {Name: "TARGET", Value: "/schema"}, {Name: "USER", Value: "mssql:0"}, }, RunOptions: &dt.RunOptions{ Name: 
"usql-sqlserver", Env: []string{"ACCEPT_EULA=Y", "SA_PASSWORD=" + pw}, }, Exec: []string{"/opt/mssql-tools/bin/sqlcmd", "-S", "localhost", "-U", "sa", "-P", pw, "-d", "master", "-i", "/schema/sql-server-sakila-schema.sql"}, Driver: "sqlserver", URL: "sqlserver://sa:" + url.QueryEscape(pw) + "@127.0.0.1:%s?database=" + dbName, ReadinessURL: "sqlserver://sa:" + url.QueryEscape(pw) + "@127.0.0.1:%s", DockerPort: "1433/tcp", } func TestMain(m *testing.M) { cleanup := true flag.BoolVar(&cleanup, "cleanup", true, "delete containers when finished") flag.Parse() pool, err := dt.NewPool("") if err != nil { log.Fatalf("Could not connect to docker: %s", err) } var ok bool db.Resource, ok = pool.ContainerByName(db.RunOptions.Name) if !ok { buildOpts := &dt.BuildOptions{ ContextDir: "../testdata/docker", BuildArgs: db.BuildArgs, } db.Resource, err = pool.BuildAndRunWithBuildOptions(buildOpts, db.RunOptions) if err != nil { log.Fatal("Could not start resource: ", err) } } url := db.URL if db.ReadinessURL != "" { url = db.ReadinessURL } port := db.Resource.GetPort(db.DockerPort) if db.DB, err = waitForDbConnection(db.Driver, pool, url, port); err != nil { log.Fatal("Timed out waiting for db: ", err) } if len(db.Exec) != 0 { exitCode, err := db.Resource.Exec(db.Exec, dt.ExecOptions{ StdIn: os.Stdin, StdOut: os.Stdout, StdErr: os.Stderr, TTY: true, }) if err != nil || exitCode != 0 { log.Fatal("Could not load schema: ", err) } } // Reconnect with actual URL if a separate URL for readiness checking was used if db.ReadinessURL != "" { if db.DB, err = waitForDbConnection(db.Driver, pool, db.URL, port); err != nil { log.Fatal("Timed out waiting for db: ", err) } } code := m.Run() // You can't defer this because os.Exit doesn't care for defer if cleanup { if err := pool.Purge(db.Resource); err != nil { log.Fatal("Could not purge resource: ", err) } } os.Exit(code) } func waitForDbConnection(driver string, pool *dt.Pool, url string, port string) (*sql.DB, error) { // exponential backoff-retry, because the application in the container might not be ready to accept connections yet var db *sql.DB if err := pool.Retry(func() error { var err error db, err = sql.Open(driver, fmt.Sprintf(url, port)) if err != nil { return err } return db.Ping() }); err != nil { return nil, err } return db, nil } func TestColumns(t *testing.T) { // Only testing sqlserver specific datatype formatting. 
// The rest of the functionality is covered by informationschema/metadata_test.go:TestColumns type test struct { typeDef string want string } schema := "dbo" table := "test_dtypes" tests := []test{ {typeDef: "bigint", want: "bigint"}, {typeDef: "numeric", want: "numeric"}, {typeDef: "numeric(4,2)", want: "numeric(4,2)"}, {typeDef: "numeric(18,0)", want: "numeric"}, {typeDef: "decimal", want: "decimal"}, {typeDef: "decimal(4,2)", want: "decimal(4,2)"}, {typeDef: "decimal(18,0)", want: "decimal"}, {typeDef: "bit", want: "bit"}, {typeDef: "smallint", want: "smallint"}, {typeDef: "smallmoney", want: "smallmoney"}, {typeDef: "int", want: "int"}, {typeDef: "tinyint", want: "tinyint"}, {typeDef: "money", want: "money"}, {typeDef: "float", want: "float"}, {typeDef: "float(11)", want: "real"}, {typeDef: "float(30)", want: "float"}, {typeDef: "real", want: "real"}, {typeDef: "date", want: "date"}, {typeDef: "datetimeoffset", want: "datetimeoffset"}, {typeDef: "datetimeoffset(5)", want: "datetimeoffset(5)"}, {typeDef: "datetimeoffset(7)", want: "datetimeoffset"}, {typeDef: "datetime2", want: "datetime2"}, {typeDef: "datetime2(5)", want: "datetime2(5)"}, {typeDef: "datetime2(7)", want: "datetime2"}, {typeDef: "smalldatetime", want: "smalldatetime"}, {typeDef: "datetime", want: "datetime"}, {typeDef: "time", want: "time"}, {typeDef: "time(5)", want: "time(5)"}, {typeDef: "time(7)", want: "time"}, {typeDef: "char", want: "char"}, {typeDef: "char(3)", want: "char(3)"}, {typeDef: "char(1)", want: "char"}, {typeDef: "varchar", want: "varchar"}, {typeDef: "varchar(12)", want: "varchar(12)"}, {typeDef: "varchar(1)", want: "varchar"}, {typeDef: "varchar(max)", want: "varchar(max)"}, {typeDef: "text", want: "text"}, {typeDef: "nchar", want: "nchar"}, {typeDef: "nchar(2)", want: "nchar(2)"}, {typeDef: "nchar(1)", want: "nchar"}, {typeDef: "nvarchar", want: "nvarchar"}, {typeDef: "nvarchar(12)", want: "nvarchar(12)"}, {typeDef: "nvarchar(1)", want: "nvarchar"}, {typeDef: "nvarchar(max)", want: "nvarchar(max)"}, {typeDef: "ntext", want: "ntext"}, {typeDef: "binary", want: "binary"}, {typeDef: "binary(12)", want: "binary(12)"}, {typeDef: "binary(1)", want: "binary"}, {typeDef: "varbinary", want: "varbinary"}, {typeDef: "varbinary(12)", want: "varbinary(12)"}, {typeDef: "varbinary(1)", want: "varbinary"}, {typeDef: "varbinary(max)", want: "varbinary(max)"}, {typeDef: "image", want: "image"}, {typeDef: "rowversion", want: "timestamp"}, {typeDef: "hierarchyid", want: "hierarchyid"}, {typeDef: "uniqueidentifier", want: "uniqueidentifier"}, {typeDef: "sql_variant", want: "sql_variant"}, {typeDef: "xml", want: "xml"}, {typeDef: "geometry", want: "geometry"}, {typeDef: "geography", want: "geography"}, } // Create table colExpressions := []string{} for i, test := range tests { colExpressions = append(colExpressions, fmt.Sprintf("column_%d %s", i, test.typeDef)) } query := fmt.Sprintf("CREATE TABLE %s.%s (%s)", schema, table, strings.Join(colExpressions, ", ")) db.DB.Exec(query) defer db.DB.Exec(fmt.Sprintf("DROP TABLE %s.%s", schema, table)) // Read data types r := sqlserver.NewReader(db.DB).(metadata.ColumnReader) result, err := r.Columns(metadata.Filter{Schema: schema, Parent: table}) if err != nil { log.Fatalf("Could not read %s columns: %v", dbName, err) } actualTypes := []string{} for result.Next() { actualTypes = append(actualTypes, result.Get().DataType) } // Compare for i, test := range tests { if actualTypes[i] != test.want { t.Errorf("Wrong %s column data type, expected:\n %s, got:\n %s", dbName, test.want, 
actualTypes[i]) } } } usql-0.19.19/drivers/testdata/000077500000000000000000000000001476173253300161615ustar00rootroot00000000000000usql-0.19.19/drivers/testdata/.gitignore000066400000000000000000000000151476173253300201450ustar00rootroot00000000000000*.actual.txt usql-0.19.19/drivers/testdata/csvq/000077500000000000000000000000001476173253300171355ustar00rootroot00000000000000usql-0.19.19/drivers/testdata/csvq/.gitignore000066400000000000000000000000071476173253300211220ustar00rootroot00000000000000*_copy usql-0.19.19/drivers/testdata/csvq/staff.csv000066400000000000000000000002331476173253300207530ustar00rootroot00000000000000first_name,last_name,address_id,email,store_id,active,username,password,last_update John,Doe,1,john@invalid.com,1,true,jdoe,abc,2024-05-10T08:12:05.46875Z usql-0.19.19/drivers/testdata/docker/000077500000000000000000000000001476173253300174305ustar00rootroot00000000000000usql-0.19.19/drivers/testdata/docker/Dockerfile000066400000000000000000000003461476173253300214250ustar00rootroot00000000000000ARG BASE_IMAGE FROM $BASE_IMAGE ARG SCHEMA_URL ARG TARGET ARG USER ADD --chown=$USER $SCHEMA_URL $TARGET/ RUN [ ! -d "$TARGET" ] || chmod -R 777 $TARGET/ || echo "failed to change perms of $TARGET, leaving as $(ls -la $TARGET/)" usql-0.19.19/drivers/testdata/gen-golden.sh000077500000000000000000000037401476173253300205430ustar00rootroot00000000000000#!/usr/bin/env bash pgsql_in_docker=false pgsql_container=usql-pgsql if [ "$pgsql_in_docker" != true ]; then PGHOST="${PGHOST:-127.0.0.1}" port=$(docker port "$pgsql_container" 5432/tcp) PGPORT=${port##*:} else PGHOST="${PGHOST:-$pgsql_container}" PGPORT=5432 fi PGUSER="${PGUSER:-postgres}" PGPASSWORD="${PGPASSWORD:-pw}" export PGHOST PGPORT PGUSER PGPASSWORD declare -A queries queries=( [descTable]="\d+ film*" [listTables]="\dtvmsE+ film*" [listFuncs]="\df+" [listIndexes]="\di+" [listSchemas]="\dn+" [listDbs]="\l+" ) for q in "${!queries[@]}"; do query="${queries[$q]}" cmd=(psql --no-psqlrc --command "$query") if [ "$pgsql_in_docker" == true ]; then docker run -it --rm -e PGHOST -e PGPORT -e PGUSER -e PGPASSWORD --link "$pgsql_container" postgres:13 "${cmd[@]}" >"pgsql.$q.golden.txt" else "${cmd[@]}" -o "pgsql.$q.golden.txt" fi done mysql_in_docker=true mysql_container=usql-mysql if [ "$mysql_in_docker" != true ]; then MYHOST="${MYHOST:-127.0.0.1}" port=$(docker port "$mysql_container" 3306/tcp) MYPORT=${port##*:} else MYHOST="${MYHOST:-$mysql_container}" MYPORT=3306 fi MYUSER="${MYUSER:-root}" MYPASSWORD="${MYPASSWORD:-pw}" declare -A queries queries=( [descTable]="DESC film; SHOW INDEX FROM film; DESC film_actor; SHOW INDEX FROM film_actor; DESC film_category; SHOW INDEX FROM film_category; DESC film_list; SHOW INDEX FROM film_list; DESC film_text; SHOW INDEX FROM film_text;" [listTables]="SHOW TABLES LIKE 'film%'" [listSchemas]="SHOW DATABASES" ) for q in "${!queries[@]}"; do query="${queries[$q]}" cmd=(mysql -h "$MYHOST" -P "$MYPORT" -u "$MYUSER" --password="$MYPASSWORD" --no-auto-rehash --database sakila --execute "$query") if [ "$mysql_in_docker" == true ]; then docker run -it --rm --link "$mysql_container" mysql:8 "${cmd[@]}" 2>/dev/null >"mysql.$q.golden.txt" else "${cmd[@]}" 2>/dev/null >"mysql.$q.golden.txt" fi done usql-0.19.19/drivers/testdata/mysql.descTable.expected.txt000066400000000000000000000174341476173253300235650ustar00rootroot00000000000000 BASE TABLE "sakila.film" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length 
----------------------+---------------------------------------------------------------------+----------+-------------------+-------+----------------+-------+-------------- film_id | int unsigned | "NO" | | 10 | 0 | 10 | 0 title | varchar(255) | "NO" | | 255 | 0 | 10 | 765 description | text | "YES" | | 65535 | 0 | 10 | 65535 release_year | year | "YES" | | 0 | 0 | 10 | 0 language_id | int unsigned | "NO" | | 10 | 0 | 10 | 0 original_language_id | int unsigned | "YES" | | 10 | 0 | 10 | 0 rental_duration | tinyint unsigned | "NO" | 3 | 3 | 0 | 10 | 0 rental_rate | decimal(4,2) | "NO" | 4.99 | 4 | 2 | 10 | 0 length | smallint unsigned | "YES" | | 5 | 0 | 10 | 0 replacement_cost | decimal(5,2) | "NO" | 19.99 | 5 | 2 | 10 | 0 rating | enum('G','PG','PG-13','R','NC-17') | "YES" | G | 5 | 0 | 10 | 15 special_features | set('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') | "YES" | | 54 | 0 | 10 | 162 last_update | timestamp | "NO" | CURRENT_TIMESTAMP | 0 | 0 | 10 | 0 Indexes: "idx_fk_language_id" BTREE (language_id) "idx_fk_original_language_id" BTREE (original_language_id) "idx_title" BTREE (title) "PRIMARY" PRIMARY_KEY, UNIQUE, BTREE (film_id) Foreign-key constraints: "fk_film_language" FOREIGN KEY (language_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT "fk_film_language_original" FOREIGN KEY (original_language_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Referenced by: TABLE "film" CONSTRAINT "fk_film_language" FOREIGN KEY (language_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT TABLE "film" CONSTRAINT "fk_film_language_original" FOREIGN KEY (original_language_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT BASE TABLE "sakila.film_actor" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+--------------+----------+-------------------+------+----------------+-------+-------------- actor_id | int unsigned | "NO" | | 10 | 0 | 10 | 0 film_id | int unsigned | "NO" | | 10 | 0 | 10 | 0 last_update | timestamp | "NO" | CURRENT_TIMESTAMP | 0 | 0 | 10 | 0 Indexes: "idx_fk_film_id" BTREE (film_id) "PRIMARY" PRIMARY_KEY, UNIQUE, BTREE (actor_id, film_id) Foreign-key constraints: "fk_film_actor_actor" FOREIGN KEY (actor_id) REFERENCES film_actor(actor_id) ON UPDATE CASCADE ON DELETE RESTRICT "fk_film_actor_film" FOREIGN KEY (film_id) REFERENCES film_actor(actor_id) ON UPDATE CASCADE ON DELETE RESTRICT Referenced by: TABLE "film_actor" CONSTRAINT "fk_film_actor_actor" FOREIGN KEY (actor_id) REFERENCES film_actor(actor_id) ON UPDATE CASCADE ON DELETE RESTRICT TABLE "film_actor" CONSTRAINT "fk_film_actor_film" FOREIGN KEY (film_id) REFERENCES film_actor(actor_id) ON UPDATE CASCADE ON DELETE RESTRICT BASE TABLE "sakila.film_category" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+--------------+----------+-------------------+------+----------------+-------+-------------- film_id | int unsigned | "NO" | | 10 | 0 | 10 | 0 category_id | int unsigned | "NO" | | 10 | 0 | 10 | 0 last_update | timestamp | "NO" | CURRENT_TIMESTAMP | 0 | 0 | 10 | 0 Indexes: "fk_film_category_category" BTREE (category_id) "PRIMARY" PRIMARY_KEY, UNIQUE, BTREE (film_id, category_id) Foreign-key constraints: "fk_film_category_category" FOREIGN KEY (category_id) REFERENCES film_category(film_id) ON UPDATE CASCADE ON DELETE RESTRICT "fk_film_category_film" FOREIGN KEY (film_id) REFERENCES film_category(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Referenced by: 
TABLE "film_category" CONSTRAINT "fk_film_category_category" FOREIGN KEY (category_id) REFERENCES film_category(film_id) ON UPDATE CASCADE ON DELETE RESTRICT TABLE "film_category" CONSTRAINT "fk_film_category_film" FOREIGN KEY (film_id) REFERENCES film_category(film_id) ON UPDATE CASCADE ON DELETE RESTRICT BASE TABLE "sakila.film_text" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+--------------+----------+---------+-------+----------------+-------+-------------- film_id | int | "NO" | | 10 | 0 | 10 | 0 title | varchar(255) | "NO" | | 255 | 0 | 10 | 765 description | text | "YES" | | 65535 | 0 | 10 | 65535 Indexes: "idx_title_description" FULLTEXT (title, description) "PRIMARY" PRIMARY_KEY, UNIQUE, BTREE (film_id) VIEW "sakila.film_list" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+------------------------------------+----------+---------+-------+----------------+-------+-------------- FID | int unsigned | "YES" | 0 | 10 | 0 | 10 | 0 title | varchar(255) | "YES" | | 255 | 0 | 10 | 765 description | text | "YES" | | 65535 | 0 | 10 | 65535 category | varchar(25) | "NO" | | 25 | 0 | 10 | 75 price | decimal(4,2) | "YES" | 4.99 | 4 | 2 | 10 | 0 length | smallint unsigned | "YES" | | 5 | 0 | 10 | 0 rating | enum('G','PG','PG-13','R','NC-17') | "YES" | G | 5 | 0 | 10 | 15 actors | text | "YES" | | 65535 | 0 | 10 | 65535 usql-0.19.19/drivers/testdata/mysql.descTable.golden.txt000066400000000000000000000273261476173253300232350ustar00rootroot00000000000000mysql: [Warning] Using a password on the command line interface can be insecure. +----------------------+---------------------------------------------------------------------+------+-----+-------------------+-----------------------------------------------+ | Field | Type | Null | Key | Default | Extra | +----------------------+---------------------------------------------------------------------+------+-----+-------------------+-----------------------------------------------+ | film_id | smallint unsigned | NO | PRI | NULL | auto_increment | | title | varchar(255) | NO | MUL | NULL | | | description | text | YES | | NULL | | | release_year | year | YES | | NULL | | | language_id | tinyint unsigned | NO | MUL | NULL | | | original_language_id | tinyint unsigned | YES | MUL | NULL | | | rental_duration | tinyint unsigned | NO | | 3 | | | rental_rate | decimal(4,2) | NO | | 4.99 | | | length | smallint unsigned | YES | | NULL | | | replacement_cost | decimal(5,2) | NO | | 19.99 | | | rating | enum('G','PG','PG-13','R','NC-17') | YES | | G | | | special_features | set('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') | YES | | NULL | | | last_update | timestamp | NO | | CURRENT_TIMESTAMP | DEFAULT_GENERATED on update CURRENT_TIMESTAMP | +----------------------+---------------------------------------------------------------------+------+-----+-------------------+-----------------------------------------------+ +-------+------------+-----------------------------+--------------+----------------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment | Visible | Expression | 
+-------+------------+-----------------------------+--------------+----------------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | film | 0 | PRIMARY | 1 | film_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film | 1 | idx_title | 1 | title | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film | 1 | idx_fk_language_id | 1 | language_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film | 1 | idx_fk_original_language_id | 1 | original_language_id | A | 0 | NULL | NULL | YES | BTREE | | | YES | NULL | +-------+------------+-----------------------------+--------------+----------------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ +-------------+-------------------+------+-----+-------------------+-----------------------------------------------+ | Field | Type | Null | Key | Default | Extra | +-------------+-------------------+------+-----+-------------------+-----------------------------------------------+ | actor_id | smallint unsigned | NO | PRI | NULL | | | film_id | smallint unsigned | NO | PRI | NULL | | | last_update | timestamp | NO | | CURRENT_TIMESTAMP | DEFAULT_GENERATED on update CURRENT_TIMESTAMP | +-------------+-------------------+------+-----+-------------------+-----------------------------------------------+ +------------+------------+----------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment | Visible | Expression | +------------+------------+----------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | film_actor | 0 | PRIMARY | 1 | actor_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film_actor | 0 | PRIMARY | 2 | film_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film_actor | 1 | idx_fk_film_id | 1 | film_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | +------------+------------+----------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ +-------------+-------------------+------+-----+-------------------+-----------------------------------------------+ | Field | Type | Null | Key | Default | Extra | +-------------+-------------------+------+-----+-------------------+-----------------------------------------------+ | film_id | smallint unsigned | NO | PRI | NULL | | | category_id | tinyint unsigned | NO | PRI | NULL | | | last_update | timestamp | NO | | CURRENT_TIMESTAMP | DEFAULT_GENERATED on update CURRENT_TIMESTAMP | +-------------+-------------------+------+-----+-------------------+-----------------------------------------------+ +---------------+------------+---------------------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment | Visible | Expression | 
+---------------+------------+---------------------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | film_category | 0 | PRIMARY | 1 | film_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film_category | 0 | PRIMARY | 2 | category_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film_category | 1 | fk_film_category_category | 1 | category_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | +---------------+------------+---------------------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ +-------------+------------------------------------+------+-----+---------+-------+ | Field | Type | Null | Key | Default | Extra | +-------------+------------------------------------+------+-----+---------+-------+ | FID | smallint unsigned | YES | | 0 | | | title | varchar(255) | YES | | NULL | | | description | text | YES | | NULL | | | category | varchar(25) | NO | | NULL | | | price | decimal(4,2) | YES | | 4.99 | | | length | smallint unsigned | YES | | NULL | | | rating | enum('G','PG','PG-13','R','NC-17') | YES | | G | | | actors | text | YES | | NULL | | +-------------+------------------------------------+------+-----+---------+-------+ +-------------+--------------+------+-----+---------+-------+ | Field | Type | Null | Key | Default | Extra | +-------------+--------------+------+-----+---------+-------+ | film_id | smallint | NO | PRI | NULL | | | title | varchar(255) | NO | MUL | NULL | | | description | text | YES | | NULL | | +-------------+--------------+------+-----+---------+-------+ +-----------+------------+-----------------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment | Visible | Expression | +-----------+------------+-----------------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ | film_text | 0 | PRIMARY | 1 | film_id | A | 0 | NULL | NULL | | BTREE | | | YES | NULL | | film_text | 1 | idx_title_description | 1 | title | NULL | NULL | NULL | NULL | | FULLTEXT | | | YES | NULL | | film_text | 1 | idx_title_description | 2 | description | NULL | NULL | NULL | NULL | YES | FULLTEXT | | | YES | NULL | +-----------+------------+-----------------------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+---------+------------+ usql-0.19.19/drivers/testdata/mysql.listFuncs.expected.txt000066400000000000000000000025701476173253300236440ustar00rootroot00000000000000 List of functions Schema | Name | Result data type | Argument data types | Type --------+----------------------------+------------------+---------------------------------------------------------------------------------------------+----------- sakila | film_in_stock | | p_film_id int, p_store_id int, OUT p_film_count int | PROCEDURE sakila | film_not_in_stock | | p_film_id int, p_store_id int, OUT p_film_count int | PROCEDURE sakila | get_customer_balance | decimal | p_customer_id int, p_effective_date datetime | FUNCTION sakila | inventory_held_by_customer | int | p_inventory_id int | 
FUNCTION sakila | inventory_in_stock | tinyint | p_inventory_id int | FUNCTION sakila | rewards_report | | min_monthly_purchases tinyint, min_dollar_amount_purchased decimal, OUT count_rewardees int | PROCEDURE (6 rows) usql-0.19.19/drivers/testdata/mysql.listIndexes.expected.txt000066400000000000000000000071611476173253300241660ustar00rootroot00000000000000 List of indexes Schema | Name | Type | Table | Primary? | Unique? --------+-----------------------------+----------+---------------+----------+--------- sakila | idx_actor_last_name | BTREE | actor | "NO" | "NO" sakila | PRIMARY | BTREE | actor | "YES" | "YES" sakila | idx_fk_city_id | BTREE | address | "NO" | "NO" sakila | PRIMARY | BTREE | address | "YES" | "YES" sakila | PRIMARY | BTREE | category | "YES" | "YES" sakila | idx_fk_country_id | BTREE | city | "NO" | "NO" sakila | PRIMARY | BTREE | city | "YES" | "YES" sakila | PRIMARY | BTREE | country | "YES" | "YES" sakila | idx_fk_address_id | BTREE | customer | "NO" | "NO" sakila | idx_fk_store_id | BTREE | customer | "NO" | "NO" sakila | idx_last_name | BTREE | customer | "NO" | "NO" sakila | PRIMARY | BTREE | customer | "YES" | "YES" sakila | idx_fk_language_id | BTREE | film | "NO" | "NO" sakila | idx_fk_original_language_id | BTREE | film | "NO" | "NO" sakila | idx_title | BTREE | film | "NO" | "NO" sakila | PRIMARY | BTREE | film | "YES" | "YES" sakila | idx_fk_film_id | BTREE | film_actor | "NO" | "NO" sakila | PRIMARY | BTREE | film_actor | "YES" | "YES" sakila | fk_film_category_category | BTREE | film_category | "NO" | "NO" sakila | PRIMARY | BTREE | film_category | "YES" | "YES" sakila | idx_title_description | FULLTEXT | film_text | "NO" | "NO" sakila | PRIMARY | BTREE | film_text | "YES" | "YES" sakila | idx_fk_film_id | BTREE | inventory | "NO" | "NO" sakila | idx_store_id_film_id | BTREE | inventory | "NO" | "NO" sakila | PRIMARY | BTREE | inventory | "YES" | "YES" sakila | PRIMARY | BTREE | language | "YES" | "YES" sakila | fk_payment_rental | BTREE | payment | "NO" | "NO" sakila | idx_fk_customer_id | BTREE | payment | "NO" | "NO" sakila | idx_fk_staff_id | BTREE | payment | "NO" | "NO" sakila | PRIMARY | BTREE | payment | "YES" | "YES" sakila | idx_fk_customer_id | BTREE | rental | "NO" | "NO" sakila | idx_fk_inventory_id | BTREE | rental | "NO" | "NO" sakila | idx_fk_staff_id | BTREE | rental | "NO" | "NO" sakila | PRIMARY | BTREE | rental | "YES" | "YES" sakila | rental_date | BTREE | rental | "NO" | "YES" sakila | idx_fk_address_id | BTREE | staff | "NO" | "NO" sakila | idx_fk_store_id | BTREE | staff | "NO" | "NO" sakila | PRIMARY | BTREE | staff | "YES" | "YES" sakila | idx_fk_address_id | BTREE | store | "NO" | "NO" sakila | idx_unique_manager | BTREE | store | "NO" | "YES" sakila | PRIMARY | BTREE | store | "YES" | "YES" (41 rows) usql-0.19.19/drivers/testdata/mysql.listSchemas.expected.txt000066400000000000000000000001161476173253300241430ustar00rootroot00000000000000 List of schemas Schema | Catalog --------+--------- sakila | def (1 row) usql-0.19.19/drivers/testdata/mysql.listSchemas.golden.txt000066400000000000000000000004521476173253300236150ustar00rootroot00000000000000mysql: [Warning] Using a password on the command line interface can be insecure. 
+--------------------+ | Database | +--------------------+ | information_schema | | mysql | | performance_schema | | sakila | | sys | +--------------------+ usql-0.19.19/drivers/testdata/mysql.listTables.expected.txt000066400000000000000000000007001476173253300237710ustar00rootroot00000000000000 List of relations Schema | Name | Type | Rows | Size | Comment --------+---------------+------------+------+------+--------- sakila | film | BASE TABLE | 0 | | sakila | film_actor | BASE TABLE | 0 | | sakila | film_category | BASE TABLE | 0 | | sakila | film_text | BASE TABLE | 0 | | sakila | film_list | VIEW | 0 | | (5 rows) usql-0.19.19/drivers/testdata/mysql.listTables.golden.txt000066400000000000000000000005401476173253300234420ustar00rootroot00000000000000mysql: [Warning] Using a password on the command line interface can be insecure. +--------------------------+ | Tables_in_sakila (film%) | +--------------------------+ | film | | film_actor | | film_category | | film_list | | film_text | +--------------------------+ usql-0.19.19/drivers/testdata/pgsql.descTable.expected.txt000066400000000000000000000176231476173253300235460ustar00rootroot00000000000000 table "public.film" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ----------------------+--------------------------------+----------+---------------------------------------+------+----------------+-------+-------------- film_id | integer | "NO" | nextval('film_film_id_seq'::regclass) | 32 | 0 | 2 | 0 title | character varying(255) | "NO" | | 255 | 0 | 10 | 1020 description | text | "YES" | | 0 | 0 | 10 | 1073741824 release_year | integer | "YES" | | 32 | 0 | 2 | 0 language_id | smallint | "NO" | | 16 | 0 | 2 | 0 original_language_id | smallint | "YES" | | 16 | 0 | 2 | 0 rental_duration | smallint | "NO" | 3 | 16 | 0 | 2 | 0 rental_rate | numeric(4,2) | "NO" | 4.99 | 4 | 2 | 10 | 0 length | smallint | "YES" | | 16 | 0 | 2 | 0 replacement_cost | numeric(5,2) | "NO" | 19.99 | 5 | 2 | 10 | 0 rating | USER-DEFINED | "YES" | 'G'::mpaa_rating | 0 | 0 | 10 | 0 last_update | timestamp(6) without time zone | "NO" | now() | 6 | 0 | 10 | 0 special_features | ARRAY | "YES" | | 0 | 0 | 10 | 0 fulltext | tsvector | "NO" | | 0 | 0 | 10 | 0 Indexes: "film_fulltext_idx" gist (fulltext) "film_pkey" PRIMARY_KEY, UNIQUE, btree (film_id) "idx_fk_language_id" btree (language_id) "idx_fk_original_language_id" btree (original_language_id) "idx_title" btree (title) Foreign-key constraints: "film_language_id_fkey" FOREIGN KEY (language_id) REFERENCES language(language_id) ON UPDATE CASCADE ON DELETE RESTRICT "film_original_language_id_fkey" FOREIGN KEY (original_language_id) REFERENCES language(language_id) ON UPDATE CASCADE ON DELETE RESTRICT Referenced by: TABLE "film_actor" CONSTRAINT "film_actor_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT TABLE "film_category" CONSTRAINT "film_category_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT TABLE "inventory" CONSTRAINT "inventory_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Triggers: "film_fulltext_trigger" CREATE TRIGGER film_fulltext_trigger BEFORE INSERT OR UPDATE ON film FOR EACH ROW EXECUTE FUNCTION tsvector_update_trigger('fulltext', 'pg_catalog.english', 'title', 'description') "last_updated" CREATE TRIGGER last_updated BEFORE UPDATE ON film FOR EACH ROW EXECUTE FUNCTION last_updated() table "public.film_actor" Name | Type | 
Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+--------------------------------+----------+---------+------+----------------+-------+-------------- actor_id | smallint | "NO" | | 16 | 0 | 2 | 0 film_id | smallint | "NO" | | 16 | 0 | 2 | 0 last_update | timestamp(6) without time zone | "NO" | now() | 6 | 0 | 10 | 0 Indexes: "film_actor_pkey" PRIMARY_KEY, UNIQUE, btree (actor_id, film_id) "idx_fk_film_id" btree (film_id) Foreign-key constraints: "film_actor_actor_id_fkey" FOREIGN KEY (actor_id) REFERENCES actor(actor_id) ON UPDATE CASCADE ON DELETE RESTRICT "film_actor_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Triggers: "last_updated" CREATE TRIGGER last_updated BEFORE UPDATE ON film_actor FOR EACH ROW EXECUTE FUNCTION last_updated() table "public.film_category" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+--------------------------------+----------+---------+------+----------------+-------+-------------- film_id | smallint | "NO" | | 16 | 0 | 2 | 0 category_id | smallint | "NO" | | 16 | 0 | 2 | 0 last_update | timestamp(6) without time zone | "NO" | now() | 6 | 0 | 10 | 0 Indexes: "film_category_pkey" PRIMARY_KEY, UNIQUE, btree (film_id, category_id) Foreign-key constraints: "film_category_category_id_fkey" FOREIGN KEY (category_id) REFERENCES category(category_id) ON UPDATE CASCADE ON DELETE RESTRICT "film_category_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Triggers: "last_updated" CREATE TRIGGER last_updated BEFORE UPDATE ON film_category FOR EACH ROW EXECUTE FUNCTION last_updated() view "public.film_list" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+------------------------+----------+---------+------+----------------+-------+-------------- fid | integer | "YES" | | 32 | 0 | 2 | 0 title | character varying(255) | "YES" | | 255 | 0 | 10 | 1020 description | text | "YES" | | 0 | 0 | 10 | 1073741824 category | character varying(25) | "YES" | | 25 | 0 | 10 | 100 price | numeric(4,2) | "YES" | | 4 | 2 | 10 | 0 length | smallint | "YES" | | 16 | 0 | 2 | 0 rating | USER-DEFINED | "YES" | | 0 | 0 | 10 | 0 actors | text | "YES" | | 0 | 0 | 10 | 1073741824 Sequence "public.film_film_id_seq" Type | Start | Min | Max | Increment | Cycles? 
--------+-------+-----+---------------------+-----------+--------- bigint | 1 | 1 | 9223372036854775807 | 1 | "NO" Index "public.film_actor_pkey" Name | Type ----------+---------- actor_id | smallint film_id | smallint primary key, btree, for table film_actor Index "public.film_category_pkey" Name | Type -------------+---------- film_id | smallint category_id | smallint primary key, btree, for table film_category Index "public.film_fulltext_idx" Name | Type ----------+----------- fulltext | gtsvector gist, for table film Index "public.film_pkey" Name | Type ---------+--------- film_id | integer primary key, btree, for table film usql-0.19.19/drivers/testdata/pgsql.descTable.golden.txt000066400000000000000000000143321476173253300232070ustar00rootroot00000000000000 Table "public.film" Column | Type | Collation | Nullable | Default ----------------------+-----------------------------+-----------+----------+--------------------------------------- film_id | integer | | not null | nextval('film_film_id_seq'::regclass) title | character varying(255) | | not null | description | text | | | release_year | year | | | language_id | smallint | | not null | original_language_id | smallint | | | rental_duration | smallint | | not null | 3 rental_rate | numeric(4,2) | | not null | 4.99 length | smallint | | | replacement_cost | numeric(5,2) | | not null | 19.99 rating | mpaa_rating | | | 'G'::mpaa_rating last_update | timestamp without time zone | | not null | now() special_features | text[] | | | fulltext | tsvector | | not null | Indexes: "film_pkey" PRIMARY KEY, btree (film_id) "film_fulltext_idx" gist (fulltext) "idx_fk_language_id" btree (language_id) "idx_fk_original_language_id" btree (original_language_id) "idx_title" btree (title) Foreign-key constraints: "film_language_id_fkey" FOREIGN KEY (language_id) REFERENCES language(language_id) ON UPDATE CASCADE ON DELETE RESTRICT "film_original_language_id_fkey" FOREIGN KEY (original_language_id) REFERENCES language(language_id) ON UPDATE CASCADE ON DELETE RESTRICT Referenced by: TABLE "film_actor" CONSTRAINT "film_actor_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT TABLE "film_category" CONSTRAINT "film_category_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT TABLE "inventory" CONSTRAINT "inventory_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Triggers: film_fulltext_trigger BEFORE INSERT OR UPDATE ON film FOR EACH ROW EXECUTE FUNCTION tsvector_update_trigger('fulltext', 'pg_catalog.english', 'title', 'description') last_updated BEFORE UPDATE ON film FOR EACH ROW EXECUTE FUNCTION last_updated() Table "public.film_actor" Column | Type | Collation | Nullable | Default -------------+-----------------------------+-----------+----------+--------- actor_id | smallint | | not null | film_id | smallint | | not null | last_update | timestamp without time zone | | not null | now() Indexes: "film_actor_pkey" PRIMARY KEY, btree (actor_id, film_id) "idx_fk_film_id" btree (film_id) Foreign-key constraints: "film_actor_actor_id_fkey" FOREIGN KEY (actor_id) REFERENCES actor(actor_id) ON UPDATE CASCADE ON DELETE RESTRICT "film_actor_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Triggers: last_updated BEFORE UPDATE ON film_actor FOR EACH ROW EXECUTE FUNCTION last_updated() Index "public.film_actor_pkey" Column | Type | Key? 
| Definition ----------+----------+------+------------ actor_id | smallint | yes | actor_id film_id | smallint | yes | film_id primary key, btree, for table "public.film_actor" Table "public.film_category" Column | Type | Collation | Nullable | Default -------------+-----------------------------+-----------+----------+--------- film_id | smallint | | not null | category_id | smallint | | not null | last_update | timestamp without time zone | | not null | now() Indexes: "film_category_pkey" PRIMARY KEY, btree (film_id, category_id) Foreign-key constraints: "film_category_category_id_fkey" FOREIGN KEY (category_id) REFERENCES category(category_id) ON UPDATE CASCADE ON DELETE RESTRICT "film_category_film_id_fkey" FOREIGN KEY (film_id) REFERENCES film(film_id) ON UPDATE CASCADE ON DELETE RESTRICT Triggers: last_updated BEFORE UPDATE ON film_category FOR EACH ROW EXECUTE FUNCTION last_updated() Index "public.film_category_pkey" Column | Type | Key? | Definition -------------+----------+------+------------- film_id | smallint | yes | film_id category_id | smallint | yes | category_id primary key, btree, for table "public.film_category" Sequence "public.film_film_id_seq" Type | Start | Minimum | Maximum | Increment | Cycles? | Cache --------+-------+---------+---------------------+-----------+---------+------- bigint | 1 | 1 | 9223372036854775807 | 1 | no | 1 Index "public.film_fulltext_idx" Column | Type | Key? | Definition ----------+-----------+------+------------ fulltext | gtsvector | yes | fulltext gist, for table "public.film" View "public.film_list" Column | Type | Collation | Nullable | Default -------------+------------------------+-----------+----------+--------- fid | integer | | | title | character varying(255) | | | description | text | | | category | character varying(25) | | | price | numeric(4,2) | | | length | smallint | | | rating | mpaa_rating | | | actors | text | | | Index "public.film_pkey" Column | Type | Key? 
| Definition ---------+---------+------+------------ film_id | integer | yes | film_id primary key, btree, for table "public.film" usql-0.19.19/drivers/testdata/pgsql.listDbs.golden.txt000066400000000000000000000011611476173253300227210ustar00rootroot00000000000000 List of databases Name | Owner | Encoding | Collate | Ctype | Access privileges -----------+----------+----------+------------+------------+----------------------- postgres | postgres | UTF8 | en_US.utf8 | en_US.utf8 | template0 | postgres | UTF8 | en_US.utf8 | en_US.utf8 | =c/postgres + | | | | | postgres=CTc/postgres template1 | postgres | UTF8 | en_US.utf8 | en_US.utf8 | =c/postgres + | | | | | postgres=CTc/postgres (3 rows) usql-0.19.19/drivers/testdata/pgsql.listFuncs.expected.txt000066400000000000000000000033101476173253300236160ustar00rootroot00000000000000 List of functions Schema | Name | Result data type | Argument data types | Type --------+----------------------------+------------------+---------------------------------------------------------------------+---------- public | _group_concat | text | text, text | FUNCTION public | film_in_stock | integer | p_film_id integer, p_store_id integer, OUT p_film_count integer | FUNCTION public | film_not_in_stock | integer | p_film_id integer, p_store_id integer, OUT p_film_count integer | FUNCTION public | get_customer_balance | numeric | p_customer_id integer, p_effective_date timestamp without time zone | FUNCTION public | group_concat | text | text | public | inventory_held_by_customer | integer | p_inventory_id integer | FUNCTION public | inventory_in_stock | boolean | p_inventory_id integer | FUNCTION public | last_day | date | timestamp without time zone | FUNCTION public | last_updated | trigger | | FUNCTION public | rewards_report | USER-DEFINED | min_monthly_purchases integer, min_dollar_amount_purchased numeric | FUNCTION (10 rows) usql-0.19.19/drivers/testdata/pgsql.listFuncs.golden.txt000066400000000000000000000032241476173253300232710ustar00rootroot00000000000000 List of functions Schema | Name | Result data type | Argument data types | Type --------+----------------------------+------------------+---------------------------------------------------------------------+------ public | _group_concat | text | text, text | func public | film_in_stock | SETOF integer | p_film_id integer, p_store_id integer, OUT p_film_count integer | func public | film_not_in_stock | SETOF integer | p_film_id integer, p_store_id integer, OUT p_film_count integer | func public | get_customer_balance | numeric | p_customer_id integer, p_effective_date timestamp without time zone | func public | group_concat | text | text | agg public | inventory_held_by_customer | integer | p_inventory_id integer | func public | inventory_in_stock | boolean | p_inventory_id integer | func public | last_day | date | timestamp without time zone | func public | last_updated | trigger | | func public | rewards_report | SETOF customer | min_monthly_purchases integer, min_dollar_amount_purchased numeric | func (10 rows) usql-0.19.19/drivers/testdata/pgsql.listIndexes.expected.txt000066400000000000000000000117061476173253300241470ustar00rootroot00000000000000 List of indexes Schema | Name | Type | Table | Primary? | Unique? 
--------+-----------------------------------------------------+-------+------------------+----------+--------- public | actor_pkey | index | actor | "YES" | "YES" public | address_pkey | index | address | "YES" | "YES" public | category_pkey | index | category | "YES" | "YES" public | city_pkey | index | city | "YES" | "YES" public | country_pkey | index | country | "YES" | "YES" public | customer_pkey | index | customer | "YES" | "YES" public | film_actor_pkey | index | film_actor | "YES" | "YES" public | film_category_pkey | index | film_category | "YES" | "YES" public | film_fulltext_idx | index | film | "NO" | "NO" public | film_pkey | index | film | "YES" | "YES" public | idx_actor_last_name | index | actor | "NO" | "NO" public | idx_fk_address_id | index | customer | "NO" | "NO" public | idx_fk_city_id | index | address | "NO" | "NO" public | idx_fk_country_id | index | city | "NO" | "NO" public | idx_fk_customer_id | index | payment | "NO" | "NO" public | idx_fk_film_id | index | film_actor | "NO" | "NO" public | idx_fk_inventory_id | index | rental | "NO" | "NO" public | idx_fk_language_id | index | film | "NO" | "NO" public | idx_fk_original_language_id | index | film | "NO" | "NO" public | idx_fk_payment_p2007_01_customer_id | index | payment_p2007_01 | "NO" | "NO" public | idx_fk_payment_p2007_01_staff_id | index | payment_p2007_01 | "NO" | "NO" public | idx_fk_payment_p2007_02_customer_id | index | payment_p2007_02 | "NO" | "NO" public | idx_fk_payment_p2007_02_staff_id | index | payment_p2007_02 | "NO" | "NO" public | idx_fk_payment_p2007_03_customer_id | index | payment_p2007_03 | "NO" | "NO" public | idx_fk_payment_p2007_03_staff_id | index | payment_p2007_03 | "NO" | "NO" public | idx_fk_payment_p2007_04_customer_id | index | payment_p2007_04 | "NO" | "NO" public | idx_fk_payment_p2007_04_staff_id | index | payment_p2007_04 | "NO" | "NO" public | idx_fk_payment_p2007_05_customer_id | index | payment_p2007_05 | "NO" | "NO" public | idx_fk_payment_p2007_05_staff_id | index | payment_p2007_05 | "NO" | "NO" public | idx_fk_payment_p2007_06_customer_id | index | payment_p2007_06 | "NO" | "NO" public | idx_fk_payment_p2007_06_staff_id | index | payment_p2007_06 | "NO" | "NO" public | idx_fk_staff_id | index | payment | "NO" | "NO" public | idx_fk_store_id | index | customer | "NO" | "NO" public | idx_last_name | index | customer | "NO" | "NO" public | idx_store_id_film_id | index | inventory | "NO" | "NO" public | idx_title | index | film | "NO" | "NO" public | idx_unq_manager_staff_id | index | store | "YES" | "NO" public | idx_unq_rental_rental_date_inventory_id_customer_id | index | rental | "YES" | "NO" public | inventory_pkey | index | inventory | "YES" | "YES" public | language_pkey | index | language | "YES" | "YES" public | payment_pkey | index | payment | "YES" | "YES" public | rental_pkey | index | rental | "YES" | "YES" public | staff_pkey | index | staff | "YES" | "YES" public | store_pkey | index | store | "YES" | "YES" (44 rows) usql-0.19.19/drivers/testdata/pgsql.listIndexes.golden.txt000066400000000000000000000104241476173253300236120ustar00rootroot00000000000000 List of relations Schema | Name | Type | Owner | Table --------+-----------------------------------------------------+-------+----------+------------------ public | actor_pkey | index | postgres | actor public | address_pkey | index | postgres | address public | category_pkey | index | postgres | category public | city_pkey | index | postgres | city public | country_pkey | index | postgres | country 
public | customer_pkey | index | postgres | customer public | film_actor_pkey | index | postgres | film_actor public | film_category_pkey | index | postgres | film_category public | film_fulltext_idx | index | postgres | film public | film_pkey | index | postgres | film public | idx_actor_last_name | index | postgres | actor public | idx_fk_address_id | index | postgres | customer public | idx_fk_city_id | index | postgres | address public | idx_fk_country_id | index | postgres | city public | idx_fk_customer_id | index | postgres | payment public | idx_fk_film_id | index | postgres | film_actor public | idx_fk_inventory_id | index | postgres | rental public | idx_fk_language_id | index | postgres | film public | idx_fk_original_language_id | index | postgres | film public | idx_fk_payment_p2007_01_customer_id | index | postgres | payment_p2007_01 public | idx_fk_payment_p2007_01_staff_id | index | postgres | payment_p2007_01 public | idx_fk_payment_p2007_02_customer_id | index | postgres | payment_p2007_02 public | idx_fk_payment_p2007_02_staff_id | index | postgres | payment_p2007_02 public | idx_fk_payment_p2007_03_customer_id | index | postgres | payment_p2007_03 public | idx_fk_payment_p2007_03_staff_id | index | postgres | payment_p2007_03 public | idx_fk_payment_p2007_04_customer_id | index | postgres | payment_p2007_04 public | idx_fk_payment_p2007_04_staff_id | index | postgres | payment_p2007_04 public | idx_fk_payment_p2007_05_customer_id | index | postgres | payment_p2007_05 public | idx_fk_payment_p2007_05_staff_id | index | postgres | payment_p2007_05 public | idx_fk_payment_p2007_06_customer_id | index | postgres | payment_p2007_06 public | idx_fk_payment_p2007_06_staff_id | index | postgres | payment_p2007_06 public | idx_fk_staff_id | index | postgres | payment public | idx_fk_store_id | index | postgres | customer public | idx_last_name | index | postgres | customer public | idx_store_id_film_id | index | postgres | inventory public | idx_title | index | postgres | film public | idx_unq_manager_staff_id | index | postgres | store public | idx_unq_rental_rental_date_inventory_id_customer_id | index | postgres | rental public | inventory_pkey | index | postgres | inventory public | language_pkey | index | postgres | language public | payment_pkey | index | postgres | payment public | rental_pkey | index | postgres | rental public | staff_pkey | index | postgres | staff public | store_pkey | index | postgres | store (44 rows) usql-0.19.19/drivers/testdata/pgsql.listSchemas.expected.txt000066400000000000000000000001261476173253300241250ustar00rootroot00000000000000 List of schemas Schema | Catalog --------+---------- public | postgres (1 row) usql-0.19.19/drivers/testdata/pgsql.listSchemas.golden.txt000066400000000000000000000001261476173253300235740ustar00rootroot00000000000000 List of schemas Name | Owner --------+---------- public | postgres (1 row) usql-0.19.19/drivers/testdata/pgsql.listTables.expected.txt000066400000000000000000000007641476173253300237640ustar00rootroot00000000000000 List of relations Schema | Name | Type | Rows | Size | Comment --------+------------------+----------+------+------------+--------- public | film_film_id_seq | sequence | 1 | 8192 bytes | public | film | table | 0 | 8192 bytes | public | film_actor | table | 0 | 0 bytes | public | film_category | table | 0 | 0 bytes | public | film_list | view | 0 | 0 bytes | (5 rows) 
usql-0.19.19/drivers/testdata/pgsql.listTables.golden.txt000066400000000000000000000006051476173253300234250ustar00rootroot00000000000000 List of relations Schema | Name | Type | Owner --------+------------------+----------+---------- public | film | table | postgres public | film_actor | table | postgres public | film_category | table | postgres public | film_film_id_seq | sequence | postgres public | film_list | view | postgres (5 rows) usql-0.19.19/drivers/testdata/sqlserver.descTable.expected.txt000066400000000000000000000125321476173253300244400ustar00rootroot00000000000000 BASE TABLE "dbo.film" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ----------------------+--------------+----------+-------------+------------+----------------+-------+-------------- film_id | int | "NO" | | 10 | 0 | 10 | 0 title | varchar(255) | "NO" | | 255 | 0 | 10 | 255 description | text | "YES" | (NULL) | 2147483647 | 0 | 10 | 2147483647 release_year | varchar(4) | "YES" | | 4 | 0 | 10 | 4 language_id | int | "NO" | | 10 | 0 | 10 | 0 original_language_id | int | "YES" | (NULL) | 10 | 0 | 10 | 0 rental_duration | tinyint | "NO" | ((3)) | 3 | 0 | 10 | 0 rental_rate | decimal(4,2) | "NO" | ((4.99)) | 4 | 2 | 10 | 0 length | smallint | "YES" | (NULL) | 5 | 0 | 10 | 0 replacement_cost | decimal(5,2) | "NO" | ((19.99)) | 5 | 2 | 10 | 0 rating | varchar(10) | "YES" | ('G') | 10 | 0 | 10 | 10 special_features | varchar(255) | "YES" | (NULL) | 255 | 0 | 10 | 255 last_update | datetime | "NO" | (getdate()) | 3 | 0 | 10 | 0 Indexes: "" HEAP (language_id, original_language_id, film_id) "idx_fk_language_id" NONCLUSTERED (language_id) "idx_fk_original_language_id" NONCLUSTERED (original_language_id) "PK__film__349764A85F0D1F82" PRIMARY_KEY, UNIQUE, NONCLUSTERED (film_id) BASE TABLE "dbo.film_actor" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+----------+----------+-------------+------+----------------+-------+-------------- actor_id | int | "NO" | | 10 | 0 | 10 | 0 film_id | int | "NO" | | 10 | 0 | 10 | 0 last_update | datetime | "NO" | (getdate()) | 3 | 0 | 10 | 0 Indexes: "" HEAP (actor_id, film_id, actor_id, film_id) "idx_fk_film_actor_actor" NONCLUSTERED (actor_id) "idx_fk_film_actor_film" NONCLUSTERED (film_id) "PK__film_act__086D31FFE010698E" PRIMARY_KEY, UNIQUE, NONCLUSTERED (actor_id, film_id) BASE TABLE "dbo.film_category" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+----------+----------+-------------+------+----------------+-------+-------------- film_id | int | "NO" | | 10 | 0 | 10 | 0 category_id | int | "NO" | | 10 | 0 | 10 | 0 last_update | datetime | "NO" | (getdate()) | 3 | 0 | 10 | 0 Indexes: "" HEAP (category_id, film_id, film_id, category_id) "idx_fk_film_category_category" NONCLUSTERED (category_id) "idx_fk_film_category_film" NONCLUSTERED (film_id) "PK__film_cat__69C38A33EABC8336" PRIMARY_KEY, UNIQUE, NONCLUSTERED (film_id, category_id) BASE TABLE "dbo.film_text" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length -------------+--------------+----------+---------+------------+----------------+-------+-------------- film_id | int | "NO" | | 10 | 0 | 10 | 0 title | varchar(255) | "NO" | | 255 | 0 | 10 | 255 description | text | "YES" | | 2147483647 | 0 | 10 | 2147483647 Indexes: "" HEAP (film_id) "PK__film_tex__349764A85D245C83" PRIMARY_KEY, UNIQUE, NONCLUSTERED (film_id) VIEW "dbo.film_list" Name | Type | Nullable | Default | Size | Decimal 
Digits | Radix | Octet Length -------------+--------------+----------+---------+------------+----------------+-------+-------------- FID | int | "YES" | | 10 | 0 | 10 | 0 title | varchar(255) | "YES" | | 255 | 0 | 10 | 255 description | text | "YES" | | 2147483647 | 0 | 10 | 2147483647 category | varchar(25) | "NO" | | 25 | 0 | 10 | 25 price | decimal(4,2) | "YES" | | 4 | 2 | 10 | 0 length | smallint | "YES" | | 5 | 0 | 10 | 0 rating | varchar(10) | "YES" | | 10 | 0 | 10 | 10 actors | varchar(91) | "NO" | | 91 | 0 | 10 | 91 usql-0.19.19/drivers/testdata/sqlserver.listFuncs.expected.txt000066400000000000000000000000111476173253300245110ustar00rootroot00000000000000(0 rows) usql-0.19.19/drivers/testdata/sqlserver.listIndexes.expected.txt000066400000000000000000000125201476173253300250420ustar00rootroot00000000000000 List of indexes Schema | Name | Type | Table | Primary? | Unique? --------+--------------------------------+--------------+---------------+----------+--------- dbo | | HEAP | actor | "NO" | "NO" dbo | idx_actor_last_name | NONCLUSTERED | actor | "NO" | "NO" dbo | PK__actor__8B2447B565179537 | NONCLUSTERED | actor | "YES" | "YES" dbo | | HEAP | address | "NO" | "NO" dbo | idx_fk_city_id | NONCLUSTERED | address | "NO" | "NO" dbo | PK__address__CAA247C920A28CDC | NONCLUSTERED | address | "YES" | "YES" dbo | | HEAP | category | "NO" | "NO" dbo | PK__category__D54EE9B5C54BFE50 | NONCLUSTERED | category | "YES" | "YES" dbo | | HEAP | city | "NO" | "NO" dbo | idx_fk_country_id | NONCLUSTERED | city | "NO" | "NO" dbo | PK__city__031491A980EA569B | NONCLUSTERED | city | "YES" | "YES" dbo | | HEAP | country | "NO" | "NO" dbo | PK__country__7E8CD054CAE966BB | NONCLUSTERED | country | "YES" | "YES" dbo | | HEAP | customer | "NO" | "NO" dbo | idx_fk_address_id | NONCLUSTERED | customer | "NO" | "NO" dbo | idx_fk_store_id | NONCLUSTERED | customer | "NO" | "NO" dbo | idx_last_name | NONCLUSTERED | customer | "NO" | "NO" dbo | PK__customer__CD65CB84BB7D0A31 | NONCLUSTERED | customer | "YES" | "YES" dbo | | HEAP | film | "NO" | "NO" dbo | idx_fk_language_id | NONCLUSTERED | film | "NO" | "NO" dbo | idx_fk_original_language_id | NONCLUSTERED | film | "NO" | "NO" dbo | PK__film__349764A85F0D1F82 | NONCLUSTERED | film | "YES" | "YES" dbo | | HEAP | film_actor | "NO" | "NO" dbo | idx_fk_film_actor_actor | NONCLUSTERED | film_actor | "NO" | "NO" dbo | idx_fk_film_actor_film | NONCLUSTERED | film_actor | "NO" | "NO" dbo | PK__film_act__086D31FFE010698E | NONCLUSTERED | film_actor | "YES" | "YES" dbo | | HEAP | film_category | "NO" | "NO" dbo | idx_fk_film_category_category | NONCLUSTERED | film_category | "NO" | "NO" dbo | idx_fk_film_category_film | NONCLUSTERED | film_category | "NO" | "NO" dbo | PK__film_cat__69C38A33EABC8336 | NONCLUSTERED | film_category | "YES" | "YES" dbo | | HEAP | film_text | "NO" | "NO" dbo | PK__film_tex__349764A85D245C83 | NONCLUSTERED | film_text | "YES" | "YES" dbo | | HEAP | inventory | "NO" | "NO" dbo | idx_fk_film_id | NONCLUSTERED | inventory | "NO" | "NO" dbo | idx_fk_film_id_store_id | NONCLUSTERED | inventory | "NO" | "NO" dbo | PK__inventor__B59ACC48C0DED777 | NONCLUSTERED | inventory | "YES" | "YES" dbo | | HEAP | language | "NO" | "NO" dbo | PK__language__804CF6B2AD65E24B | NONCLUSTERED | language | "YES" | "YES" dbo | | HEAP | payment | "NO" | "NO" dbo | idx_fk_customer_id | NONCLUSTERED | payment | "NO" | "NO" dbo | idx_fk_staff_id | NONCLUSTERED | payment | "NO" | "NO" dbo | PK__payment__ED1FC9EBDD7F3474 | NONCLUSTERED | payment | "YES" | "YES" dbo | | HEAP 
| rental | "NO" | "NO" dbo | idx_fk_customer_id | NONCLUSTERED | rental | "NO" | "NO" dbo | idx_fk_inventory_id | NONCLUSTERED | rental | "NO" | "NO" dbo | idx_fk_staff_id | NONCLUSTERED | rental | "NO" | "NO" dbo | idx_uq | NONCLUSTERED | rental | "YES" | "NO" dbo | PK__rental__67DB611A79AF93E5 | NONCLUSTERED | rental | "YES" | "YES" dbo | | HEAP | staff | "NO" | "NO" dbo | idx_fk_address_id | NONCLUSTERED | staff | "NO" | "NO" dbo | idx_fk_store_id | NONCLUSTERED | staff | "NO" | "NO" dbo | PK__staff__1963DD9DFC0374BE | NONCLUSTERED | staff | "YES" | "YES" dbo | | HEAP | staff_copy | "NO" | "NO" dbo | | HEAP | store | "NO" | "NO" dbo | idx_fk_address_id | NONCLUSTERED | store | "YES" | "NO" dbo | idx_fk_store_address | NONCLUSTERED | store | "NO" | "NO" dbo | PK__store__A2F2A30D66044831 | NONCLUSTERED | store | "YES" | "YES" (57 rows) usql-0.19.19/drivers/testdata/sqlserver.listSchemas.expected.txt000066400000000000000000000001441476173253300250250ustar00rootroot00000000000000 List of schemas Schema | Catalog --------+--------- dbo | sakila guest | sakila (2 rows) usql-0.19.19/drivers/testdata/sqlserver.listTables.expected.txt000066400000000000000000000007001476173253300246520ustar00rootroot00000000000000 List of relations Schema | Name | Type | Rows | Size | Comment --------+---------------+------------+------+------+--------- dbo | film | BASE TABLE | 0 | | dbo | film_actor | BASE TABLE | 0 | | dbo | film_category | BASE TABLE | 0 | | dbo | film_text | BASE TABLE | 0 | | dbo | film_list | VIEW | 0 | | (5 rows) usql-0.19.19/drivers/testdata/trino.descTable.expected.txt000066400000000000000000000240361476173253300235470ustar00rootroot00000000000000 BASE TABLE "sf1.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 BASE TABLE "sf100.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 BASE TABLE "sf1000.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 
| 0 | 0 BASE TABLE "sf10000.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 BASE TABLE "sf100000.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 BASE TABLE "sf300.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 BASE TABLE "sf3000.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 BASE TABLE "sf30000.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 BASE TABLE "tiny.orders" Name | Type | Nullable | Default | Size | Decimal Digits | Radix | Octet Length ---------------+-------------+----------+---------+------+----------------+-------+-------------- orderkey | bigint | "YES" | | 0 | 0 | 0 | 0 custkey | bigint | "YES" | | 0 | 0 | 0 | 0 orderstatus | varchar(1) | "YES" | | 0 | 0 | 0 | 0 totalprice | double | "YES" | | 0 | 0 | 0 | 0 orderdate | 
date | "YES" | | 0 | 0 | 0 | 0 orderpriority | varchar(15) | "YES" | | 0 | 0 | 0 | 0 clerk | varchar(15) | "YES" | | 0 | 0 | 0 | 0 shippriority | integer | "YES" | | 0 | 0 | 0 | 0 comment | varchar(79) | "YES" | | 0 | 0 | 0 | 0 usql-0.19.19/drivers/testdata/trino.listSchemas.expected.txt000066400000000000000000000003471476173253300241370ustar00rootroot00000000000000 List of schemas Schema | Catalog ----------+--------- sf1 | tpch sf100 | tpch sf1000 | tpch sf10000 | tpch sf100000 | tpch sf300 | tpch sf3000 | tpch sf30000 | tpch tiny | tpch (9 rows) usql-0.19.19/drivers/testdata/trino.listTables.expected.txt000066400000000000000000000011421476173253300237600ustar00rootroot00000000000000 List of relations Schema | Name | Type | Rows | Size | Comment ----------+--------+------------+------+------+--------- sf1 | orders | BASE TABLE | 0 | | sf100 | orders | BASE TABLE | 0 | | sf1000 | orders | BASE TABLE | 0 | | sf10000 | orders | BASE TABLE | 0 | | sf100000 | orders | BASE TABLE | 0 | | sf300 | orders | BASE TABLE | 0 | | sf3000 | orders | BASE TABLE | 0 | | sf30000 | orders | BASE TABLE | 0 | | tiny | orders | BASE TABLE | 0 | | (9 rows) usql-0.19.19/drivers/trino/000077500000000000000000000000001476173253300155035ustar00rootroot00000000000000usql-0.19.19/drivers/trino/reader.go000066400000000000000000000041161476173253300172760ustar00rootroot00000000000000package trino import ( "database/sql" "fmt" "strings" "github.com/xo/usql/drivers/metadata" ) type metaReader struct { metadata.LoggingReader } var _ metadata.CatalogReader = &metaReader{} var _ metadata.ColumnStatReader = &metaReader{} func (r metaReader) Catalogs(metadata.Filter) (*metadata.CatalogSet, error) { qstr := `SHOW catalogs` rows, closeRows, err := r.Query(qstr) if err != nil { return nil, err } defer closeRows() results := []metadata.Catalog{} for rows.Next() { rec := metadata.Catalog{} err = rows.Scan(&rec.Catalog) if err != nil { return nil, err } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewCatalogSet(results), nil } func (r metaReader) ColumnStats(f metadata.Filter) (*metadata.ColumnStatSet, error) { names := []string{} if f.Catalog != "" { names = append(names, f.Catalog+".") } if f.Schema != "" { names = append(names, f.Schema+".") } names = append(names, f.Parent) rows, closeRows, err := r.Query(fmt.Sprintf("SHOW STATS FOR %s", strings.Join(names, ""))) if err != nil { return nil, err } defer closeRows() results := []metadata.ColumnStat{} for rows.Next() { rec := metadata.ColumnStat{Catalog: f.Catalog, Schema: f.Schema, Table: f.Parent} name := sql.NullString{} avgWidth := sql.NullInt32{} numDistinct := sql.NullInt64{} nullFrac := sql.NullFloat64{} numRows := sql.NullInt64{} min := sql.NullString{} max := sql.NullString{} err = rows.Scan( &name, &avgWidth, &numDistinct, &nullFrac, &numRows, &min, &max, ) if err != nil { return nil, err } if !name.Valid { continue } rec.Name = name.String if avgWidth.Valid { rec.AvgWidth = int(avgWidth.Int32) } if numDistinct.Valid { rec.NumDistinct = numDistinct.Int64 } if nullFrac.Valid { rec.NullFrac = nullFrac.Float64 } if min.Valid { rec.Min = min.String } if max.Valid { rec.Max = max.String } results = append(results, rec) } if rows.Err() != nil { return nil, rows.Err() } return metadata.NewColumnStatSet(results), nil } usql-0.19.19/drivers/trino/trino.go000066400000000000000000000033661476173253300171750ustar00rootroot00000000000000// Package trino defines and registers usql's Trino driver. 
// // See: https://github.com/trinodb/trino-go-client package trino import ( "context" "io" _ "github.com/trinodb/trino-go-client/trino" // DRIVER "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" infos "github.com/xo/usql/drivers/metadata/informationschema" ) func init() { newReader := func(db drivers.DB, opts ...metadata.ReaderOption) metadata.Reader { ir := infos.New( infos.WithPlaceholder(func(int) string { return "?" }), infos.WithCustomClauses(map[infos.ClauseName]string{ infos.ColumnsColumnSize: "0", infos.ColumnsNumericScale: "0", infos.ColumnsNumericPrecRadix: "0", infos.ColumnsCharOctetLength: "0", }), infos.WithFunctions(false), infos.WithSequences(false), infos.WithIndexes(false), infos.WithConstraints(false), infos.WithColumnPrivileges(false), infos.WithUsagePrivileges(false), )(db, opts...) mr := &metaReader{ LoggingReader: metadata.NewLoggingReader(db, opts...), } return metadata.NewPluginReader(ir, mr) } drivers.Register("trino", drivers.Driver{ AllowMultilineComments: true, Process: drivers.StripTrailingSemicolon, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string err := db.QueryRowContext( ctx, `SELECT node_version FROM system.runtime.nodes LIMIT 1`, ).Scan(&ver) if err != nil { return "", err } return "Trino " + ver, nil }, NewMetadataReader: newReader, NewMetadataWriter: func(db drivers.DB, w io.Writer, opts ...metadata.ReaderOption) metadata.Writer { return metadata.NewDefaultWriter(newReader(db, opts...))(db, w) }, Copy: drivers.CopyWithInsert(func(int) string { return "?" }), }) } usql-0.19.19/drivers/vertica/000077500000000000000000000000001476173253300160055ustar00rootroot00000000000000usql-0.19.19/drivers/vertica/vertica.go000066400000000000000000000051241476173253300177730ustar00rootroot00000000000000// Package vertica defines and registers usql's Vertica driver. 
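// // The Open func below handles DSNs that set ca_path: the PEM bundle is loaded via addCA and registered with the driver as a custom TLS config, and tlsmode=server-strict is required so the server certificate is verified against the supplied CA.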
// // See: https://github.com/vertica/vertica-sql-go package vertica import ( "context" "crypto/tls" "crypto/x509" "database/sql" "errors" "io" "net/url" "os" "regexp" "strings" vertica "github.com/vertica/vertica-sql-go" // DRIVER "github.com/vertica/vertica-sql-go/logger" "github.com/xo/dburl" "github.com/xo/usql/drivers" ) func init() { // turn off logging if os.Getenv("VERTICA_SQL_GO_LOG_LEVEL") == "" { logger.SetLogLevel(logger.NONE) } errCodeRE := regexp.MustCompile(`(?i)^\[([0-9a-z]+)\]\s+(.+)`) drivers.Register("vertica", drivers.Driver{ AllowDollar: true, AllowMultilineComments: true, Version: func(ctx context.Context, db drivers.DB) (string, error) { var ver string if err := db.QueryRowContext(ctx, `SELECT version()`).Scan(&ver); err != nil { return "", err } return ver, nil }, Open: func(_ context.Context, u *dburl.URL, stdout, stderr func() io.Writer) (func(string, string) (*sql.DB, error), error) { return func(driver, dsn string) (*sql.DB, error) { u, err := url.Parse(dsn) if err != nil { return nil, err } q := u.Query() if name := q.Get("ca_path"); name != "" { if q.Get("tlsmode") != "server-strict" { return nil, errors.New("tlsmode must be set to server-strict: ca_path is set") } cfg := &tls.Config{ ServerName: u.Hostname(), } if err := addCA(name, cfg); err != nil { return nil, err } if err := vertica.RegisterTLSConfig("custom_tls_config", cfg); err != nil { return nil, err } q.Set("tlsmode", "custom_tls_config") } return sql.Open(driver, u.String()) }, nil }, ChangePassword: func(db drivers.DB, user, newpw, _ string) error { _, err := db.Exec(`ALTER USER ` + user + ` IDENTIFIED BY '` + newpw + `'`) return err }, Err: func(err error) (string, string) { msg := strings.TrimSpace(strings.TrimPrefix(err.Error(), "Error:")) if m := errCodeRE.FindAllStringSubmatch(msg, -1); m != nil { return m[0][1], strings.TrimSpace(m[0][2]) } return "", msg }, IsPasswordErr: func(err error) bool { return strings.HasSuffix(strings.TrimSpace(err.Error()), "Invalid username or password") }, }) } // addCA adds the specified file name as a ca to the tls config. func addCA(name string, cfg *tls.Config) error { pool := x509.NewCertPool() switch pem, err := os.ReadFile(name); { case err != nil: return err case !pool.AppendCertsFromPEM(pem): return errors.New("failed to append pem to cert pool") } cfg.RootCAs = pool return nil } usql-0.19.19/drivers/voltdb/000077500000000000000000000000001476173253300156425ustar00rootroot00000000000000usql-0.19.19/drivers/voltdb/voltdb.go000066400000000000000000000005151476173253300174640ustar00rootroot00000000000000// Package voltdb defines and registers usql's VoltDB driver. // // See: https://github.com/VoltDB/voltdb-client-go package voltdb import ( _ "github.com/VoltDB/voltdb-client-go/voltdbclient" // DRIVER "github.com/xo/usql/drivers" ) func init() { drivers.Register("voltdb", drivers.Driver{ AllowMultilineComments: true, }) } usql-0.19.19/drivers/ydb/000077500000000000000000000000001476173253300151265ustar00rootroot00000000000000usql-0.19.19/drivers/ydb/ydb.go000066400000000000000000000007311476173253300162340ustar00rootroot00000000000000// Package ydb defines and registers usql's YDB driver. 
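// // Registration only customizes Err, unwrapping ydb.Error values into their numeric code and message for display.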
// // See: https://github.com/ydb-platform/ydb-go-sdk package ydb import ( "errors" "strconv" "github.com/xo/usql/drivers" "github.com/ydb-platform/ydb-go-sdk/v3" // DRIVER ) func init() { drivers.Register("ydb", drivers.Driver{ Err: func(err error) (string, string) { var e ydb.Error if errors.As(err, &e) { return strconv.Itoa(int(e.Code())), e.Error() } return "", err.Error() }, }) } usql-0.19.19/env/000077500000000000000000000000001476173253300134625ustar00rootroot00000000000000usql-0.19.19/env/env.go000066400000000000000000000203611476173253300146030ustar00rootroot00000000000000// Package env contains runtime environment variables for usql, along with // various helper funcs to determine the user's configuration. package env import ( "bytes" "io" "os" "os/exec" "os/user" "path/filepath" "regexp" "runtime" "strconv" "strings" "unicode/utf8" "github.com/kenshaw/rasterm" "github.com/xo/dburl/passfile" "github.com/xo/usql/text" ) // Getenv tries retrieving successive keys from os environment variables. func Getenv(keys ...string) (string, bool) { m := make(map[string]string) for _, v := range os.Environ() { if i := strings.Index(v, "="); i != -1 { m[v[:i]] = v[i+1:] } } for _, key := range keys { if v, ok := m[key]; ok { return v, true } } return "", false } // Chdir changes the current working directory to the specified path, or to the // user's home directory if path is not specified. func Chdir(u *user.User, path string) error { if path != "" { path = passfile.Expand(u.HomeDir, path) } else { path = u.HomeDir } return os.Chdir(path) } // OpenFile opens a file for read (os.O_RDONLY), returning the full, expanded // path of the file. Callers are responsible for closing the returned file. func OpenFile(u *user.User, path string, relative bool) (string, *os.File, error) { path, err := filepath.EvalSymlinks(passfile.Expand(u.HomeDir, path)) switch { case err != nil && os.IsNotExist(err): return "", nil, text.ErrNoSuchFileOrDirectory case err != nil: return "", nil, err } fi, err := os.Stat(path) switch { case err != nil && os.IsNotExist(err): return "", nil, text.ErrNoSuchFileOrDirectory case err != nil: return "", nil, err case fi.IsDir(): return "", nil, text.ErrCannotIncludeDirectories } f, err := os.OpenFile(path, os.O_RDONLY, 0) if err != nil { return "", nil, err } return path, f, nil } // EditFile edits a file. If path is empty, then a temporary file will be created. func EditFile(u *user.User, path, line, s string) ([]rune, error) { ed := All()["EDITOR"] if ed == "" { if p, err := exec.LookPath("vi"); err == nil { ed = p } else { return nil, text.ErrNoEditorDefined } } if path != "" { path = passfile.Expand(u.HomeDir, path) } else { f, err := os.CreateTemp("", text.CommandLower()+".*.sql") if err != nil { return nil, err } err = f.Close() if err != nil { return nil, err } path = f.Name() err = os.WriteFile(path, []byte(strings.TrimSuffix(s, "\n")+"\n"), 0o644) if err != nil { return nil, err } } // setup args args := []string{path} if line != "" { if s, ok := Getenv(text.CommandUpper() + "_EDITOR_LINENUMBER_ARG"); ok { args = append(args, s+line) } else { args = append(args, "+"+line) } } // create command c := exec.Command(ed, args...) c.Stdin = os.Stdin c.Stdout = os.Stdout c.Stderr = os.Stderr // run if err := c.Run(); err != nil { return nil, err } // read buf, err := os.ReadFile(path) if err != nil { return nil, err } return []rune(strings.TrimSuffix(string(buf), "\n")), nil } // HistoryFile returns the path to the history file. 
// Defaults to ~/.<command name>_history, overridden by environment variable // <COMMAND NAME>_HISTORY (ie, ~/.usql_history and USQL_HISTORY). func HistoryFile(u *user.User) string { n := text.CommandUpper() + "_HISTORY" path := "~/." + strings.ToLower(n) if s, ok := Getenv(n); ok { path = s } return passfile.Expand(u.HomeDir, path) } // RCFile returns the path to the RC file. // // Defaults to ~/.<command name>rc, overridden by environment variable // <COMMAND NAME>RC (ie, ~/.usqlrc and USQLRC). func RCFile(u *user.User) string { n := text.CommandUpper() + "RC" path := "~/." + strings.ToLower(n) if s, ok := Getenv(n); ok { path = s } return passfile.Expand(u.HomeDir, path) } // Getshell returns the user's defined SHELL, or system default (if found on // path) and the appropriate command-line argument for the returned shell. // // Looks at the SHELL environment variable first, and then COMSPEC/ComSpec on // Windows. Defaults to sh on non-Windows systems, and to cmd.exe on Windows. func Getshell() (string, string) { shell, ok := Getenv("SHELL") param := "-c" if !ok && runtime.GOOS == "windows" { shell, _ = Getenv("COMSPEC", "ComSpec") param = "/c" } // look up path for "cmd.exe" if no other SHELL if shell == "" && runtime.GOOS == "windows" { shell, _ = exec.LookPath("cmd.exe") if shell != "" { param = "/c" } } // lookup path for "sh" if no other SHELL if shell == "" { shell, _ = exec.LookPath("sh") if shell != "" { param = "-c" } } return shell, param } // Shell runs s as a shell. When s is empty the user's SHELL or COMSPEC is // used. See Getshell. func Shell(s string) error { shell, param := Getshell() if shell == "" { return text.ErrNoShellAvailable } s = strings.TrimSpace(s) var params []string if s != "" { params = append(params, param, s) } // drop to shell cmd := exec.Command(shell, params...) cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr _ = cmd.Run() return nil } // Pipe starts a command and returns its input for writing. func Pipe(stdout, stderr io.Writer, c string) (io.WriteCloser, *exec.Cmd, error) { shell, param := Getshell() if shell == "" { return nil, nil, text.ErrNoShellAvailable } cmd := exec.Command(shell, param, c) cmd.Stdout, cmd.Stderr = stdout, stderr out, err := cmd.StdinPipe() if err != nil { return nil, nil, err } return out, cmd, cmd.Start() } // Exec executes s using the user's SHELL / COMSPEC with -c (or /c) and // returns the captured output. See Getshell. // // When SHELL or COMSPEC is not defined, then "sh" / "cmd.exe" will be used // instead, assuming it is found on the system's PATH. func Exec(s string) (string, error) { s = strings.TrimSpace(s) if s == "" { return "", nil } shell, param := Getshell() if shell == "" { return "", text.ErrNoShellAvailable } buf, err := exec.Command(shell, param, s).CombinedOutput() if err != nil { return "", err } // remove ending \r\n buf = bytes.TrimSuffix(buf, []byte{'\n'}) buf = bytes.TrimSuffix(buf, []byte{'\r'}) return string(buf), nil } var cleanDoubleRE = regexp.MustCompile(`(^|[^\\])''`) // Dequote unquotes a string. func Dequote(s string, quote byte) (string, error) { if len(s) < 2 || s[len(s)-1] != quote { return "", text.ErrUnterminatedQuotedString } s = s[1 : len(s)-1] if quote == '\'' { s = cleanDoubleRE.ReplaceAllString(s, "$1\\'") } // this is the last part of strconv.Unquote var runeTmp [utf8.UTFMax]byte buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations.
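// Each pass below decodes one (possibly escaped) character with strconv.UnquoteChar; e.g. Dequote(`'it''s'`, '\'') first rewrites the doubled quote via cleanDoubleRE above and then decodes to it's.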
for len(s) > 0 { c, multibyte, ss, err := strconv.UnquoteChar(s, quote) switch { case err != nil && err == strconv.ErrSyntax: return "", text.ErrInvalidQuotedString case err != nil: return "", err } s = ss if c < utf8.RuneSelf || !multibyte { buf = append(buf, byte(c)) } else { n := utf8.EncodeRune(runeTmp[:], c) buf = append(buf, runeTmp[:n]...) } } return string(buf), nil } // Getvar retrieves an environment variable. func Getvar(s string, v Vars) (bool, string, error) { q, n := "", s if c := s[0]; c == '\'' || c == '"' { var err error if n, err = Dequote(s, c); err != nil { return false, "", err } q = string(c) } if val, ok := v[n]; ok { return true, q + val + q, nil } return false, s, nil } // Unquote returns a func that unquotes strings for the user. // // When exec is true, backtick'd strings will be executed using the provided // user's shell (see Exec). func Unquote(u *user.User, exec bool, v Vars) func(string, bool) (bool, string, error) { return func(s string, isvar bool) (bool, string, error) { // log.Printf(">>> UNQUOTE: %q", s) if isvar { return Getvar(s, v) } if len(s) < 2 { return false, "", text.ErrInvalidQuotedString } c := s[0] z, err := Dequote(s, c) if err != nil { return false, "", err } if c == '\'' || c == '"' { return true, z, nil } if c != '`' { return false, "", text.ErrInvalidQuotedString } if !exec { return true, z, nil } res, err := Exec(z) if err != nil { return false, "", err } return true, res, nil } } // TermGraphics returns the [rasterm.TermType] based on TERM_GRAPHICS // environment variable. func TermGraphics() rasterm.TermType { var typ rasterm.TermType _ = typ.UnmarshalText([]byte(Get("TERM_GRAPHICS"))) return typ } usql-0.19.19/env/types.go000066400000000000000000000411201476173253300151530ustar00rootroot00000000000000package env import ( "fmt" "io" "os" "os/exec" "path/filepath" "regexp" "runtime" "sort" "strconv" "strings" "time" "unicode" syslocale "github.com/jeandeaual/go-locale" "github.com/xo/terminfo" "github.com/xo/usql/text" "github.com/yookoala/realpath" ) type varName struct { name string desc string } func (v varName) String() string { return fmt.Sprintf(" %s\n %s\n", v.name, v.desc) } var varNames = []varName{ { "ECHO_HIDDEN", "if set, display internal queries executed by backslash commands; if set to \"noexec\", just show them without execution", }, { "ON_ERROR_STOP", "stop batch execution after error", }, { "PROMPT1", "specifies the standard " + text.CommandName + " prompt", }, { "QUIET", "run quietly (same as -q option)", }, { "ROW_COUNT", "number of rows returned or affected by last query, or 0", }, } var pvarNames = []varName{ { "border", "border style (number)", }, { "columns", "target width for the wrapped format", }, { "csv_fieldsep", `field separator for CSV output (default ",")`, }, { "expanded", "expanded output [on, off, auto]", }, { "fieldsep", `field separator for unaligned output (default "|")`, }, { "fieldsep_zero", "set field separator for unaligned output to a zero byte", }, { "footer", "enable or disable display of the table footer [on, off]", }, { "format", "set output format [unaligned, aligned, wrapped, vertical, html, asciidoc, csv, json, ...]", }, { "linestyle", "set the border line drawing style [ascii, old-ascii, unicode]", }, { "null", "set the string to be printed in place of a null value", }, { "numericlocale", "enable display of a locale-specific character to separate groups of digits", }, { "pager_min_lines", "minimum number of lines required in the output to use a pager, 0 to disable (default)", }, { 
"pager", "control when an external pager is used [on, off, always]", }, { "recordsep", "record (line) separator for unaligned output", }, { "recordsep_zero", "set record separator for unaligned output to a zero byte", }, { "tableattr", "specify attributes for table tag in html format, or proportional column widths for left-aligned data types in latex-longtable format", }, { "time", `format used to display time/date column values (default "RFC3339Nano")`, }, { "timezone", `the timezone to display dates in (default '')`, }, { "title", "set the table title for subsequently printed tables", }, { "tuples_only", "if set, only actual table data is shown", }, { "unicode_border_linestyle", "set the style of Unicode line drawing [single, double]", }, { "unicode_column_linestyle", "set the style of Unicode line drawing [single, double]", }, { "unicode_header_linestyle", "set the style of Unicode line drawing [single, double]", }, } var envVarNames = []varName{ { text.CommandUpper() + "_EDITOR, EDITOR, VISUAL", "editor used by the \\e, \\ef, and \\ev commands", }, { text.CommandUpper() + "_EDITOR_LINENUMBER_ARG", "how to specify a line number when invoking the editor", }, { text.CommandUpper() + "_HISTORY", "alternative location for the command history file", }, { text.CommandUpper() + "_PAGER, PAGER", "name of external pager program", }, { text.CommandUpper() + "_SHOW_HOST_INFORMATION", "display host information when connecting to a database", }, { text.CommandUpper() + "RC", "alternative location for the user's .usqlrc file", }, { text.CommandUpper() + "_SSLMODE, SSLMODE", "when set to 'retry', allows connections to attempt to reconnect when no ?sslmode= was specified on the url", }, { "SYNTAX_HL", "enable syntax highlighting", }, { "SYNTAX_HL_FORMAT", "chroma library formatter name", }, { "SYNTAX_HL_STYLE", `chroma library style name (default "monokai")`, }, { "SYNTAX_HL_OVERRIDE_BG", "enables overriding the background color of the chroma styles", }, { "TERM_GRAPHICS", `use the specified terminal graphics`, }, { "SHELL", "shell used by the \\! command", }, } // Vars is a map of variables to their values. type Vars map[string]string // Set sets a variable name. func (v Vars) Set(name, value string) { v[name] = value } // Unset unsets a variable name. func (v Vars) Unset(name string) { delete(v, name) } // All returns all variables as a map. func (v Vars) All() map[string]string { return map[string]string(v) } // vars are the environment variables. var vars Vars // pvars are the environment printing variables. var pvars Vars // cvars are the environment named connections. 
var cvars map[string][]string func init() { cmdNameUpper := strings.ToUpper(text.CommandName) // get USQL_* variables enableHostInformation := "true" if v, _ := Getenv(cmdNameUpper + "_SHOW_HOST_INFORMATION"); v != "" { enableHostInformation = v } // get NO_COLOR noColor := false if s, ok := Getenv("NO_COLOR"); ok { noColor = s != "0" && s != "false" && s != "off" } // get color level colorLevel, _ := terminfo.ColorLevelFromEnv() enableSyntaxHL := "true" if noColor || colorLevel < terminfo.ColorLevelBasic { enableSyntaxHL = "false" } // pager pagerCmd, ok := Getenv(cmdNameUpper+"_PAGER", "PAGER") pager := "off" if !ok { for _, s := range []string{"less", "more"} { if _, err := exec.LookPath(s); err == nil { pagerCmd = s break } } } if pagerCmd != "" { pager = "on" } // editor editorCmd, _ := Getenv(cmdNameUpper+"_EDITOR", "EDITOR", "VISUAL") // sslmode sslmode, ok := Getenv(cmdNameUpper+"_SSLMODE", "SSLMODE") if !ok { sslmode = "retry" } vars = Vars{ // usql related logic "SHOW_HOST_INFORMATION": enableHostInformation, "PAGER": pagerCmd, "EDITOR": editorCmd, "QUIET": "off", "ON_ERROR_STOP": "off", // prompts "PROMPT1": "%S%N%m%/%R%# ", // syntax highlighting variables "SYNTAX_HL": enableSyntaxHL, "SYNTAX_HL_FORMAT": colorLevel.ChromaFormatterName(), "SYNTAX_HL_STYLE": "monokai", "SYNTAX_HL_OVERRIDE_BG": "true", "SSLMODE": sslmode, "TERM_GRAPHICS": "none", } // determine locale locale := "en-US" if s, err := syslocale.GetLocale(); err == nil { locale = s } pvars = Vars{ "border": "1", "columns": "0", "csv_fieldsep": ",", "expanded": "off", "fieldsep": "|", "fieldsep_zero": "off", "footer": "on", "format": "aligned", "linestyle": "ascii", "locale": locale, "null": "", "numericlocale": "off", "pager_min_lines": "0", "pager": pager, "recordsep": "\n", "recordsep_zero": "off", "tableattr": "", "time": "RFC3339Nano", "timezone": "", "title": "", "tuples_only": "off", "unicode_border_linestyle": "single", "unicode_column_linestyle": "single", "unicode_header_linestyle": "single", } cvars = make(map[string][]string) } // ValidIdentifier returns an error when n is not a valid identifier. func ValidIdentifier(n string) error { r := []rune(n) rlen := len(r) if rlen < 1 { return text.ErrInvalidIdentifier } for i := 0; i < rlen; i++ { if c := r[i]; c != '_' && !unicode.IsLetter(c) && !unicode.IsNumber(c) { return text.ErrInvalidIdentifier } } return nil } // Set sets a variable. func Set(name, value string) error { if err := ValidIdentifier(name); err != nil { return err } if name == "ON_ERROR_STOP" || name == "QUIET" { if value == "" { value = "on" } else { var err error if value, err = ParseBool(value, name); err != nil { return err } } } vars.Set(name, value) return nil } // Unset unsets a variable. func Unset(name string) error { if err := ValidIdentifier(name); err != nil { return err } vars.Unset(name) return nil } // All returns all variables. func All() Vars { m := make(Vars) for k, v := range vars { m[k] = v } return m } // Pall returns all p variables. func Pall() Vars { m := make(Vars) for k, v := range pvars { m[k] = v } return m } // Pwrite writes the p variables to the writer. 
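// Output is sorted by name and padded to the widest key; separator-like values (csv_fieldsep, fieldsep, recordsep, null) and non-empty tableattr/title values are printed quoted via strconv.QuoteToASCII.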
func Pwrite(w io.Writer) error { keys := make([]string, len(pvars)) var i, width int for k := range pvars { keys[i], width = k, max(len(k), width) i++ } sort.Strings(keys) for _, k := range keys { val := pvars[k] switch k { case "csv_fieldsep", "fieldsep", "recordsep", "null": val = strconv.QuoteToASCII(val) case "tableattr", "title": if val != "" { val = strconv.QuoteToASCII(val) } } fmt.Fprintln(w, k+strings.Repeat(" ", width-len(k)), val) } return nil } var ( formatRE = regexp.MustCompile(`^(unaligned|aligned|wrapped|html|asciidoc|latex|latex-longtable|troff-ms|csv|json|vertical)$`) linestlyeRE = regexp.MustCompile(`^(ascii|old-ascii|unicode)$`) borderRE = regexp.MustCompile(`^(single|double)$`) ) func ParseBool(value, name string) (string, error) { switch strings.ToLower(value) { case "1", "t", "tr", "tru", "true", "on": return "on", nil case "0", "f", "fa", "fal", "fals", "false", "of", "off": return "off", nil } return "", fmt.Errorf(text.FormatFieldInvalidValue, value, name, "Boolean") } func ParseKeywordBool(value, name string, keywords ...string) (string, error) { v := strings.ToLower(value) switch v { case "1", "t", "tr", "tru", "true", "on": return "on", nil case "0", "f", "fa", "fal", "fals", "false", "of", "off": return "off", nil } for _, k := range keywords { if v == k { return v, nil } } return "", fmt.Errorf(text.FormatFieldInvalid, value, name) } func Get(name string) string { return vars[name] } func Pget(name string) (string, error) { v, ok := pvars[name] if !ok { return "", fmt.Errorf(text.UnknownFormatFieldName, name) } return v, nil } // Ptoggle toggles a p variable. func Ptoggle(name, extra string) (string, error) { _, ok := pvars[name] if !ok { return "", fmt.Errorf(text.UnknownFormatFieldName, name) } switch name { case "border", "columns", "pager_min_lines": case "pager": switch pvars[name] { case "on", "always": pvars[name] = "off" case "off": pvars[name] = "on" default: panic(fmt.Sprintf("invalid state for field %s", name)) } case "expanded": switch pvars[name] { case "on", "auto": pvars[name] = "off" case "off": pvars[name] = "on" default: panic(fmt.Sprintf("invalid state for field %s", name)) } case "fieldsep_zero", "footer", "numericlocale", "recordsep_zero", "tuples_only": switch pvars[name] { case "on": pvars[name] = "off" case "off": pvars[name] = "on" default: panic(fmt.Sprintf("invalid state for field %s", name)) } case "format": switch { case extra != "" && pvars[name] != extra: pvars[name] = extra case pvars[name] == "aligned": pvars[name] = "unaligned" default: pvars[name] = "aligned" } case "linestyle": case "csv_fieldsep", "fieldsep", "null", "recordsep", "time", "timezone", "locale": case "tableattr", "title": pvars[name] = "" case "unicode_border_linestyle", "unicode_column_linestyle", "unicode_header_linestyle": default: panic(fmt.Sprintf("field %s was defined in package pvars variable, but not in switch", name)) } return pvars[name], nil } // Pset sets a p variable. 
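// Values are validated per field, e.g. Pset("format", "csv") succeeds while an unrecognized format returns text.ErrInvalidFormatType, and timezone must load via time.LoadLocation.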
func Pset(name, value string) (string, error) { _, ok := pvars[name] if !ok { return "", fmt.Errorf(text.UnknownFormatFieldName, name) } switch name { case "border", "columns", "pager_min_lines": i, _ := strconv.Atoi(value) pvars[name] = fmt.Sprintf("%d", i) case "pager": s, err := ParseKeywordBool(value, name, "always") if err != nil { return "", text.ErrInvalidFormatPagerType } pvars[name] = s case "expanded": s, err := ParseKeywordBool(value, name, "auto") if err != nil { return "", text.ErrInvalidFormatExpandedType } pvars[name] = s case "fieldsep_zero", "footer", "numericlocale", "recordsep_zero", "tuples_only": s, err := ParseBool(value, name) if err != nil { return "", err } pvars[name] = s case "format": if !formatRE.MatchString(value) { return "", text.ErrInvalidFormatType } pvars[name] = value case "linestyle": if !linestlyeRE.MatchString(value) { return "", text.ErrInvalidFormatLineStyle } pvars[name] = value case "csv_fieldsep", "fieldsep", "null", "recordsep", "tableattr", "time", "title", "locale": pvars[name] = value case "timezone": if _, err := time.LoadLocation(value); err != nil { return "", text.ErrInvalidTimezoneLocation } pvars[name] = value case "unicode_border_linestyle", "unicode_column_linestyle", "unicode_header_linestyle": if !borderRE.MatchString(value) { return "", text.ErrInvalidFormatBorderLineStyle } pvars[name] = value default: panic(fmt.Sprintf("field %s was defined in package pvars variable, but not in switch", name)) } return pvars[name], nil } // Cset sets a named connection for the environment. func Cset(name string, vals ...string) error { if err := ValidIdentifier(name); err != nil { return err } if _, ok := cvars[name]; len(vals) == 0 || vals[0] == "" && ok { delete(cvars, name) } else { v := make([]string, len(vals)) copy(v, vals) cvars[name] = v } return nil } // Cget returns the environment's named connection. func Cget(name string) ([]string, bool) { vals, ok := cvars[name] if !ok { return nil, false } v := make([]string, len(vals)) copy(v, vals) return v, true } // Call returns all named connections from the environment. func Call() map[string][]string { m := make(map[string][]string, len(cvars)) for k, vals := range cvars { v := make([]string, len(vals)) copy(v, vals) m[k] = v } return m } // timeConsts are well known time consts. var timeConsts = map[string]string{ "ANSIC": time.ANSIC, "UnixDate": time.UnixDate, "RubyDate": time.RubyDate, "RFC822": time.RFC822, "RFC822Z": time.RFC822Z, "RFC850": time.RFC850, "RFC1123": time.RFC1123, "RFC1123Z": time.RFC1123Z, "RFC3339": time.RFC3339, "RFC3339Nano": time.RFC3339Nano, "Kitchen": time.Kitchen, "Stamp": time.Stamp, "StampMilli": time.StampMilli, "StampMicro": time.StampMicro, "StampNano": time.StampNano, } // GoTime returns the user's time format converted to Go's time.Format value. func GoTime() string { tfmt := pvars["time"] if s, ok := timeConsts[tfmt]; ok { return s } return tfmt } // Listing writes a formatted listing of the special environment variables to // w, separated in sections based on variable type. 
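// It also resolves the configuration file location via buildConfigDir so the USQL_CONFIG entry can show the effective default path.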
func Listing(w io.Writer) { varsWithDesc := make([]string, len(varNames)) for i, v := range varNames { varsWithDesc[i] = v.String() } pvarsWithDesc := make([]string, len(pvarNames)) for i, v := range pvarNames { pvarsWithDesc[i] = v.String() } // determine config dir name configDir, configExtra := buildConfigDir("config.yaml") // environment var names configDesc := configDir if configExtra != "" { configDesc = configExtra } ev := []varName{ { text.CommandUpper() + "_CONFIG", fmt.Sprintf(`config file path (default %q)`, configDesc), }, } envVarsWithDesc := make([]string, len(envVarNames)+1) for i, v := range append(ev, envVarNames...) { envVarsWithDesc[i] = v.String() } if configExtra != "" { configExtra = " (" + configExtra + ")" } template := `List of specially treated variables %s variables: Usage: %[1]s --set=NAME=VALUE or \set NAME VALUE inside %[1]s %[2]s Display settings: Usage: %[1]s --pset=NAME[=VALUE] or \pset NAME [VALUE] inside %[1]s %[3]s Environment variables: Usage: NAME=VALUE [NAME=VALUE] %[1]s ... or \setenv NAME [VALUE] inside %[1]s %[4]s Connection variables: Usage: %[1]s --cset NAME[=DSN] or \cset NAME [DSN] inside %[1]s or \cset NAME DRIVER PARAMS... inside %[1]s or define in %[5]s%[6]s ` fmt.Fprintf( w, template, text.CommandName, strings.TrimRightFunc(strings.Join(varsWithDesc, ""), unicode.IsSpace), strings.TrimRightFunc(strings.Join(pvarsWithDesc, ""), unicode.IsSpace), strings.TrimRightFunc(strings.Join(envVarsWithDesc, ""), unicode.IsSpace), configDir, configExtra, ) } func buildConfigDir(configName string) (string, string) { dir := `$HOME/.config/usql` switch runtime.GOOS { case "darwin": dir = `$HOME/Library/Application Support` case "windows": dir = `%AppData%\usql` } configDir, err := os.UserConfigDir() if err != nil { return filepath.Join(dir, configName), "" } if configDir, err = realpath.Realpath(configDir); err != nil { return filepath.Join(dir, configName), "" } return filepath.Join(dir, configName), filepath.Join(configDir, "usql", configName) } usql-0.19.19/gen.go000066400000000000000000000350551476173253300140010ustar00rootroot00000000000000//go:build ignore package main import ( "bytes" "errors" "flag" "fmt" "go/format" "go/parser" "go/token" "io/fs" "os" "path/filepath" "regexp" "slices" "sort" "strings" "time" "github.com/mattn/go-runewidth" "github.com/xo/dburl" "github.com/yookoala/realpath" ) type DriverInfo struct { // Tag is the build tag / name of the directory the driver lives in. Tag string // Driver is the Go SQL driver name (parsed from the import tagged with // // DRIVER: <name>), otherwise same as the tag / directory name. Driver string // Pkg is the imported driver package, taken from the import tagged with // DRIVER. Pkg string // Desc is the descriptive text of the driver, parsed from doc comment, ie, // "Package <tag> defines and registers usql's <desc> driver." Desc string // URL is the driver's reference URL, parsed from doc comment's "See: <url>". URL string // CGO is whether or not the driver requires CGO, based on presence of // 'Requires CGO.' in the comment CGO bool // Aliases are the parsed Alias: entries. Aliases [][]string // Wire indicates it is a Wire compatible driver. Wire bool // Group is the build group. Group string } // baseDrivers are drivers included in a build with no build tags listed. var baseDrivers = map[string]DriverInfo{} // mostDrivers are drivers included with the most tag. Populated below. var mostDrivers = map[string]DriverInfo{} // allDrivers are drivers forced to 'all' build tag.
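// These are compiled in only when the 'all' build tag (or the driver's own tag) is given; see the tag expressions emitted by writeInternal.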
var allDrivers = map[string]DriverInfo{} // badDrivers are drivers forced to 'bad' build tag. var badDrivers = map[string]DriverInfo{} // wireDrivers are the wire compatible drivers. var wireDrivers = map[string]DriverInfo{} func main() { licenseStart := flag.Int("license-start", 2016, "license start year") licenseAuthor := flag.String("license-author", "Kenneth Shaw", "license author") dburlGen := flag.Bool("dburl-gen", false, "enable dburl generation") dburlDir := flag.String("dburl-dir", getDburlDir(), "dburl dir") dburlLicenseStart := flag.Int("dburl-license-start", 2015, "dburl license start year") flag.Parse() if err := run(*licenseStart, *licenseAuthor, *dburlGen, *dburlDir, *dburlLicenseStart); err != nil { fmt.Fprintf(os.Stderr, "error: %v\n", err) os.Exit(1) } } func run(licenseStart int, licenseAuthor string, dburlGen bool, dburlDir string, dburlLicenseStart int) error { wd, err := os.Getwd() if err != nil { return err } if err := loadDrivers(filepath.Join(wd, "drivers")); err != nil { return err } if err := writeInternal(filepath.Join(wd, "internal"), baseDrivers, mostDrivers, allDrivers, badDrivers); err != nil { return err } if err := writeReadme(wd, true); err != nil { return err } if err := writeLicenseFiles(licenseStart, licenseAuthor); err != nil { return err } if dburlGen { if err := writeReadme(dburlDir, false); err != nil { return err } if err := writeDburlLicense(dburlDir, dburlLicenseStart, licenseAuthor); err != nil { return err } } return nil } func getDburlDir() string { dir := filepath.Join(os.Getenv("GOPATH"), "src/github.com/xo/dburl") var err error if dir, err = realpath.Realpath(dir); err != nil { panic(err) } return dir } var dirRE = regexp.MustCompile(`^([^/]+)/([^\./]+)\.go$`) // loadDrivers loads the driver descriptions. 
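// It walks the drivers/ directory, parses each <tag>/<tag>.go via parseDriverInfo, buckets the result into baseDrivers, mostDrivers, allDrivers, or badDrivers according to its group, and registers any Alias: entries as wire-compatible drivers.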
func loadDrivers(wd string) error { skipDirs := []string{"completer", "metadata"} err := fs.WalkDir(os.DirFS(wd), ".", func(n string, d fs.DirEntry, err error) error { switch { case err != nil: return err case d.IsDir(): return nil } m := dirRE.FindAllStringSubmatch(n, -1) if m == nil || m[0][1] != m[0][2] || slices.Contains(skipDirs, m[0][1]) { return nil } tag, dest := m[0][1], mostDrivers driver, err := parseDriverInfo(tag, filepath.Join(wd, n)) switch { case err != nil: return err case driver.Group == "base": dest = baseDrivers case driver.Group == "most": case driver.Group == "all": dest = allDrivers case driver.Group == "bad": dest = badDrivers default: return fmt.Errorf("driver %s has invalid group %q", tag, driver.Group) } dest[tag] = driver if dest[tag].Aliases != nil { for _, alias := range dest[tag].Aliases { wireDrivers[alias[0]] = DriverInfo{ Tag: tag, Driver: alias[0], Pkg: dest[tag].Pkg, Desc: alias[1], Wire: true, } } } return nil }) if err != nil { return err } return nil } func parseDriverInfo(tag, filename string) (DriverInfo, error) { f, err := parser.ParseFile(token.NewFileSet(), filename, nil, parser.ParseComments) if err != nil { return DriverInfo{}, err } name := tag var pkg string for _, imp := range f.Imports { if imp.Comment == nil || len(imp.Comment.List) == 0 || !strings.Contains(imp.Comment.List[0].Text, "DRIVER") { continue } pkg = imp.Path.Value[1 : len(imp.Path.Value)-1] if i := strings.Index(imp.Comment.List[0].Text, ":"); i != -1 { name = strings.TrimSpace(imp.Comment.List[0].Text[i+1:]) } break } // parse doc comment comment := f.Doc.Text() prefix := "Package " + tag + " defines and registers usql's " if !strings.HasPrefix(comment, prefix) { return DriverInfo{}, fmt.Errorf("invalid doc comment prefix for driver %q", tag) } desc := strings.TrimPrefix(comment, prefix) i := strings.Index(desc, " driver.") if i == -1 { return DriverInfo{}, fmt.Errorf("cannot find description suffix for driver %q", tag) } desc = strings.TrimSpace(desc[:i]) if desc == "" { return DriverInfo{}, fmt.Errorf("unable to parse description for driver %q", tag) } // parse alias: var aliases [][]string aliasesm := aliasRE.FindAllStringSubmatch(comment, -1) for _, m := range aliasesm { s := strings.Split(m[1], ",") aliases = append(aliases, []string{ strings.TrimSpace(s[0]), strings.TrimSpace(s[1]), }) } // parse see: url urlm := seeRE.FindAllStringSubmatch(comment, -1) if urlm == nil { return DriverInfo{}, fmt.Errorf("missing See: for driver %q", tag) } // parse group: group := "most" if groupm := groupRE.FindAllStringSubmatch(comment, -1); groupm != nil { group = strings.TrimSpace(groupm[0][1]) } return DriverInfo{ Tag: tag, Driver: name, Pkg: pkg, Desc: cleanRE.ReplaceAllString(desc, ""), URL: strings.TrimSpace(urlm[0][1]), CGO: strings.Contains(cleanRE.ReplaceAllString(comment, ""), "Requires CGO."), Aliases: aliases, Group: group, }, nil } var ( aliasRE = regexp.MustCompile(`(?m)^Alias:\s+(.*)$`) seeRE = regexp.MustCompile(`(?m)^See:\s+(.*)$`) groupRE = regexp.MustCompile(`(?m)^Group:\s+(.*)$`) cleanRE = regexp.MustCompile(`[\r\n]`) ) func writeInternal(wd string, drivers ...map[string]DriverInfo) error { // build known build tags var known []DriverInfo for _, m := range drivers { for _, v := range m { known = append(known, v) } } sort.Slice(known, func(i, j int) bool { return known[i].Tag < known[j].Tag }) knownStr := "" for _, v := range known { knownStr += fmt.Sprintf("\n%q: %q, // %s", v.Tag, v.Driver, v.Pkg) } // format and write internal.go buf, err := 
format.Source([]byte(fmt.Sprintf(internalGo, knownStr))) if err != nil { return err } if err := os.WriteFile(filepath.Join(wd, "internal.go"), buf, 0o644); err != nil { return err } // write .go for _, v := range known { var tags string switch v.Group { case "base": tags = "(!no_base || " + v.Tag + ")" case "most": tags = "(all || most || " + v.Tag + ")" case "all": tags = "(all || " + v.Tag + ")" case "bad": tags = "(bad || " + v.Tag + ")" default: panic(v.Tag) } tags += " && !no_" + v.Tag buf, err := format.Source([]byte(fmt.Sprintf(internalTagGo, tags, "github.com/xo/usql/drivers/"+v.Tag, v.Desc))) if err != nil { return err } if err := os.WriteFile(filepath.Join(wd, v.Tag+".go"), buf, 0o644); err != nil { return err } } return nil } const internalTagGo = `//go:build %s package internal // Code generated by gen.go. DO NOT EDIT. import ( _ %q // %s driver )` const internalGo = `// Package internal provides a way to obtain information about which database // drivers were included at build. package internal // Code generated by gen.go. DO NOT EDIT. // KnownBuildTags returns a map of known driver names to its respective build // tags. func KnownBuildTags() map[string]string{ return map[string]string{%s } }` const ( driverTableStart = "" driverTableEnd = "" ) func writeReadme(dir string, includeTagSummary bool) error { readme := filepath.Join(dir, "README.md") buf, err := os.ReadFile(readme) if err != nil { return err } start := bytes.Index(buf, []byte(driverTableStart)) end := bytes.Index(buf, []byte(driverTableEnd)) if start == -1 || end == -1 { return errors.New("unable to find driver table start/end in README.md") } b := new(bytes.Buffer) if _, err := b.Write(append(buf[:start+len(driverTableStart)], '\n')); err != nil { return err } if _, err := b.Write([]byte(buildDriverTable(includeTagSummary))); err != nil { return err } if _, err := b.Write(buf[end:]); err != nil { return err } return os.WriteFile(readme, b.Bytes(), 0o644) } func buildDriverTable(includeTagSummary bool) string { hdr := []string{"Database", "Scheme / Tag", "Scheme Aliases", "Driver Package / Notes"} widths := []int{len(hdr[0]), len(hdr[1]), len(hdr[2]), len(hdr[3])} baseRows, widths := buildRows(baseDrivers, widths) mostRows, widths := buildRows(mostDrivers, widths) allRows, widths := buildRows(allDrivers, widths) badRows, widths := buildRows(badDrivers, widths) wireRows, widths := buildRows(wireDrivers, widths) s := tableRows(widths, ' ', hdr) s += tableRows(widths, '-') s += tableRows(widths, ' ', baseRows...) s += tableRows(widths, ' ') s += tableRows(widths, ' ', mostRows...) s += tableRows(widths, ' ') s += tableRows(widths, ' ', allRows...) s += tableRows(widths, ' ') s += tableRows(widths, ' ', wireRows...) s += tableRows(widths, ' ') s += tableRows(widths, ' ', badRows...) 
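// optionally append the summary rows describing the no_base/most/all/bad meta build tags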
if includeTagSummary { s += tableRows(widths, ' ') s += tableRows(widths, ' ', []string{"**NO DRIVERS**", "`no_base`", "", "_no base drivers (useful for development)_"}, []string{"**MOST DRIVERS**", "`most`", "", "_all stable drivers_"}, []string{"**ALL DRIVERS**", "`all`", "", "_all drivers, excluding bad drivers_"}, []string{"**BAD DRIVERS**", "`bad`", "", "_bad drivers (broken/non-working drivers)_"}, []string{"**NO <TAG>**", "`no_`", "", "_exclude driver with ``_"}, ) } return s + "\n" + buildTableLinks(baseDrivers, mostDrivers, allDrivers, badDrivers) } var baseOrder = map[string]int{ "postgres": 0, "mysql": 1, "sqlserver": 2, "oracle": 3, "sqlite3": 4, "clickhouse": 5, "csvq": 6, } func buildRows(m map[string]DriverInfo, widths []int) ([][]string, []int) { var drivers []DriverInfo for _, v := range m { drivers = append(drivers, v) } sort.Slice(drivers, func(i, j int) bool { switch { case drivers[i].Group == "base": return baseOrder[drivers[i].Driver] < baseOrder[drivers[j].Driver] } return strings.ToLower(drivers[i].Desc) < strings.ToLower(drivers[j].Desc) }) var rows [][]string for i, v := range drivers { notes := "" if v.CGO { notes += " [†][f-cgo]" } if v.Wire { notes += " [‡][f-wire]" } rows = append(rows, []string{ v.Desc, "`" + v.Tag + "`", buildAliases(v), fmt.Sprintf("[%s][d-%s]%s", v.Pkg, v.Tag, notes), }) // calc max for j := 0; j < len(rows[i]); j++ { widths[j] = max(runewidth.StringWidth(rows[i][j]), widths[j]) } } return rows, widths } func buildAliases(v DriverInfo) string { name := v.Tag if v.Wire { name = v.Driver } _, aliases := dburl.SchemeDriverAndAliases(name) if v.Wire { aliases = append(aliases, name) } for i := 0; i < len(aliases); i++ { if !v.Wire && aliases[i] == v.Tag { aliases[i] = v.Driver } } fileTypes := dburl.FileTypes() if slices.Contains(fileTypes, name) { aliases = append(aliases, `file`) } if len(aliases) > 0 { return "`" + strings.Join(aliases, "`, `") + "`" } return "" } func tableRows(widths []int, c rune, rows ...[]string) string { padding := string(c) if len(rows) == 0 { rows = [][]string{make([]string, len(widths))} } var s string for _, row := range rows { for i := 0; i < len(row); i++ { s += "|" + padding + row[i] + strings.Repeat(padding, widths[i]-runewidth.StringWidth(row[i])) + padding } s += "|\n" } return s } func buildTableLinks(drivers ...map[string]DriverInfo) string { var d []DriverInfo for _, m := range drivers { for _, v := range m { d = append(d, v) } } sort.Slice(d, func(i, j int) bool { return d[i].Tag < d[j].Tag }) var s string for _, v := range d { s += fmt.Sprintf("[d-%s]: %s\n", v.Tag, v.URL) } return s } func writeLicenseFiles(licenseStart int, licenseAuthor string) error { s := fmt.Sprintf(license, licenseStart, time.Now().Year(), licenseAuthor) if err := os.WriteFile("LICENSE", append([]byte(s), '\n'), 0o644); err != nil { return err } textGo := fmt.Sprintf(licenseTextGo, s) if err := os.WriteFile("text/license.go", []byte(textGo), 0o644); err != nil { return err } return nil } func writeDburlLicense(dir string, licenseStart int, licenseAuthor string) error { s := fmt.Sprintf(license, licenseStart, time.Now().Year(), licenseAuthor) if err := os.WriteFile(filepath.Join(dir, "LICENSE"), append([]byte(s), '\n'), 0o644); err != nil { return err } return nil } const license = `The MIT License (MIT) Copyright (c) %d-%d %s Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including 
without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.` const licenseTextGo = `package text // Code generated by gen.go. DO NOT EDIT. // License contains the license text for usql. const License = ` + "`%s`" + ` ` usql-0.19.19/go.mod000066400000000000000000000435001476173253300140020ustar00rootroot00000000000000module github.com/xo/usql go 1.24 toolchain go1.24.0 require ( github.com/ClickHouse/clickhouse-go/v2 v2.32.2 github.com/IBM/nzgo/v12 v12.0.9 github.com/MichaelS11/go-cql-driver v0.1.1 github.com/SAP/go-hdb v1.13.3 github.com/VoltDB/voltdb-client-go v1.0.16 github.com/alecthomas/chroma/v2 v2.15.0 github.com/alexbrainman/odbc v0.0.0-20250224181725-329517659778 github.com/aliyun/aliyun-tablestore-go-sql-driver v0.0.0-20220418015234-4d337cb3eed9 github.com/amsokol/ignite-go-client v0.12.2 github.com/apache/arrow/go/v17 v17.0.0 github.com/apache/calcite-avatica-go/v5 v5.3.0 github.com/bippio/go-impala v2.1.0+incompatible github.com/btnguyen2k/gocosmos v1.1.0 github.com/btnguyen2k/godynamo v1.3.0 github.com/chaisql/chai v0.16.1-0.20240218103834-23e406360fd2 github.com/couchbase/go_n1ql v0.0.0-20220303011133-0ed4bf93e31d github.com/databricks/databricks-sql-go v1.6.1 github.com/datafuselabs/databend-go v0.7.3 github.com/docker/docker v27.5.1+incompatible github.com/exasol/exasol-driver-go v1.0.12 github.com/go-git/go-billy/v5 v5.6.2 github.com/go-sql-driver/mysql v1.9.0 github.com/gocql/gocql v1.7.0 github.com/godror/godror v0.47.0 github.com/gohxs/readline v0.0.0-20171011095936-a780388e6e7c github.com/google/go-cmp v0.7.0 github.com/google/goexpect v0.0.0-20210430020637-ab937bf7fd6f github.com/googleapis/go-sql-spanner v1.11.1 github.com/jackc/pgx/v5 v5.7.2 github.com/jeandeaual/go-locale v0.0.0-20241217141322-fcc2cadd6f08 github.com/jmrobles/h2go v0.5.0 github.com/kenshaw/colors v0.2.1 github.com/kenshaw/rasterm v0.1.12 github.com/lib/pq v1.10.9 github.com/marcboeker/go-duckdb v1.8.5 github.com/mattn/go-adodb v0.0.1 github.com/mattn/go-isatty v0.0.20 github.com/mattn/go-sqlite3 v1.14.24 github.com/microsoft/go-mssqldb v1.8.0 github.com/mithrandie/csvq v1.18.1 github.com/mithrandie/csvq-driver v1.7.0 github.com/nakagami/firebirdsql v0.9.14 github.com/ory/dockertest/v3 v3.11.0 github.com/prestodb/presto-go-client v0.0.0-20240426182841-905ac40a1783 github.com/proullon/ramsql v0.1.4 github.com/sijms/go-ora/v2 v2.8.24 github.com/snowflakedb/gosnowflake v1.13.0 github.com/spf13/cobra v1.9.1 github.com/spf13/pflag v1.0.6 github.com/spf13/viper v1.19.0 github.com/thda/tds v0.1.7 github.com/trinodb/trino-go-client v0.321.0 github.com/uber/athenadriver v1.1.15 github.com/vertica/vertica-sql-go v1.3.3 github.com/xo/dburl v0.23.3 github.com/xo/tblfmt v0.15.0 github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e 
github.com/ydb-platform/ydb-go-sdk/v3 v3.100.2 github.com/yookoala/realpath v1.0.0 github.com/ziutek/mymysql v1.5.4 golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 gorm.io/driver/bigquery v1.2.0 modernc.org/ql v1.4.11 modernc.org/sqlite v1.36.0 sqlflow.org/gohive v0.0.0-20240730014249-8960223660e2 sqlflow.org/gomaxcompute v0.0.0-20210805062559-c14ae028b44c ) require ( cel.dev/expr v0.21.2 // indirect cloud.google.com/go v0.118.3 // indirect cloud.google.com/go/auth v0.15.0 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.7 // indirect cloud.google.com/go/bigquery v1.66.2 // indirect cloud.google.com/go/compute/metadata v0.6.0 // indirect cloud.google.com/go/iam v1.4.0 // indirect cloud.google.com/go/longrunning v0.6.4 // indirect cloud.google.com/go/monitoring v1.24.0 // indirect cloud.google.com/go/spanner v1.76.1 // indirect dario.cat/mergo v1.0.0 // indirect filippo.io/edwards25519 v1.1.0 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect github.com/99designs/keyring v1.2.2 // indirect github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0 // indirect github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.4.1 // indirect github.com/BurntSushi/toml v1.4.0 // indirect github.com/ClickHouse/ch-go v0.65.1 // indirect github.com/DATA-DOG/go-sqlmock v1.5.2 // indirect github.com/DataDog/zstd v1.5.6 // indirect github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.2 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 // indirect github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c // indirect github.com/Masterminds/semver v1.5.0 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect github.com/aliyun/aliyun-tablestore-go-sdk v1.7.17 // indirect github.com/andybalholm/brotli v1.1.1 // indirect github.com/apache/arrow-go/v18 v18.1.0 // indirect github.com/apache/arrow/go/v12 v12.0.1 // indirect github.com/apache/arrow/go/v15 v15.0.2 // indirect github.com/apache/arrow/go/v16 v16.1.0 // indirect github.com/apache/thrift v0.21.0 // indirect github.com/avast/retry-go v3.0.0+incompatible // indirect github.com/aws/aws-sdk-go v1.55.6 // indirect github.com/aws/aws-sdk-go-v2 v1.36.3 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 // indirect github.com/aws/aws-sdk-go-v2/credentials v1.17.61 // indirect github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.18.6 // indirect github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.64 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34 // indirect github.com/aws/aws-sdk-go-v2/service/dynamodb v1.41.0 // indirect github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.25.0 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 // indirect github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.6.2 // indirect github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.15 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 // indirect 
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15 // indirect github.com/aws/aws-sdk-go-v2/service/s3 v1.78.0 // indirect github.com/aws/smithy-go v1.22.3 // indirect github.com/beltran/gohive v1.8.0 // indirect github.com/beltran/gosasl v1.0.0 // indirect github.com/beltran/gssapi v0.0.0-20200324152954-d86554db4bab // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bitfield/gotestdox v0.2.2 // indirect github.com/btnguyen2k/consu/checksum v1.1.1 // indirect github.com/btnguyen2k/consu/g18 v0.1.0 // indirect github.com/btnguyen2k/consu/gjrc v0.2.2 // indirect github.com/btnguyen2k/consu/olaf v0.1.3 // indirect github.com/btnguyen2k/consu/reddo v0.1.9 // indirect github.com/btnguyen2k/consu/semita v0.1.5 // indirect github.com/buger/jsonparser v1.1.1 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // indirect github.com/cockroachdb/errors v1.11.3 // indirect github.com/cockroachdb/fifo v0.0.0-20240816210425-c5d0cb0b6fc0 // indirect github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect github.com/cockroachdb/pebble v1.1.4 // indirect github.com/cockroachdb/redact v1.1.6 // indirect github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect github.com/containerd/continuity v0.4.3 // indirect github.com/containerd/log v0.1.0 // indirect github.com/coreos/go-oidc/v3 v3.12.0 // indirect github.com/couchbase/go-couchbase v0.1.1 // indirect github.com/couchbase/gomemcached v0.3.2 // indirect github.com/couchbase/goutils v0.1.2 // indirect github.com/cyphar/filepath-securejoin v0.3.6 // indirect github.com/danieljoos/wincred v1.2.2 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/distribution/reference v0.6.0 // indirect github.com/dlclark/regexp2 v1.11.5 // indirect github.com/dnephin/pflag v1.0.7 // indirect github.com/docker/cli v26.1.4+incompatible // indirect github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/dvsekhvalnov/jose2go v1.8.0 // indirect github.com/edsrzf/mmap-go v1.2.0 // indirect github.com/elastic/go-sysinfo v1.15.1 // indirect github.com/elastic/go-windows v1.0.2 // indirect github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/exasol/error-reporting-go v0.2.0 // indirect github.com/fatih/color v1.18.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.8.0 // indirect github.com/gabriel-vasile/mimetype v1.4.8 // indirect github.com/getsentry/sentry-go v0.31.1 // indirect github.com/go-faster/city v1.0.1 // indirect github.com/go-faster/errors v0.7.1 // indirect github.com/go-jose/go-jose/v4 v4.0.5 // indirect github.com/go-logfmt/logfmt v0.6.0 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.3.0 // indirect github.com/go-viper/mapstructure/v2 v2.2.1 // indirect github.com/go-zookeeper/zk v1.0.4 // indirect github.com/goccy/go-json v0.10.5 // indirect github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect github.com/godror/knownpb v0.2.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v4 v4.5.1 // indirect github.com/golang-jwt/jwt/v5 v5.2.1 // indirect github.com/golang-module/carbon/v2 
v2.4.1 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/mock v1.7.0-rc.1 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/flatbuffers/go v0.0.0-20230110200425-62e4d2e5b215 // indirect github.com/google/goterm v0.0.0-20200907032337-555d40f16ae2 // indirect github.com/google/pprof v0.0.0-20241021161924-4cf4322d492d // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.5 // indirect github.com/googleapis/gax-go/v2 v2.14.1 // indirect github.com/gorilla/websocket v1.5.3 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/icholy/digest v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect github.com/jcmturner/gofork v1.7.6 // indirect github.com/jcmturner/goidentity/v6 v6.0.1 // indirect github.com/jcmturner/gokrb5/v8 v8.4.4 // indirect github.com/jcmturner/rpc/v2 v2.0.3 // indirect github.com/jedib0t/go-pretty/v6 v6.6.7 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/jonboulle/clockwork v0.5.0 // indirect github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 // indirect github.com/klauspost/asmfmt v1.3.2 // indirect github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/cpuid/v2 v2.2.10 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect github.com/mattn/go-sixel v0.0.5 // indirect github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mithrandie/go-file/v2 v2.1.0 // indirect github.com/mithrandie/go-text v1.6.0 // indirect github.com/mithrandie/ternary v1.1.1 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/patternmatcher v0.6.0 // indirect github.com/moby/sys/sequential v0.5.0 // indirect github.com/moby/sys/user v0.1.0 // indirect github.com/moby/sys/userns v0.1.0 // indirect github.com/moby/term v0.5.0 // indirect github.com/mtibben/percent v0.2.1 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/nakagami/chacha20 v0.1.0 // indirect github.com/nathan-fiscaletti/consolesize-go 
v0.0.0-20220204101620-317176b6684d // indirect github.com/ncruces/go-strftime v0.1.9 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0 // indirect github.com/opencontainers/runc v1.1.13 // indirect github.com/paulmach/orb v0.11.1 // indirect github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pkg/errors v0.9.1 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_golang v1.21.0 // indirect github.com/prometheus/client_model v0.6.1 // indirect github.com/prometheus/common v0.62.0 // indirect github.com/prometheus/procfs v0.15.1 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/rogpeppe/go-internal v1.14.1 // indirect github.com/rs/zerolog v1.33.0 // indirect github.com/sagikazarmark/locafero v0.6.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect github.com/segmentio/asm v1.2.0 // indirect github.com/shopspring/decimal v1.4.0 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/soniakeys/quant v1.0.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spaolacci/murmur3 v1.1.0 // indirect github.com/spf13/afero v1.11.0 // indirect github.com/spf13/cast v1.7.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/stretchr/testify v1.10.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/twmb/murmur3 v1.1.8 // indirect github.com/uber-go/tally v3.5.10+incompatible // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect github.com/xwb1989/sqlparser v0.0.0-20180606152119-120387863bf2 // indirect github.com/ydb-platform/ydb-go-genproto v0.0.0-20241112172322-ea1f63298f77 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect go.opentelemetry.io/contrib/detectors/gcp v1.34.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 // indirect go.opentelemetry.io/otel v1.34.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 // indirect go.opentelemetry.io/otel/metric v1.34.0 // indirect go.opentelemetry.io/otel/sdk v1.34.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.34.0 // indirect go.opentelemetry.io/otel/trace v1.34.0 // indirect go.uber.org/atomic v1.11.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect golang.org/x/crypto v0.35.0 // indirect golang.org/x/mod v0.23.0 // indirect golang.org/x/net v0.35.0 // indirect golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sync v0.11.0 // indirect golang.org/x/sys v0.30.0 // indirect golang.org/x/term v0.29.0 // indirect golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.10.0 // indirect golang.org/x/tools v0.30.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect google.golang.org/api v0.223.0 // indirect 
google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20250303144028-a0af3efb3deb // indirect google.golang.org/grpc v1.70.0 // indirect google.golang.org/protobuf v1.36.5 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/jcmturner/aescts.v1 v1.0.1 // indirect gopkg.in/jcmturner/dnsutils.v1 v1.0.1 // indirect gopkg.in/jcmturner/gokrb5.v6 v6.1.1 // indirect gopkg.in/jcmturner/rpc.v1 v1.1.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect gotest.tools/gotestsum v1.12.0 // indirect howett.net/plist v1.0.1 // indirect modernc.org/b v1.1.0 // indirect modernc.org/db v1.0.13 // indirect modernc.org/file v1.0.9 // indirect modernc.org/fileutil v1.3.0 // indirect modernc.org/golex v1.1.0 // indirect modernc.org/internal v1.1.1 // indirect modernc.org/libc v1.61.13 // indirect modernc.org/lldb v1.0.8 // indirect modernc.org/mathutil v1.7.1 // indirect modernc.org/memory v1.8.2 // indirect modernc.org/sortutil v1.2.1 // indirect modernc.org/strutil v1.2.1 // indirect modernc.org/zappy v1.1.0 // indirect ) usql-0.19.19/go.sum000066400000000000000000007236561476173253300140500ustar00rootroot00000000000000cel.dev/expr v0.21.2 h1:o+Wj235dy4gFYlYin3JsMpp3EEfMrPm/6tdoyjT98S0= cel.dev/expr v0.21.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= cloud.google.com/go v0.87.0/go.mod 
h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= cloud.google.com/go v0.104.0/go.mod h1:OO6xxXdJyvuJPcEPBLN9BJPD+jep5G1+2U5B5gkRYtA= cloud.google.com/go v0.105.0/go.mod h1:PrLgOJNe5nfE9UMxKxgXj4mD3voiP+YQ6gdt6KMFOKM= cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I= cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY= cloud.google.com/go v0.118.3 h1:jsypSnrE/w4mJysioGdMBg4MiW/hHx/sArFpaBWHdME= cloud.google.com/go v0.118.3/go.mod h1:Lhs3YLnBlwJ4KA6nuObNMZ/fCbOQBPuWKPoE0Wa/9Vc= cloud.google.com/go/accessapproval v1.4.0/go.mod h1:zybIuC3KpDOvotz59lFe5qxRZx6C75OtwbisN56xYB4= cloud.google.com/go/accessapproval v1.5.0/go.mod h1:HFy3tuiGvMdcd/u+Cu5b9NkO1pEICJ46IR82PoUdplw= cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E= cloud.google.com/go/accesscontextmanager v1.3.0/go.mod h1:TgCBehyr5gNMz7ZaH9xubp+CE8dkrszb4oK9CWyvD4o= cloud.google.com/go/accesscontextmanager v1.4.0/go.mod h1:/Kjh7BBu/Gh83sv+K60vN9QE5NJcd80sU33vIe2IFPE= cloud.google.com/go/accesscontextmanager v1.6.0/go.mod h1:8XCvZWfYw3K/ji0iVnp+6pu7huxoQTLmxAbVjbloTtM= cloud.google.com/go/accesscontextmanager v1.7.0/go.mod h1:CEGLewx8dwa33aDAZQujl7Dx+uYhS0eay198wB/VumQ= cloud.google.com/go/aiplatform v1.22.0/go.mod h1:ig5Nct50bZlzV6NvKaTwmplLLddFx0YReh9WfTO5jKw= cloud.google.com/go/aiplatform v1.24.0/go.mod h1:67UUvRBKG6GTayHKV8DBv2RtR1t93YRu5B1P3x99mYY= cloud.google.com/go/aiplatform v1.27.0/go.mod h1:Bvxqtl40l0WImSb04d0hXFU7gDOiq9jQmorivIiWcKg= cloud.google.com/go/aiplatform v1.35.0/go.mod h1:7MFT/vCaOyZT/4IIFfxH4ErVg/4ku6lKv3w0+tFTgXQ= cloud.google.com/go/aiplatform v1.36.1/go.mod h1:WTm12vJRPARNvJ+v6P52RDHCNe4AhvjcIZ/9/RRHy/k= cloud.google.com/go/aiplatform v1.37.0/go.mod h1:IU2Cv29Lv9oCn/9LkFiiuKfwrRTq+QQMbW+hPCxJGZw= cloud.google.com/go/analytics v0.11.0/go.mod h1:DjEWCu41bVbYcKyvlws9Er60YE4a//bK6mnhWvQeFNI= cloud.google.com/go/analytics v0.12.0/go.mod h1:gkfj9h6XRf9+TS4bmuhPEShsh3hH8PAZzm/41OOhQd4= cloud.google.com/go/analytics v0.17.0/go.mod h1:WXFa3WSym4IZ+JiKmavYdJwGG/CvpqiqczmL59bTD9M= cloud.google.com/go/analytics v0.18.0/go.mod h1:ZkeHGQlcIPkw0R/GW+boWHhCOR43xz9RN/jn7WcqfIE= cloud.google.com/go/analytics v0.19.0/go.mod h1:k8liqf5/HCnOUkbawNtrWWc+UAzyDlW89doe8TtoDsE= cloud.google.com/go/apigateway v1.3.0/go.mod h1:89Z8Bhpmxu6AmUxuVRg/ECRGReEdiP3vQtk4Z1J9rJk= cloud.google.com/go/apigateway v1.4.0/go.mod h1:pHVY9MKGaH9PQ3pJ4YLzoj6U5FUDeDFBllIz7WmzJoc= cloud.google.com/go/apigateway v1.5.0/go.mod h1:GpnZR3Q4rR7LVu5951qfXPJCHquZt02jf7xQx7kpqN8= cloud.google.com/go/apigeeconnect v1.3.0/go.mod h1:G/AwXFAKo0gIXkPTVfZDd2qA1TxBXJ3MgMRBQkIi9jc= cloud.google.com/go/apigeeconnect v1.4.0/go.mod h1:kV4NwOKqjvt2JYR0AoIWo2QGfoRtn/pkS3QlHp0Ni04= cloud.google.com/go/apigeeconnect v1.5.0/go.mod 
h1:KFaCqvBRU6idyhSNyn3vlHXc8VMDJdRmwDF6JyFRqZ8= cloud.google.com/go/apigeeregistry v0.4.0/go.mod h1:EUG4PGcsZvxOXAdyEghIdXwAEi/4MEaoqLMLDMIwKXY= cloud.google.com/go/apigeeregistry v0.5.0/go.mod h1:YR5+s0BVNZfVOUkMa5pAR2xGd0A473vA5M7j247o1wM= cloud.google.com/go/apigeeregistry v0.6.0/go.mod h1:BFNzW7yQVLZ3yj0TKcwzb8n25CFBri51GVGOEUcgQsc= cloud.google.com/go/apikeys v0.4.0/go.mod h1:XATS/yqZbaBK0HOssf+ALHp8jAlNHUgyfprvNcBIszU= cloud.google.com/go/apikeys v0.5.0/go.mod h1:5aQfwY4D+ewMMWScd3hm2en3hCj+BROlyrt3ytS7KLI= cloud.google.com/go/apikeys v0.6.0/go.mod h1:kbpXu5upyiAlGkKrJgQl8A0rKNNJ7dQ377pdroRSSi8= cloud.google.com/go/appengine v1.4.0/go.mod h1:CS2NhuBuDXM9f+qscZ6V86m1MIIqPj3WC/UoEuR1Sno= cloud.google.com/go/appengine v1.5.0/go.mod h1:TfasSozdkFI0zeoxW3PTBLiNqRmzraodCWatWI9Dmak= cloud.google.com/go/appengine v1.6.0/go.mod h1:hg6i0J/BD2cKmDJbaFSYHFyZkgBEfQrDg/X0V5fJn84= cloud.google.com/go/appengine v1.7.0/go.mod h1:eZqpbHFCqRGa2aCdope7eC0SWLV1j0neb/QnMJVWx6A= cloud.google.com/go/appengine v1.7.1/go.mod h1:IHLToyb/3fKutRysUlFO0BPt5j7RiQ45nrzEJmKTo6E= cloud.google.com/go/area120 v0.5.0/go.mod h1:DE/n4mp+iqVyvxHN41Vf1CR602GiHQjFPusMFW6bGR4= cloud.google.com/go/area120 v0.6.0/go.mod h1:39yFJqWVgm0UZqWTOdqkLhjoC7uFfgXRC8g/ZegeAh0= cloud.google.com/go/area120 v0.7.0/go.mod h1:a3+8EUD1SX5RUcCs3MY5YasiO1z6yLiNLRiFrykbynY= cloud.google.com/go/area120 v0.7.1/go.mod h1:j84i4E1RboTWjKtZVWXPqvK5VHQFJRF2c1Nm69pWm9k= cloud.google.com/go/artifactregistry v1.6.0/go.mod h1:IYt0oBPSAGYj/kprzsBjZ/4LnG/zOcHyFHjWPCi6SAQ= cloud.google.com/go/artifactregistry v1.7.0/go.mod h1:mqTOFOnGZx8EtSqK/ZWcsm/4U8B77rbcLP6ruDU2Ixk= cloud.google.com/go/artifactregistry v1.8.0/go.mod h1:w3GQXkJX8hiKN0v+at4b0qotwijQbYUqF2GWkZzAhC0= cloud.google.com/go/artifactregistry v1.9.0/go.mod h1:2K2RqvA2CYvAeARHRkLDhMDJ3OXy26h3XW+3/Jh2uYc= cloud.google.com/go/artifactregistry v1.11.1/go.mod h1:lLYghw+Itq9SONbCa1YWBoWs1nOucMH0pwXN1rOBZFI= cloud.google.com/go/artifactregistry v1.11.2/go.mod h1:nLZns771ZGAwVLzTX/7Al6R9ehma4WUEhZGWV6CeQNQ= cloud.google.com/go/artifactregistry v1.12.0/go.mod h1:o6P3MIvtzTOnmvGagO9v/rOjjA0HmhJ+/6KAXrmYDCI= cloud.google.com/go/artifactregistry v1.13.0/go.mod h1:uy/LNfoOIivepGhooAUpL1i30Hgee3Cu0l4VTWHUC08= cloud.google.com/go/asset v1.5.0/go.mod h1:5mfs8UvcM5wHhqtSv8J1CtxxaQq3AdBxxQi2jGW/K4o= cloud.google.com/go/asset v1.7.0/go.mod h1:YbENsRK4+xTiL+Ofoj5Ckf+O17kJtgp3Y3nn4uzZz5s= cloud.google.com/go/asset v1.8.0/go.mod h1:mUNGKhiqIdbr8X7KNayoYvyc4HbbFO9URsjbytpUaW0= cloud.google.com/go/asset v1.9.0/go.mod h1:83MOE6jEJBMqFKadM9NLRcs80Gdw76qGuHn8m3h8oHQ= cloud.google.com/go/asset v1.10.0/go.mod h1:pLz7uokL80qKhzKr4xXGvBQXnzHn5evJAEAtZiIb0wY= cloud.google.com/go/asset v1.11.1/go.mod h1:fSwLhbRvC9p9CXQHJ3BgFeQNM4c9x10lqlrdEUYXlJo= cloud.google.com/go/asset v1.12.0/go.mod h1:h9/sFOa4eDIyKmH6QMpm4eUK3pDojWnUhTgJlk762Hg= cloud.google.com/go/asset v1.13.0/go.mod h1:WQAMyYek/b7NBpYq/K4KJWcRqzoalEsxz/t/dTk4THw= cloud.google.com/go/assuredworkloads v1.5.0/go.mod h1:n8HOZ6pff6re5KYfBXcFvSViQjDwxFkAkmUFffJRbbY= cloud.google.com/go/assuredworkloads v1.6.0/go.mod h1:yo2YOk37Yc89Rsd5QMVECvjaMKymF9OP+QXWlKXUkXw= cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVoYoxeLBoj4XkKYscNI= cloud.google.com/go/assuredworkloads v1.8.0/go.mod h1:AsX2cqyNCOvEQC8RMPnoc0yEarXQk6WEKkxYfL6kGIo= cloud.google.com/go/assuredworkloads v1.9.0/go.mod h1:kFuI1P78bplYtT77Tb1hi0FMxM0vVpRC7VVoJC3ZoT0= cloud.google.com/go/assuredworkloads v1.10.0/go.mod h1:kwdUQuXcedVdsIaKgKTp9t0UJkE5+PAVNhdQm4ZVq2E= cloud.google.com/go/auth 
v0.15.0 h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= cloud.google.com/go/auth v0.15.0/go.mod h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= cloud.google.com/go/auth/oauth2adapt v0.2.7 h1:/Lc7xODdqcEw8IrZ9SvwnlLX6j9FHQM74z6cBk9Rw6M= cloud.google.com/go/auth/oauth2adapt v0.2.7/go.mod h1:NTbTTzfvPl1Y3V1nPpOgl2w6d/FjO7NNUQaWSox6ZMc= cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0= cloud.google.com/go/automl v1.6.0/go.mod h1:ugf8a6Fx+zP0D59WLhqgTDsQI9w07o64uf/Is3Nh5p8= cloud.google.com/go/automl v1.7.0/go.mod h1:RL9MYCCsJEOmt0Wf3z9uzG0a7adTT1fe+aObgSpkCt8= cloud.google.com/go/automl v1.8.0/go.mod h1:xWx7G/aPEe/NP+qzYXktoBSDfjO+vnKMGgsApGJJquM= cloud.google.com/go/automl v1.12.0/go.mod h1:tWDcHDp86aMIuHmyvjuKeeHEGq76lD7ZqfGLN6B0NuU= cloud.google.com/go/baremetalsolution v0.3.0/go.mod h1:XOrocE+pvK1xFfleEnShBlNAXf+j5blPPxrhjKgnIFc= cloud.google.com/go/baremetalsolution v0.4.0/go.mod h1:BymplhAadOO/eBa7KewQ0Ppg4A4Wplbn+PsFKRLo0uI= cloud.google.com/go/baremetalsolution v0.5.0/go.mod h1:dXGxEkmR9BMwxhzBhV0AioD0ULBmuLZI8CdwalUxuss= cloud.google.com/go/batch v0.3.0/go.mod h1:TR18ZoAekj1GuirsUsR1ZTKN3FC/4UDnScjT8NXImFE= cloud.google.com/go/batch v0.4.0/go.mod h1:WZkHnP43R/QCGQsZ+0JyG4i79ranE2u8xvjq/9+STPE= cloud.google.com/go/batch v0.7.0/go.mod h1:vLZN95s6teRUqRQ4s3RLDsH8PvboqBK+rn1oevL159g= cloud.google.com/go/beyondcorp v0.2.0/go.mod h1:TB7Bd+EEtcw9PCPQhCJtJGjk/7TC6ckmnSFS+xwTfm4= cloud.google.com/go/beyondcorp v0.3.0/go.mod h1:E5U5lcrcXMsCuoDNyGrpyTm/hn7ne941Jz2vmksAxW8= cloud.google.com/go/beyondcorp v0.4.0/go.mod h1:3ApA0mbhHx6YImmuubf5pyW8srKnCEPON32/5hj+RmM= cloud.google.com/go/beyondcorp v0.5.0/go.mod h1:uFqj9X+dSfrheVp7ssLTaRHd2EHqSL4QZmH4e8WXGGU= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/bigquery v1.42.0/go.mod h1:8dRTJxhtG+vwBKzE5OseQn/hiydoQN3EedCaOdYmxRA= cloud.google.com/go/bigquery v1.43.0/go.mod h1:ZMQcXHsl+xmU1z36G2jNGZmKp9zNY5BUua5wDgmNCfw= cloud.google.com/go/bigquery v1.44.0/go.mod h1:0Y33VqXTEsbamHJvJHdFmtqHvMIY28aK1+dFsvaChGc= cloud.google.com/go/bigquery v1.47.0/go.mod h1:sA9XOgy0A8vQK9+MWhEQTY6Tix87M/ZurWFIxmF9I/E= cloud.google.com/go/bigquery v1.48.0/go.mod h1:QAwSz+ipNgfL5jxiaK7weyOhzdoAy1zFm0Nf1fysJac= cloud.google.com/go/bigquery v1.49.0/go.mod h1:Sv8hMmTFFYBlt/ftw2uN6dFdQPzBlREY9yBh7Oy7/4Q= cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU= cloud.google.com/go/bigquery v1.66.2 h1:EKOSqjtO7jPpJoEzDmRctGea3c2EOGoexy8VyY9dNro= cloud.google.com/go/bigquery v1.66.2/go.mod h1:+Yd6dRyW8D/FYEjUGodIbu0QaoEmgav7Lwhotup6njo= cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY= cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s= cloud.google.com/go/billing v1.6.0/go.mod h1:WoXzguj+BeHXPbKfNWkqVtDdzORazmCjraY+vrxcyvI= cloud.google.com/go/billing v1.7.0/go.mod h1:q457N3Hbj9lYwwRbnlD7vUpyjq6u5U1RAOArInEiD5Y= cloud.google.com/go/billing v1.12.0/go.mod h1:yKrZio/eu+okO/2McZEbch17O5CB5NpZhhXG6Z766ss= 
cloud.google.com/go/billing v1.13.0/go.mod h1:7kB2W9Xf98hP9Sr12KfECgfGclsH3CQR0R08tnRlRbc= cloud.google.com/go/binaryauthorization v1.1.0/go.mod h1:xwnoWu3Y84jbuHa0zd526MJYmtnVXn0syOjaJgy4+dM= cloud.google.com/go/binaryauthorization v1.2.0/go.mod h1:86WKkJHtRcv5ViNABtYMhhNWRrD1Vpi//uKEy7aYEfI= cloud.google.com/go/binaryauthorization v1.3.0/go.mod h1:lRZbKgjDIIQvzYQS1p99A7/U1JqvqeZg0wiI5tp6tg0= cloud.google.com/go/binaryauthorization v1.4.0/go.mod h1:tsSPQrBd77VLplV70GUhBf/Zm3FsKmgSqgm4UmiDItk= cloud.google.com/go/binaryauthorization v1.5.0/go.mod h1:OSe4OU1nN/VswXKRBmciKpo9LulY41gch5c68htf3/Q= cloud.google.com/go/certificatemanager v1.3.0/go.mod h1:n6twGDvcUBFu9uBgt4eYvvf3sQ6My8jADcOVwHmzadg= cloud.google.com/go/certificatemanager v1.4.0/go.mod h1:vowpercVFyqs8ABSmrdV+GiFf2H/ch3KyudYQEMM590= cloud.google.com/go/certificatemanager v1.6.0/go.mod h1:3Hh64rCKjRAX8dXgRAyOcY5vQ/fE1sh8o+Mdd6KPgY8= cloud.google.com/go/channel v1.8.0/go.mod h1:W5SwCXDJsq/rg3tn3oG0LOxpAo6IMxNa09ngphpSlnk= cloud.google.com/go/channel v1.9.0/go.mod h1:jcu05W0my9Vx4mt3/rEHpfxc9eKi9XwsdDL8yBMbKUk= cloud.google.com/go/channel v1.11.0/go.mod h1:IdtI0uWGqhEeatSB62VOoJ8FSUhJ9/+iGkJVqp74CGE= cloud.google.com/go/channel v1.12.0/go.mod h1:VkxCGKASi4Cq7TbXxlaBezonAYpp1GCnKMY6tnMQnLU= cloud.google.com/go/cloudbuild v1.3.0/go.mod h1:WequR4ULxlqvMsjDEEEFnOG5ZSRSgWOywXYDb1vPE6U= cloud.google.com/go/cloudbuild v1.4.0/go.mod h1:5Qwa40LHiOXmz3386FrjrYM93rM/hdRr7b53sySrTqA= cloud.google.com/go/cloudbuild v1.6.0/go.mod h1:UIbc/w9QCbH12xX+ezUsgblrWv+Cv4Tw83GiSMHOn9M= cloud.google.com/go/cloudbuild v1.7.0/go.mod h1:zb5tWh2XI6lR9zQmsm1VRA+7OCuve5d8S+zJUul8KTg= cloud.google.com/go/cloudbuild v1.9.0/go.mod h1:qK1d7s4QlO0VwfYn5YuClDGg2hfmLZEb4wQGAbIgL1s= cloud.google.com/go/clouddms v1.3.0/go.mod h1:oK6XsCDdW4Ib3jCCBugx+gVjevp2TMXFtgxvPSee3OM= cloud.google.com/go/clouddms v1.4.0/go.mod h1:Eh7sUGCC+aKry14O1NRljhjyrr0NFC0G2cjwX0cByRk= cloud.google.com/go/clouddms v1.5.0/go.mod h1:QSxQnhikCLUw13iAbffF2CZxAER3xDGNHjsTAkQJcQA= cloud.google.com/go/cloudtasks v1.5.0/go.mod h1:fD92REy1x5woxkKEkLdvavGnPJGEn8Uic9nWuLzqCpY= cloud.google.com/go/cloudtasks v1.6.0/go.mod h1:C6Io+sxuke9/KNRkbQpihnW93SWDU3uXt92nu85HkYI= cloud.google.com/go/cloudtasks v1.7.0/go.mod h1:ImsfdYWwlWNJbdgPIIGJWC+gemEGTBK/SunNQQNCAb4= cloud.google.com/go/cloudtasks v1.8.0/go.mod h1:gQXUIwCSOI4yPVK7DgTVFiiP0ZW/eQkydWzwVMdHxrI= cloud.google.com/go/cloudtasks v1.9.0/go.mod h1:w+EyLsVkLWHcOaqNEyvcKAsWp9p29dL6uL9Nst1cI7Y= cloud.google.com/go/cloudtasks v1.10.0/go.mod h1:NDSoTLkZ3+vExFEWu2UJV1arUyzVDAiZtdWcsUyNwBs= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= cloud.google.com/go/compute v1.10.0/go.mod h1:ER5CLbMxl90o2jtNbGSbtfOpQKR0t15FOtRsugnLrlU= cloud.google.com/go/compute v1.12.0/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= cloud.google.com/go/compute v1.12.1/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= cloud.google.com/go/compute v1.13.0/go.mod h1:5aPTS0cUNMIc1CE546K+Th6weJUNQErARyZtRXDJ8GE= cloud.google.com/go/compute v1.14.0/go.mod 
h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvjxega5vAdo= cloud.google.com/go/compute v1.15.1/go.mod h1:bjjoF/NtFUrkD/urWfdHaKuOPDR5nWIs63rR+SXhcpA= cloud.google.com/go/compute v1.18.0/go.mod h1:1X7yHxec2Ga+Ss6jPyjxRxpu2uu7PLgsOVXvgU0yacs= cloud.google.com/go/compute v1.19.0/go.mod h1:rikpw2y+UMidAe9tISo04EHNOIf42RLYF/q8Bs93scU= cloud.google.com/go/compute v1.19.1/go.mod h1:6ylj3a05WF8leseCdIf77NK0g1ey+nj5IKd5/kvShxE= cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZEXYonfTBHHFPO/4UU= cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= cloud.google.com/go/container v1.6.0/go.mod h1:Xazp7GjJSeUYo688S+6J5V+n/t+G5sKBTFkKNudGRxg= cloud.google.com/go/container v1.7.0/go.mod h1:Dp5AHtmothHGX3DwwIHPgq45Y8KmNsgN3amoYfxVkLo= cloud.google.com/go/container v1.13.1/go.mod h1:6wgbMPeQRw9rSnKBCAJXnds3Pzj03C4JHamr8asWKy4= cloud.google.com/go/container v1.14.0/go.mod h1:3AoJMPhHfLDxLvrlVWaK57IXzaPnLaZq63WX59aQBfM= cloud.google.com/go/container v1.15.0/go.mod h1:ft+9S0WGjAyjDggg5S06DXj+fHJICWg8L7isCQe9pQA= cloud.google.com/go/containeranalysis v0.5.1/go.mod h1:1D92jd8gRR/c0fGMlymRgxWD3Qw9C1ff6/T7mLgVL8I= cloud.google.com/go/containeranalysis v0.6.0/go.mod h1:HEJoiEIu+lEXM+k7+qLCci0h33lX3ZqoYFdmPcoO7s4= cloud.google.com/go/containeranalysis v0.7.0/go.mod h1:9aUL+/vZ55P2CXfuZjS4UjQ9AgXoSw8Ts6lemfmxBxI= cloud.google.com/go/containeranalysis v0.9.0/go.mod h1:orbOANbwk5Ejoom+s+DUCTTJ7IBdBQJDcSylAx/on9s= cloud.google.com/go/datacatalog v1.3.0/go.mod h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0= cloud.google.com/go/datacatalog v1.5.0/go.mod h1:M7GPLNQeLfWqeIm3iuiruhPzkt65+Bx8dAKvScX8jvs= cloud.google.com/go/datacatalog v1.6.0/go.mod h1:+aEyF8JKg+uXcIdAmmaMUmZ3q1b/lKLtXCmXdnc0lbc= cloud.google.com/go/datacatalog v1.7.0/go.mod h1:9mEl4AuDYWw81UGc41HonIHH7/sn52H0/tc8f8ZbZIE= cloud.google.com/go/datacatalog v1.8.0/go.mod h1:KYuoVOv9BM8EYz/4eMFxrr4DUKhGIOXxZoKYF5wdISM= cloud.google.com/go/datacatalog v1.8.1/go.mod h1:RJ58z4rMp3gvETA465Vg+ag8BGgBdnRPEMMSTr5Uv+M= cloud.google.com/go/datacatalog v1.12.0/go.mod h1:CWae8rFkfp6LzLumKOnmVh4+Zle4A3NXLzVJ1d1mRm0= cloud.google.com/go/datacatalog v1.13.0/go.mod h1:E4Rj9a5ZtAxcQJlEBTLgMTphfP11/lNaAshpoBgemX8= cloud.google.com/go/datacatalog v1.24.3 h1:3bAfstDB6rlHyK0TvqxEwaeOvoN9UgCs2bn03+VXmss= cloud.google.com/go/datacatalog v1.24.3/go.mod h1:Z4g33XblDxWGHngDzcpfeOU0b1ERlDPTuQoYG6NkF1s= cloud.google.com/go/dataflow v0.6.0/go.mod h1:9QwV89cGoxjjSR9/r7eFDqqjtvbKxAK2BaYU6PVk9UM= cloud.google.com/go/dataflow v0.7.0/go.mod h1:PX526vb4ijFMesO1o202EaUmouZKBpjHsTlCtB4parQ= cloud.google.com/go/dataflow v0.8.0/go.mod h1:Rcf5YgTKPtQyYz8bLYhFoIV/vP39eL7fWNcSOyFfLJE= cloud.google.com/go/dataform v0.3.0/go.mod h1:cj8uNliRlHpa6L3yVhDOBrUXH+BPAO1+KFMQQNSThKo= cloud.google.com/go/dataform v0.4.0/go.mod 
h1:fwV6Y4Ty2yIFL89huYlEkwUPtS7YZinZbzzj5S9FzCE= cloud.google.com/go/dataform v0.5.0/go.mod h1:GFUYRe8IBa2hcomWplodVmUx/iTL0FrsauObOM3Ipr0= cloud.google.com/go/dataform v0.6.0/go.mod h1:QPflImQy33e29VuapFdf19oPbE4aYTJxr31OAPV+ulA= cloud.google.com/go/dataform v0.7.0/go.mod h1:7NulqnVozfHvWUBpMDfKMUESr+85aJsC/2O0o3jWPDE= cloud.google.com/go/datafusion v1.4.0/go.mod h1:1Zb6VN+W6ALo85cXnM1IKiPw+yQMKMhB9TsTSRDo/38= cloud.google.com/go/datafusion v1.5.0/go.mod h1:Kz+l1FGHB0J+4XF2fud96WMmRiq/wj8N9u007vyXZ2w= cloud.google.com/go/datafusion v1.6.0/go.mod h1:WBsMF8F1RhSXvVM8rCV3AeyWVxcC2xY6vith3iw3S+8= cloud.google.com/go/datalabeling v0.5.0/go.mod h1:TGcJ0G2NzcsXSE/97yWjIZO0bXj0KbVlINXMG9ud42I= cloud.google.com/go/datalabeling v0.6.0/go.mod h1:WqdISuk/+WIGeMkpw/1q7bK/tFEZxsrFJOJdY2bXvTQ= cloud.google.com/go/datalabeling v0.7.0/go.mod h1:WPQb1y08RJbmpM3ww0CSUAGweL0SxByuW2E+FU+wXcM= cloud.google.com/go/dataplex v1.3.0/go.mod h1:hQuRtDg+fCiFgC8j0zV222HvzFQdRd+SVX8gdmFcZzA= cloud.google.com/go/dataplex v1.4.0/go.mod h1:X51GfLXEMVJ6UN47ESVqvlsRplbLhcsAt0kZCCKsU0A= cloud.google.com/go/dataplex v1.5.2/go.mod h1:cVMgQHsmfRoI5KFYq4JtIBEUbYwc3c7tXmIDhRmNNVQ= cloud.google.com/go/dataplex v1.6.0/go.mod h1:bMsomC/aEJOSpHXdFKFGQ1b0TDPIeL28nJObeO1ppRs= cloud.google.com/go/dataproc v1.7.0/go.mod h1:CKAlMjII9H90RXaMpSxQ8EU6dQx6iAYNPcYPOkSbi8s= cloud.google.com/go/dataproc v1.8.0/go.mod h1:5OW+zNAH0pMpw14JVrPONsxMQYMBqJuzORhIBfBn9uI= cloud.google.com/go/dataproc v1.12.0/go.mod h1:zrF3aX0uV3ikkMz6z4uBbIKyhRITnxvr4i3IjKsKrw4= cloud.google.com/go/dataqna v0.5.0/go.mod h1:90Hyk596ft3zUQ8NkFfvICSIfHFh1Bc7C4cK3vbhkeo= cloud.google.com/go/dataqna v0.6.0/go.mod h1:1lqNpM7rqNLVgWBJyk5NF6Uen2PHym0jtVJonplVsDA= cloud.google.com/go/dataqna v0.7.0/go.mod h1:Lx9OcIIeqCrw1a6KdO3/5KMP1wAmTc0slZWwP12Qq3c= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/datastore v1.10.0/go.mod h1:PC5UzAmDEkAmkfaknstTYbNpgE49HAgW2J1gcgUfmdM= cloud.google.com/go/datastore v1.11.0/go.mod h1:TvGxBIHCS50u8jzG+AW/ppf87v1of8nwzFNgEZU1D3c= cloud.google.com/go/datastream v1.2.0/go.mod h1:i/uTP8/fZwgATHS/XFu0TcNUhuA0twZxxQ3EyCUQMwo= cloud.google.com/go/datastream v1.3.0/go.mod h1:cqlOX8xlyYF/uxhiKn6Hbv6WjwPPuI9W2M9SAXwaLLQ= cloud.google.com/go/datastream v1.4.0/go.mod h1:h9dpzScPhDTs5noEMQVWP8Wx8AFBRyS0s8KWPx/9r0g= cloud.google.com/go/datastream v1.5.0/go.mod h1:6TZMMNPwjUqZHBKPQ1wwXpb0d5VDVPl2/XoS5yi88q4= cloud.google.com/go/datastream v1.6.0/go.mod h1:6LQSuswqLa7S4rPAOZFVjHIG3wJIjZcZrw8JDEDJuIs= cloud.google.com/go/datastream v1.7.0/go.mod h1:uxVRMm2elUSPuh65IbZpzJNMbuzkcvu5CjMqVIUHrww= cloud.google.com/go/deploy v1.4.0/go.mod h1:5Xghikd4VrmMLNaF6FiRFDlHb59VM59YoDQnOUdsH/c= cloud.google.com/go/deploy v1.5.0/go.mod h1:ffgdD0B89tToyW/U/D2eL0jN2+IEV/3EMuXHA0l4r+s= cloud.google.com/go/deploy v1.6.0/go.mod h1:f9PTHehG/DjCom3QH0cntOVRm93uGBDt2vKzAPwpXQI= cloud.google.com/go/deploy v1.8.0/go.mod h1:z3myEJnA/2wnB4sgjqdMfgxCA0EqC3RBTNcVPs93mtQ= cloud.google.com/go/dialogflow v1.15.0/go.mod h1:HbHDWs33WOGJgn6rfzBW1Kv807BE3O1+xGbn59zZWI4= cloud.google.com/go/dialogflow v1.16.1/go.mod h1:po6LlzGfK+smoSmTBnbkIZY2w8ffjz/RcGSS+sh1el0= cloud.google.com/go/dialogflow v1.17.0/go.mod h1:YNP09C/kXA1aZdBgC/VtXX74G/TKn7XVCcVumTflA+8= cloud.google.com/go/dialogflow v1.18.0/go.mod h1:trO7Zu5YdyEuR+BhSNOqJezyFQ3aUzz0njv7sMx/iek= cloud.google.com/go/dialogflow v1.19.0/go.mod h1:JVmlG1TwykZDtxtTXujec4tQ+D8SBFMoosgy+6Gn0s0= 
cloud.google.com/go/dialogflow v1.29.0/go.mod h1:b+2bzMe+k1s9V+F2jbJwpHPzrnIyHihAdRFMtn2WXuM= cloud.google.com/go/dialogflow v1.31.0/go.mod h1:cuoUccuL1Z+HADhyIA7dci3N5zUssgpBJmCzI6fNRB4= cloud.google.com/go/dialogflow v1.32.0/go.mod h1:jG9TRJl8CKrDhMEcvfcfFkkpp8ZhgPz3sBGmAUYJ2qE= cloud.google.com/go/dlp v1.6.0/go.mod h1:9eyB2xIhpU0sVwUixfBubDoRwP+GjeUoxxeueZmqvmM= cloud.google.com/go/dlp v1.7.0/go.mod h1:68ak9vCiMBjbasxeVD17hVPxDEck+ExiHavX8kiHG+Q= cloud.google.com/go/dlp v1.9.0/go.mod h1:qdgmqgTyReTz5/YNSSuueR8pl7hO0o9bQ39ZhtgkWp4= cloud.google.com/go/documentai v1.7.0/go.mod h1:lJvftZB5NRiFSX4moiye1SMxHx0Bc3x1+p9e/RfXYiU= cloud.google.com/go/documentai v1.8.0/go.mod h1:xGHNEB7CtsnySCNrCFdCyyMz44RhFEEX2Q7UD0c5IhU= cloud.google.com/go/documentai v1.9.0/go.mod h1:FS5485S8R00U10GhgBC0aNGrJxBP8ZVpEeJ7PQDZd6k= cloud.google.com/go/documentai v1.10.0/go.mod h1:vod47hKQIPeCfN2QS/jULIvQTugbmdc0ZvxxfQY1bg4= cloud.google.com/go/documentai v1.16.0/go.mod h1:o0o0DLTEZ+YnJZ+J4wNfTxmDVyrkzFvttBXXtYRMHkM= cloud.google.com/go/documentai v1.18.0/go.mod h1:F6CK6iUH8J81FehpskRmhLq/3VlwQvb7TvwOceQ2tbs= cloud.google.com/go/domains v0.6.0/go.mod h1:T9Rz3GasrpYk6mEGHh4rymIhjlnIuB4ofT1wTxDeT4Y= cloud.google.com/go/domains v0.7.0/go.mod h1:PtZeqS1xjnXuRPKE/88Iru/LdfoRyEHYA9nFQf4UKpg= cloud.google.com/go/domains v0.8.0/go.mod h1:M9i3MMDzGFXsydri9/vW+EWz9sWb4I6WyHqdlAk0idE= cloud.google.com/go/edgecontainer v0.1.0/go.mod h1:WgkZ9tp10bFxqO8BLPqv2LlfmQF1X8lZqwW4r1BTajk= cloud.google.com/go/edgecontainer v0.2.0/go.mod h1:RTmLijy+lGpQ7BXuTDa4C4ssxyXT34NIuHIgKuP4s5w= cloud.google.com/go/edgecontainer v0.3.0/go.mod h1:FLDpP4nykgwwIfcLt6zInhprzw0lEi2P1fjO6Ie0qbc= cloud.google.com/go/edgecontainer v1.0.0/go.mod h1:cttArqZpBB2q58W/upSG++ooo6EsblxDIolxa3jSjbY= cloud.google.com/go/errorreporting v0.3.0/go.mod h1:xsP2yaAp+OAW4OIm60An2bbLpqIhKXdWR/tawvl7QzU= cloud.google.com/go/essentialcontacts v1.3.0/go.mod h1:r+OnHa5jfj90qIfZDO/VztSFqbQan7HV75p8sA+mdGI= cloud.google.com/go/essentialcontacts v1.4.0/go.mod h1:8tRldvHYsmnBCHdFpvU+GL75oWiBKl80BiqlFh9tp+8= cloud.google.com/go/essentialcontacts v1.5.0/go.mod h1:ay29Z4zODTuwliK7SnX8E86aUF2CTzdNtvv42niCX0M= cloud.google.com/go/eventarc v1.7.0/go.mod h1:6ctpF3zTnaQCxUjHUdcfgcA1A2T309+omHZth7gDfmc= cloud.google.com/go/eventarc v1.8.0/go.mod h1:imbzxkyAU4ubfsaKYdQg04WS1NvncblHEup4kvF+4gw= cloud.google.com/go/eventarc v1.10.0/go.mod h1:u3R35tmZ9HvswGRBnF48IlYgYeBcPUCjkr4BTdem2Kw= cloud.google.com/go/eventarc v1.11.0/go.mod h1:PyUjsUKPWoRBCHeOxZd/lbOOjahV41icXyUY5kSTvVY= cloud.google.com/go/filestore v1.3.0/go.mod h1:+qbvHGvXU1HaKX2nD0WEPo92TP/8AQuCVEBXNY9z0+w= cloud.google.com/go/filestore v1.4.0/go.mod h1:PaG5oDfo9r224f8OYXURtAsY+Fbyq/bLYoINEK8XQAI= cloud.google.com/go/filestore v1.5.0/go.mod h1:FqBXDWBp4YLHqRnVGveOkHDf8svj9r5+mUDLupOWEDs= cloud.google.com/go/filestore v1.6.0/go.mod h1:di5unNuss/qfZTw2U9nhFqo8/ZDSc466dre85Kydllg= cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE= cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk= cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg= cloud.google.com/go/functions v1.8.0/go.mod h1:RTZ4/HsQjIqIYP9a9YPbU+QFoQsAlYgrwOXJWHn1POY= cloud.google.com/go/functions v1.9.0/go.mod h1:Y+Dz8yGguzO3PpIjhLTbnqV1CWmgQ5UwtlpzoyquQ08= cloud.google.com/go/functions v1.10.0/go.mod h1:0D3hEOe3DbEvCXtYOZHQZmD+SzYsi1YbI7dGvHfldXw= cloud.google.com/go/functions v1.12.0/go.mod h1:AXWGrF3e2C/5ehvwYo/GH6O5s09tOPksiKhz+hH8WkA= 
cloud.google.com/go/functions v1.13.0/go.mod h1:EU4O007sQm6Ef/PwRsI8N2umygGqPBS/IZQKBQBcJ3c= cloud.google.com/go/gaming v1.5.0/go.mod h1:ol7rGcxP/qHTRQE/RO4bxkXq+Fix0j6D4LFPzYTIrDM= cloud.google.com/go/gaming v1.6.0/go.mod h1:YMU1GEvA39Qt3zWGyAVA9bpYz/yAhTvaQ1t2sK4KPUA= cloud.google.com/go/gaming v1.7.0/go.mod h1:LrB8U7MHdGgFG851iHAfqUdLcKBdQ55hzXy9xBJz0+w= cloud.google.com/go/gaming v1.8.0/go.mod h1:xAqjS8b7jAVW0KFYeRUxngo9My3f33kFmua++Pi+ggM= cloud.google.com/go/gaming v1.9.0/go.mod h1:Fc7kEmCObylSWLO334NcO+O9QMDyz+TKC4v1D7X+Bc0= cloud.google.com/go/gkebackup v0.2.0/go.mod h1:XKvv/4LfG829/B8B7xRkk8zRrOEbKtEam6yNfuQNH60= cloud.google.com/go/gkebackup v0.3.0/go.mod h1:n/E671i1aOQvUxT541aTkCwExO/bTer2HDlj4TsBRAo= cloud.google.com/go/gkebackup v0.4.0/go.mod h1:byAyBGUwYGEEww7xsbnUTBHIYcOPy/PgUWUtOeRm9Vg= cloud.google.com/go/gkeconnect v0.5.0/go.mod h1:c5lsNAg5EwAy7fkqX/+goqFsU1Da/jQFqArp+wGNr/o= cloud.google.com/go/gkeconnect v0.6.0/go.mod h1:Mln67KyU/sHJEBY8kFZ0xTeyPtzbq9StAVvEULYK16A= cloud.google.com/go/gkeconnect v0.7.0/go.mod h1:SNfmVqPkaEi3bF/B3CNZOAYPYdg7sU+obZ+QTky2Myw= cloud.google.com/go/gkehub v0.9.0/go.mod h1:WYHN6WG8w9bXU0hqNxt8rm5uxnk8IH+lPY9J2TV7BK0= cloud.google.com/go/gkehub v0.10.0/go.mod h1:UIPwxI0DsrpsVoWpLB0stwKCP+WFVG9+y977wO+hBH0= cloud.google.com/go/gkehub v0.11.0/go.mod h1:JOWHlmN+GHyIbuWQPl47/C2RFhnFKH38jH9Ascu3n0E= cloud.google.com/go/gkehub v0.12.0/go.mod h1:djiIwwzTTBrF5NaXCGv3mf7klpEMcST17VBTVVDcuaw= cloud.google.com/go/gkemulticloud v0.3.0/go.mod h1:7orzy7O0S+5kq95e4Hpn7RysVA7dPs8W/GgfUtsPbrA= cloud.google.com/go/gkemulticloud v0.4.0/go.mod h1:E9gxVBnseLWCk24ch+P9+B2CoDFJZTyIgLKSalC7tuI= cloud.google.com/go/gkemulticloud v0.5.0/go.mod h1:W0JDkiyi3Tqh0TJr//y19wyb1yf8llHVto2Htf2Ja3Y= cloud.google.com/go/grafeas v0.2.0/go.mod h1:KhxgtF2hb0P191HlY5besjYm6MqTSTj3LSI+M+ByZHc= cloud.google.com/go/gsuiteaddons v1.3.0/go.mod h1:EUNK/J1lZEZO8yPtykKxLXI6JSVN2rg9bN8SXOa0bgM= cloud.google.com/go/gsuiteaddons v1.4.0/go.mod h1:rZK5I8hht7u7HxFQcFei0+AtfS9uSushomRlg+3ua1o= cloud.google.com/go/gsuiteaddons v1.5.0/go.mod h1:TFCClYLd64Eaa12sFVmUyG62tk4mdIsI7pAnSXRkcFo= cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= cloud.google.com/go/iam v0.5.0/go.mod h1:wPU9Vt0P4UmCux7mqtRu6jcpPAb74cP1fh50J3QpkUc= cloud.google.com/go/iam v0.6.0/go.mod h1:+1AH33ueBne5MzYccyMHtEKqLE4/kJOibtffMHDMFMc= cloud.google.com/go/iam v0.7.0/go.mod h1:H5Br8wRaDGNc8XP3keLc4unfUUZeyH3Sfl9XpQEYOeg= cloud.google.com/go/iam v0.8.0/go.mod h1:lga0/y3iH6CX7sYqypWJ33hf7kkfXJag67naqGESjkE= cloud.google.com/go/iam v0.11.0/go.mod h1:9PiLDanza5D+oWFZiH1uG+RnRCfEGKoyl6yo4cgWZGY= cloud.google.com/go/iam v0.12.0/go.mod h1:knyHGviacl11zrtZUoDuYpDgLjvr28sLQaG0YB2GYAY= cloud.google.com/go/iam v0.13.0/go.mod h1:ljOg+rcNfzZ5d6f1nAUJ8ZIxOaZUVoS14bKCtaLZ/D0= cloud.google.com/go/iam v1.4.0 h1:ZNfy/TYfn2uh/ukvhp783WhnbVluqf/tzOaqVUPlIPA= cloud.google.com/go/iam v1.4.0/go.mod h1:gMBgqPaERlriaOV0CUl//XUzDhSfXevn4OEUbg6VRs4= cloud.google.com/go/iap v1.4.0/go.mod h1:RGFwRJdihTINIe4wZ2iCP0zF/qu18ZwyKxrhMhygBEc= cloud.google.com/go/iap v1.5.0/go.mod h1:UH/CGgKd4KyohZL5Pt0jSKE4m3FR51qg6FKQ/z/Ix9A= cloud.google.com/go/iap v1.6.0/go.mod h1:NSuvI9C/j7UdjGjIde7t7HBz+QTwBcapPE07+sSRcLk= cloud.google.com/go/iap v1.7.0/go.mod h1:beqQx56T9O1G1yNPph+spKpNibDlYIiIixiqsQXxLIo= cloud.google.com/go/iap v1.7.1/go.mod h1:WapEwPc7ZxGt2jFGB/C/bm+hP0Y6NXzOYGjpPnmMS74= cloud.google.com/go/ids v1.1.0/go.mod 
h1:WIuwCaYVOzHIj2OhN9HAwvW+DBdmUAdcWlFxRl+KubM= cloud.google.com/go/ids v1.2.0/go.mod h1:5WXvp4n25S0rA/mQWAg1YEEBBq6/s+7ml1RDCW1IrcY= cloud.google.com/go/ids v1.3.0/go.mod h1:JBdTYwANikFKaDP6LtW5JAi4gubs57SVNQjemdt6xV4= cloud.google.com/go/iot v1.3.0/go.mod h1:r7RGh2B61+B8oz0AGE+J72AhA0G7tdXItODWsaA2oLs= cloud.google.com/go/iot v1.4.0/go.mod h1:dIDxPOn0UvNDUMD8Ger7FIaTuvMkj+aGk94RPP0iV+g= cloud.google.com/go/iot v1.5.0/go.mod h1:mpz5259PDl3XJthEmh9+ap0affn/MqNSP4My77Qql9o= cloud.google.com/go/iot v1.6.0/go.mod h1:IqdAsmE2cTYYNO1Fvjfzo9po179rAtJeVGUvkLN3rLE= cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= cloud.google.com/go/kms v1.5.0/go.mod h1:QJS2YY0eJGBg3mnDfuaCyLauWwBJiHRboYxJ++1xJNg= cloud.google.com/go/kms v1.6.0/go.mod h1:Jjy850yySiasBUDi6KFUwUv2n1+o7QZFyuUJg6OgjA0= cloud.google.com/go/kms v1.8.0/go.mod h1:4xFEhYFqvW+4VMELtZyxomGSYtSQKzM178ylFW4jMAg= cloud.google.com/go/kms v1.9.0/go.mod h1:qb1tPTgfF9RQP8e1wq4cLFErVuTJv7UsSC915J8dh3w= cloud.google.com/go/kms v1.10.0/go.mod h1:ng3KTUtQQU9bPX3+QGLsflZIHlkbn8amFAMY63m8d24= cloud.google.com/go/kms v1.10.1/go.mod h1:rIWk/TryCkR59GMC3YtHtXeLzd634lBbKenvyySAyYI= cloud.google.com/go/language v1.4.0/go.mod h1:F9dRpNFQmJbkaop6g0JhSBXCNlO90e1KWx5iDdxbWic= cloud.google.com/go/language v1.6.0/go.mod h1:6dJ8t3B+lUYfStgls25GusK04NLh3eDLQnWM3mdEbhI= cloud.google.com/go/language v1.7.0/go.mod h1:DJ6dYN/W+SQOjF8e1hLQXMF21AkH2w9wiPzPCJa2MIE= cloud.google.com/go/language v1.8.0/go.mod h1:qYPVHf7SPoNNiCL2Dr0FfEFNil1qi3pQEyygwpgVKB8= cloud.google.com/go/language v1.9.0/go.mod h1:Ns15WooPM5Ad/5no/0n81yUetis74g3zrbeJBE+ptUY= cloud.google.com/go/lifesciences v0.5.0/go.mod h1:3oIKy8ycWGPUyZDR/8RNnTOYevhaMLqh5vLUXs9zvT8= cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6t/iPhY2Tyfu08= cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo= cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw= cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M= cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE= cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc= cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo= cloud.google.com/go/longrunning v0.6.4 h1:3tyw9rO3E2XVXzSApn1gyEEnH2K9SynNQjMlBi3uHLg= cloud.google.com/go/longrunning v0.6.4/go.mod h1:ttZpLCe6e7EXvn9OxpBRx7kZEB0efv8yBO6YnVMfhJs= cloud.google.com/go/managedidentities v1.3.0/go.mod h1:UzlW3cBOiPrzucO5qWkNkh0w33KFtBJU281hacNvsdE= cloud.google.com/go/managedidentities v1.4.0/go.mod h1:NWSBYbEMgqmbZsLIyKvxrYbtqOsxY1ZrGM+9RgDqInM= cloud.google.com/go/managedidentities v1.5.0/go.mod h1:+dWcZ0JlUmpuxpIDfyP5pP5y0bLdRwOS4Lp7gMni/LA= cloud.google.com/go/maps v0.1.0/go.mod h1:BQM97WGyfw9FWEmQMpZ5T6cpovXXSd1cGmFma94eubI= cloud.google.com/go/maps v0.6.0/go.mod h1:o6DAMMfb+aINHz/p/jbcY+mYeXBoZoxTfdSQ8VAJaCw= cloud.google.com/go/maps v0.7.0/go.mod h1:3GnvVl3cqeSvgMcpRlQidXsPYuDGQ8naBis7MVzpXsY= cloud.google.com/go/mediatranslation v0.5.0/go.mod h1:jGPUhGTybqsPQn91pNXw0xVHfuJ3leR1wj37oU3y1f4= cloud.google.com/go/mediatranslation v0.6.0/go.mod h1:hHdBCTYNigsBxshbznuIMFNe5QXEowAuNmmC7h8pu5w= cloud.google.com/go/mediatranslation v0.7.0/go.mod h1:LCnB/gZr90ONOIQLgSXagp8XUW1ODs2UmUMvcgMfI2I= cloud.google.com/go/memcache v1.4.0/go.mod h1:rTOfiGZtJX1AaFUrOgsMHX5kAzaTQ8azHiuDoTPzNsE= cloud.google.com/go/memcache v1.5.0/go.mod 
h1:dk3fCK7dVo0cUU2c36jKb4VqKPS22BTkf81Xq617aWM= cloud.google.com/go/memcache v1.6.0/go.mod h1:XS5xB0eQZdHtTuTF9Hf8eJkKtR3pVRCcvJwtm68T3rA= cloud.google.com/go/memcache v1.7.0/go.mod h1:ywMKfjWhNtkQTxrWxCkCFkoPjLHPW6A7WOTVI8xy3LY= cloud.google.com/go/memcache v1.9.0/go.mod h1:8oEyzXCu+zo9RzlEaEjHl4KkgjlNDaXbCQeQWlzNFJM= cloud.google.com/go/metastore v1.5.0/go.mod h1:2ZNrDcQwghfdtCwJ33nM0+GrBGlVuh8rakL3vdPY3XY= cloud.google.com/go/metastore v1.6.0/go.mod h1:6cyQTls8CWXzk45G55x57DVQ9gWg7RiH65+YgPsNh9s= cloud.google.com/go/metastore v1.7.0/go.mod h1:s45D0B4IlsINu87/AsWiEVYbLaIMeUSoxlKKDqBGFS8= cloud.google.com/go/metastore v1.8.0/go.mod h1:zHiMc4ZUpBiM7twCIFQmJ9JMEkDSyZS9U12uf7wHqSI= cloud.google.com/go/metastore v1.10.0/go.mod h1:fPEnH3g4JJAk+gMRnrAnoqyv2lpUCqJPWOodSaf45Eo= cloud.google.com/go/monitoring v1.7.0/go.mod h1:HpYse6kkGo//7p6sT0wsIC6IBDET0RhIsnmlA53dvEk= cloud.google.com/go/monitoring v1.8.0/go.mod h1:E7PtoMJ1kQXWxPjB6mv2fhC5/15jInuulFdYYtlcvT4= cloud.google.com/go/monitoring v1.12.0/go.mod h1:yx8Jj2fZNEkL/GYZyTLS4ZtZEZN8WtDEiEqG4kLK50w= cloud.google.com/go/monitoring v1.13.0/go.mod h1:k2yMBAB1H9JT/QETjNkgdCGD9bPF712XiLTVr+cBrpw= cloud.google.com/go/monitoring v1.24.0 h1:csSKiCJ+WVRgNkRzzz3BPoGjFhjPY23ZTcaenToJxMM= cloud.google.com/go/monitoring v1.24.0/go.mod h1:Bd1PRK5bmQBQNnuGwHBfUamAV1ys9049oEPHnn4pcsc= cloud.google.com/go/networkconnectivity v1.4.0/go.mod h1:nOl7YL8odKyAOtzNX73/M5/mGZgqqMeryi6UPZTk/rA= cloud.google.com/go/networkconnectivity v1.5.0/go.mod h1:3GzqJx7uhtlM3kln0+x5wyFvuVH1pIBJjhCpjzSt75o= cloud.google.com/go/networkconnectivity v1.6.0/go.mod h1:OJOoEXW+0LAxHh89nXd64uGG+FbQoeH8DtxCHVOMlaM= cloud.google.com/go/networkconnectivity v1.7.0/go.mod h1:RMuSbkdbPwNMQjB5HBWD5MpTBnNm39iAVpC3TmsExt8= cloud.google.com/go/networkconnectivity v1.10.0/go.mod h1:UP4O4sWXJG13AqrTdQCD9TnLGEbtNRqjuaaA7bNjF5E= cloud.google.com/go/networkconnectivity v1.11.0/go.mod h1:iWmDD4QF16VCDLXUqvyspJjIEtBR/4zq5hwnY2X3scM= cloud.google.com/go/networkmanagement v1.4.0/go.mod h1:Q9mdLLRn60AsOrPc8rs8iNV6OHXaGcDdsIQe1ohekq8= cloud.google.com/go/networkmanagement v1.5.0/go.mod h1:ZnOeZ/evzUdUsnvRt792H0uYEnHQEMaz+REhhzJRcf4= cloud.google.com/go/networkmanagement v1.6.0/go.mod h1:5pKPqyXjB/sgtvB5xqOemumoQNB7y95Q7S+4rjSOPYY= cloud.google.com/go/networksecurity v0.5.0/go.mod h1:xS6fOCoqpVC5zx15Z/MqkfDwH4+m/61A3ODiDV1xmiQ= cloud.google.com/go/networksecurity v0.6.0/go.mod h1:Q5fjhTr9WMI5mbpRYEbiexTzROf7ZbDzvzCrNl14nyU= cloud.google.com/go/networksecurity v0.7.0/go.mod h1:mAnzoxx/8TBSyXEeESMy9OOYwo1v+gZ5eMRnsT5bC8k= cloud.google.com/go/networksecurity v0.8.0/go.mod h1:B78DkqsxFG5zRSVuwYFRZ9Xz8IcQ5iECsNrPn74hKHU= cloud.google.com/go/notebooks v1.2.0/go.mod h1:9+wtppMfVPUeJ8fIWPOq1UnATHISkGXGqTkxeieQ6UY= cloud.google.com/go/notebooks v1.3.0/go.mod h1:bFR5lj07DtCPC7YAAJ//vHskFBxA5JzYlH68kXVdk34= cloud.google.com/go/notebooks v1.4.0/go.mod h1:4QPMngcwmgb6uw7Po99B2xv5ufVoIQ7nOGDyL4P8AgA= cloud.google.com/go/notebooks v1.5.0/go.mod h1:q8mwhnP9aR8Hpfnrc5iN5IBhrXUy8S2vuYs+kBJ/gu0= cloud.google.com/go/notebooks v1.7.0/go.mod h1:PVlaDGfJgj1fl1S3dUwhFMXFgfYGhYQt2164xOMONmE= cloud.google.com/go/notebooks v1.8.0/go.mod h1:Lq6dYKOYOWUCTvw5t2q1gp1lAp0zxAxRycayS0iJcqQ= cloud.google.com/go/optimization v1.1.0/go.mod h1:5po+wfvX5AQlPznyVEZjGJTMr4+CAkJf2XSTQOOl9l4= cloud.google.com/go/optimization v1.2.0/go.mod h1:Lr7SOHdRDENsh+WXVmQhQTrzdu9ybg0NecjHidBq6xs= cloud.google.com/go/optimization v1.3.1/go.mod h1:IvUSefKiwd1a5p0RgHDbWCIbDFgKuEdB+fPPuP0IDLI= cloud.google.com/go/orchestration v1.3.0/go.mod 
h1:Sj5tq/JpWiB//X/q3Ngwdl5K7B7Y0KZ7bfv0wL6fqVA= cloud.google.com/go/orchestration v1.4.0/go.mod h1:6W5NLFWs2TlniBphAViZEVhrXRSMgUGDfW7vrWKvsBk= cloud.google.com/go/orchestration v1.6.0/go.mod h1:M62Bevp7pkxStDfFfTuCOaXgaaqRAga1yKyoMtEoWPQ= cloud.google.com/go/orgpolicy v1.4.0/go.mod h1:xrSLIV4RePWmP9P3tBl8S93lTmlAxjm06NSm2UTmKvE= cloud.google.com/go/orgpolicy v1.5.0/go.mod h1:hZEc5q3wzwXJaKrsx5+Ewg0u1LxJ51nNFlext7Tanwc= cloud.google.com/go/orgpolicy v1.10.0/go.mod h1:w1fo8b7rRqlXlIJbVhOMPrwVljyuW5mqssvBtU18ONc= cloud.google.com/go/osconfig v1.7.0/go.mod h1:oVHeCeZELfJP7XLxcBGTMBvRO+1nQ5tFG9VQTmYS2Fs= cloud.google.com/go/osconfig v1.8.0/go.mod h1:EQqZLu5w5XA7eKizepumcvWx+m8mJUhEwiPqWiZeEdg= cloud.google.com/go/osconfig v1.9.0/go.mod h1:Yx+IeIZJ3bdWmzbQU4fxNl8xsZ4amB+dygAwFPlvnNo= cloud.google.com/go/osconfig v1.10.0/go.mod h1:uMhCzqC5I8zfD9zDEAfvgVhDS8oIjySWh+l4WK6GnWw= cloud.google.com/go/osconfig v1.11.0/go.mod h1:aDICxrur2ogRd9zY5ytBLV89KEgT2MKB2L/n6x1ooPw= cloud.google.com/go/oslogin v1.4.0/go.mod h1:YdgMXWRaElXz/lDk1Na6Fh5orF7gvmJ0FGLIs9LId4E= cloud.google.com/go/oslogin v1.5.0/go.mod h1:D260Qj11W2qx/HVF29zBg+0fd6YCSjSqLUkY/qEenQU= cloud.google.com/go/oslogin v1.6.0/go.mod h1:zOJ1O3+dTU8WPlGEkFSh7qeHPPSoxrcMbbK1Nm2iX70= cloud.google.com/go/oslogin v1.7.0/go.mod h1:e04SN0xO1UNJ1M5GP0vzVBFicIe4O53FOfcixIqTyXo= cloud.google.com/go/oslogin v1.9.0/go.mod h1:HNavntnH8nzrn8JCTT5fj18FuJLFJc4NaZJtBnQtKFs= cloud.google.com/go/phishingprotection v0.5.0/go.mod h1:Y3HZknsK9bc9dMi+oE8Bim0lczMU6hrX0UpADuMefr0= cloud.google.com/go/phishingprotection v0.6.0/go.mod h1:9Y3LBLgy0kDTcYET8ZH3bq/7qni15yVUoAxiFxnlSUA= cloud.google.com/go/phishingprotection v0.7.0/go.mod h1:8qJI4QKHoda/sb/7/YmMQ2omRLSLYSu9bU0EKCNI+Lk= cloud.google.com/go/policytroubleshooter v1.3.0/go.mod h1:qy0+VwANja+kKrjlQuOzmlvscn4RNsAc0e15GGqfMxg= cloud.google.com/go/policytroubleshooter v1.4.0/go.mod h1:DZT4BcRw3QoO8ota9xw/LKtPa8lKeCByYeKTIf/vxdE= cloud.google.com/go/policytroubleshooter v1.5.0/go.mod h1:Rz1WfV+1oIpPdN2VvvuboLVRsB1Hclg3CKQ53j9l8vw= cloud.google.com/go/policytroubleshooter v1.6.0/go.mod h1:zYqaPTsmfvpjm5ULxAyD/lINQxJ0DDsnWOP/GZ7xzBc= cloud.google.com/go/privatecatalog v0.5.0/go.mod h1:XgosMUvvPyxDjAVNDYxJ7wBW8//hLDDYmnsNcMGq1K0= cloud.google.com/go/privatecatalog v0.6.0/go.mod h1:i/fbkZR0hLN29eEWiiwue8Pb+GforiEIBnV9yrRUOKI= cloud.google.com/go/privatecatalog v0.7.0/go.mod h1:2s5ssIFO69F5csTXcwBP7NPFTZvps26xGzvQ2PQaBYg= cloud.google.com/go/privatecatalog v0.8.0/go.mod h1:nQ6pfaegeDAq/Q5lrfCQzQLhubPiZhSaNhIgfJlnIXs= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= cloud.google.com/go/pubsub v1.26.0/go.mod h1:QgBH3U/jdJy/ftjPhTkyXNj543Tin1pRYcdcPRnFIRI= cloud.google.com/go/pubsub v1.27.1/go.mod h1:hQN39ymbV9geqBnfQq6Xf63yNhUAhv9CZhzp5O6qsW0= cloud.google.com/go/pubsub v1.28.0/go.mod h1:vuXFpwaVoIPQMGXqRyUQigu/AX1S3IWugR9xznmcXX8= cloud.google.com/go/pubsub v1.30.0/go.mod h1:qWi1OPS0B+b5L+Sg6Gmc9zD1Y+HaM0MdUr7LsupY1P4= cloud.google.com/go/pubsublite v1.5.0/go.mod h1:xapqNQ1CuLfGi23Yda/9l4bBCKz/wC3KIJ5gKcxveZg= cloud.google.com/go/pubsublite v1.6.0/go.mod h1:1eFCS0U11xlOuMFV/0iBqw3zP12kddMeCbj/F3FSj9k= cloud.google.com/go/pubsublite v1.7.0/go.mod h1:8hVMwRXfDfvGm3fahVbtDbiLePT3gpoiJYJY+vxWxVM= 
cloud.google.com/go/recaptchaenterprise v1.3.1/go.mod h1:OdD+q+y4XGeAlxRaMn1Y7/GveP6zmq76byL6tjPE7d4= cloud.google.com/go/recaptchaenterprise/v2 v2.1.0/go.mod h1:w9yVqajwroDNTfGuhmOjPDN//rZGySaf6PtFVcSCa7o= cloud.google.com/go/recaptchaenterprise/v2 v2.2.0/go.mod h1:/Zu5jisWGeERrd5HnlS3EUGb/D335f9k51B/FVil0jk= cloud.google.com/go/recaptchaenterprise/v2 v2.3.0/go.mod h1:O9LwGCjrhGHBQET5CA7dd5NwwNQUErSgEDit1DLNTdo= cloud.google.com/go/recaptchaenterprise/v2 v2.4.0/go.mod h1:Am3LHfOuBstrLrNCBrlI5sbwx9LBg3te2N6hGvHn2mE= cloud.google.com/go/recaptchaenterprise/v2 v2.5.0/go.mod h1:O8LzcHXN3rz0j+LBC91jrwI3R+1ZSZEWrfL7XHgNo9U= cloud.google.com/go/recaptchaenterprise/v2 v2.6.0/go.mod h1:RPauz9jeLtB3JVzg6nCbe12qNoaa8pXc4d/YukAmcnA= cloud.google.com/go/recaptchaenterprise/v2 v2.7.0/go.mod h1:19wVj/fs5RtYtynAPJdDTb69oW0vNHYDBTbB4NvMD9c= cloud.google.com/go/recommendationengine v0.5.0/go.mod h1:E5756pJcVFeVgaQv3WNpImkFP8a+RptV6dDLGPILjvg= cloud.google.com/go/recommendationengine v0.6.0/go.mod h1:08mq2umu9oIqc7tDy8sx+MNJdLG0fUi3vaSVbztHgJ4= cloud.google.com/go/recommendationengine v0.7.0/go.mod h1:1reUcE3GIu6MeBz/h5xZJqNLuuVjNg1lmWMPyjatzac= cloud.google.com/go/recommender v1.5.0/go.mod h1:jdoeiBIVrJe9gQjwd759ecLJbxCDED4A6p+mqoqDvTg= cloud.google.com/go/recommender v1.6.0/go.mod h1:+yETpm25mcoiECKh9DEScGzIRyDKpZ0cEhWGo+8bo+c= cloud.google.com/go/recommender v1.7.0/go.mod h1:XLHs/W+T8olwlGOgfQenXBTbIseGclClff6lhFVe9Bs= cloud.google.com/go/recommender v1.8.0/go.mod h1:PkjXrTT05BFKwxaUxQmtIlrtj0kph108r02ZZQ5FE70= cloud.google.com/go/recommender v1.9.0/go.mod h1:PnSsnZY7q+VL1uax2JWkt/UegHssxjUVVCrX52CuEmQ= cloud.google.com/go/redis v1.7.0/go.mod h1:V3x5Jq1jzUcg+UNsRvdmsfuFnit1cfe3Z/PGyq/lm4Y= cloud.google.com/go/redis v1.8.0/go.mod h1:Fm2szCDavWzBk2cDKxrkmWBqoCiL1+Ctwq7EyqBCA/A= cloud.google.com/go/redis v1.9.0/go.mod h1:HMYQuajvb2D0LvMgZmLDZW8V5aOC/WxstZHiy4g8OiA= cloud.google.com/go/redis v1.10.0/go.mod h1:ThJf3mMBQtW18JzGgh41/Wld6vnDDc/F/F35UolRZPM= cloud.google.com/go/redis v1.11.0/go.mod h1:/X6eicana+BWcUda5PpwZC48o37SiFVTFSs0fWAJ7uQ= cloud.google.com/go/resourcemanager v1.3.0/go.mod h1:bAtrTjZQFJkiWTPDb1WBjzvc6/kifjj4QBYuKCCoqKA= cloud.google.com/go/resourcemanager v1.4.0/go.mod h1:MwxuzkumyTX7/a3n37gmsT3py7LIXwrShilPh3P1tR0= cloud.google.com/go/resourcemanager v1.5.0/go.mod h1:eQoXNAiAvCf5PXxWxXjhKQoTMaUSNrEfg+6qdf/wots= cloud.google.com/go/resourcemanager v1.6.0/go.mod h1:YcpXGRs8fDzcUl1Xw8uOVmI8JEadvhRIkoXXUNVYcVo= cloud.google.com/go/resourcemanager v1.7.0/go.mod h1:HlD3m6+bwhzj9XCouqmeiGuni95NTrExfhoSrkC/3EI= cloud.google.com/go/resourcesettings v1.3.0/go.mod h1:lzew8VfESA5DQ8gdlHwMrqZs1S9V87v3oCnKCWoOuQU= cloud.google.com/go/resourcesettings v1.4.0/go.mod h1:ldiH9IJpcrlC3VSuCGvjR5of/ezRrOxFtpJoJo5SmXg= cloud.google.com/go/resourcesettings v1.5.0/go.mod h1:+xJF7QSG6undsQDfsCJyqWXyBwUoJLhetkRMDRnIoXA= cloud.google.com/go/retail v1.8.0/go.mod h1:QblKS8waDmNUhghY2TI9O3JLlFk8jybHeV4BF19FrE4= cloud.google.com/go/retail v1.9.0/go.mod h1:g6jb6mKuCS1QKnH/dpu7isX253absFl6iE92nHwlBUY= cloud.google.com/go/retail v1.10.0/go.mod h1:2gDk9HsL4HMS4oZwz6daui2/jmKvqShXKQuB2RZ+cCc= cloud.google.com/go/retail v1.11.0/go.mod h1:MBLk1NaWPmh6iVFSz9MeKG/Psyd7TAgm6y/9L2B4x9Y= cloud.google.com/go/retail v1.12.0/go.mod h1:UMkelN/0Z8XvKymXFbD4EhFJlYKRx1FGhQkVPU5kF14= cloud.google.com/go/run v0.2.0/go.mod h1:CNtKsTA1sDcnqqIFR3Pb5Tq0usWxJJvsWOCPldRU3Do= cloud.google.com/go/run v0.3.0/go.mod h1:TuyY1+taHxTjrD0ZFk2iAR+xyOXEA0ztb7U3UNA0zBo= cloud.google.com/go/run v0.8.0/go.mod h1:VniEnuBwqjigv0A7ONfQUaEItaiCRVujlMqerPPiktM= 
cloud.google.com/go/run v0.9.0/go.mod h1:Wwu+/vvg8Y+JUApMwEDfVfhetv30hCG4ZwDR/IXl2Qg= cloud.google.com/go/scheduler v1.4.0/go.mod h1:drcJBmxF3aqZJRhmkHQ9b3uSSpQoltBPGPxGAWROx6s= cloud.google.com/go/scheduler v1.5.0/go.mod h1:ri073ym49NW3AfT6DZi21vLZrG07GXr5p3H1KxN5QlI= cloud.google.com/go/scheduler v1.6.0/go.mod h1:SgeKVM7MIwPn3BqtcBntpLyrIJftQISRrYB5ZtT+KOk= cloud.google.com/go/scheduler v1.7.0/go.mod h1:jyCiBqWW956uBjjPMMuX09n3x37mtyPJegEWKxRsn44= cloud.google.com/go/scheduler v1.8.0/go.mod h1:TCET+Y5Gp1YgHT8py4nlg2Sew8nUHMqcpousDgXJVQc= cloud.google.com/go/scheduler v1.9.0/go.mod h1:yexg5t+KSmqu+njTIh3b7oYPheFtBWGcbVUYF1GGMIc= cloud.google.com/go/secretmanager v1.6.0/go.mod h1:awVa/OXF6IiyaU1wQ34inzQNc4ISIDIrId8qE5QGgKA= cloud.google.com/go/secretmanager v1.8.0/go.mod h1:hnVgi/bN5MYHd3Gt0SPuTPPp5ENina1/LxM+2W9U9J4= cloud.google.com/go/secretmanager v1.9.0/go.mod h1:b71qH2l1yHmWQHt9LC80akm86mX8AL6X1MA01dW8ht4= cloud.google.com/go/secretmanager v1.10.0/go.mod h1:MfnrdvKMPNra9aZtQFvBcvRU54hbPD8/HayQdlUgJpU= cloud.google.com/go/security v1.5.0/go.mod h1:lgxGdyOKKjHL4YG3/YwIL2zLqMFCKs0UbQwgyZmfJl4= cloud.google.com/go/security v1.7.0/go.mod h1:mZklORHl6Bg7CNnnjLH//0UlAlaXqiG7Lb9PsPXLfD0= cloud.google.com/go/security v1.8.0/go.mod h1:hAQOwgmaHhztFhiQ41CjDODdWP0+AE1B3sX4OFlq+GU= cloud.google.com/go/security v1.9.0/go.mod h1:6Ta1bO8LXI89nZnmnsZGp9lVoVWXqsVbIq/t9dzI+2Q= cloud.google.com/go/security v1.10.0/go.mod h1:QtOMZByJVlibUT2h9afNDWRZ1G96gVywH8T5GUSb9IA= cloud.google.com/go/security v1.12.0/go.mod h1:rV6EhrpbNHrrxqlvW0BWAIawFWq3X90SduMJdFwtLB8= cloud.google.com/go/security v1.13.0/go.mod h1:Q1Nvxl1PAgmeW0y3HTt54JYIvUdtcpYKVfIB8AOMZ+0= cloud.google.com/go/securitycenter v1.13.0/go.mod h1:cv5qNAqjY84FCN6Y9z28WlkKXyWsgLO832YiWwkCWcU= cloud.google.com/go/securitycenter v1.14.0/go.mod h1:gZLAhtyKv85n52XYWt6RmeBdydyxfPeTrpToDPw4Auc= cloud.google.com/go/securitycenter v1.15.0/go.mod h1:PeKJ0t8MoFmmXLXWm41JidyzI3PJjd8sXWaVqg43WWk= cloud.google.com/go/securitycenter v1.16.0/go.mod h1:Q9GMaLQFUD+5ZTabrbujNWLtSLZIZF7SAR0wWECrjdk= cloud.google.com/go/securitycenter v1.18.1/go.mod h1:0/25gAzCM/9OL9vVx4ChPeM/+DlfGQJDwBy/UC8AKK0= cloud.google.com/go/securitycenter v1.19.0/go.mod h1:LVLmSg8ZkkyaNy4u7HCIshAngSQ8EcIRREP3xBnyfag= cloud.google.com/go/servicecontrol v1.4.0/go.mod h1:o0hUSJ1TXJAmi/7fLJAedOovnujSEvjKCAFNXPQ1RaU= cloud.google.com/go/servicecontrol v1.5.0/go.mod h1:qM0CnXHhyqKVuiZnGKrIurvVImCs8gmqWsDoqe9sU1s= cloud.google.com/go/servicecontrol v1.10.0/go.mod h1:pQvyvSRh7YzUF2efw7H87V92mxU8FnFDawMClGCNuAA= cloud.google.com/go/servicecontrol v1.11.0/go.mod h1:kFmTzYzTUIuZs0ycVqRHNaNhgR+UMUpw9n02l/pY+mc= cloud.google.com/go/servicecontrol v1.11.1/go.mod h1:aSnNNlwEFBY+PWGQ2DoM0JJ/QUXqV5/ZD9DOLB7SnUk= cloud.google.com/go/servicedirectory v1.4.0/go.mod h1:gH1MUaZCgtP7qQiI+F+A+OpeKF/HQWgtAddhTbhL2bs= cloud.google.com/go/servicedirectory v1.5.0/go.mod h1:QMKFL0NUySbpZJ1UZs3oFAmdvVxhhxB6eJ/Vlp73dfg= cloud.google.com/go/servicedirectory v1.6.0/go.mod h1:pUlbnWsLH9c13yGkxCmfumWEPjsRs1RlmJ4pqiNjVL4= cloud.google.com/go/servicedirectory v1.7.0/go.mod h1:5p/U5oyvgYGYejufvxhgwjL8UVXjkuw7q5XcG10wx1U= cloud.google.com/go/servicedirectory v1.8.0/go.mod h1:srXodfhY1GFIPvltunswqXpVxFPpZjf8nkKQT7XcXaY= cloud.google.com/go/servicedirectory v1.9.0/go.mod h1:29je5JjiygNYlmsGz8k6o+OZ8vd4f//bQLtvzkPPT/s= cloud.google.com/go/servicemanagement v1.4.0/go.mod h1:d8t8MDbezI7Z2R1O/wu8oTggo3BI2GKYbdG4y/SJTco= cloud.google.com/go/servicemanagement v1.5.0/go.mod h1:XGaCRe57kfqu4+lRxaFEAuqmjzF0r+gWHjWqKqBvKFo= 
cloud.google.com/go/servicemanagement v1.6.0/go.mod h1:aWns7EeeCOtGEX4OvZUWCCJONRZeFKiptqKf1D0l/Jc= cloud.google.com/go/servicemanagement v1.8.0/go.mod h1:MSS2TDlIEQD/fzsSGfCdJItQveu9NXnUniTrq/L8LK4= cloud.google.com/go/serviceusage v1.3.0/go.mod h1:Hya1cozXM4SeSKTAgGXgj97GlqUvF5JaoXacR1JTP/E= cloud.google.com/go/serviceusage v1.4.0/go.mod h1:SB4yxXSaYVuUBYUml6qklyONXNLt83U0Rb+CXyhjEeU= cloud.google.com/go/serviceusage v1.5.0/go.mod h1:w8U1JvqUqwJNPEOTQjrMHkw3IaIFLoLsPLvsE3xueec= cloud.google.com/go/serviceusage v1.6.0/go.mod h1:R5wwQcbOWsyuOfbP9tGdAnCAc6B9DRwPG1xtWMDeuPA= cloud.google.com/go/shell v1.3.0/go.mod h1:VZ9HmRjZBsjLGXusm7K5Q5lzzByZmJHf1d0IWHEN5X4= cloud.google.com/go/shell v1.4.0/go.mod h1:HDxPzZf3GkDdhExzD/gs8Grqk+dmYcEjGShZgYa9URw= cloud.google.com/go/shell v1.6.0/go.mod h1:oHO8QACS90luWgxP3N9iZVuEiSF84zNyLytb+qE2f9A= cloud.google.com/go/spanner v1.41.0/go.mod h1:MLYDBJR/dY4Wt7ZaMIQ7rXOTLjYrmxLE/5ve9vFfWos= cloud.google.com/go/spanner v1.44.0/go.mod h1:G8XIgYdOK+Fbcpbs7p2fiprDw4CaZX63whnSMLVBxjk= cloud.google.com/go/spanner v1.45.0/go.mod h1:FIws5LowYz8YAE1J8fOS7DJup8ff7xJeetWEo5REA2M= cloud.google.com/go/spanner v1.76.1 h1:vYbVZuXfnFwvNcvH3lhI2PeUA+kHyqKmLC7mJWaC4Ok= cloud.google.com/go/spanner v1.76.1/go.mod h1:YtwoE+zObKY7+ZeDCBtZ2ukM+1/iPaMfUM+KnTh/sx0= cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM= cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ= cloud.google.com/go/speech v1.8.0/go.mod h1:9bYIl1/tjsAnMgKGHKmBZzXKEkGgtU+MpdDPTE9f7y0= cloud.google.com/go/speech v1.9.0/go.mod h1:xQ0jTcmnRFFM2RfX/U+rk6FQNUF6DQlydUSyoooSpco= cloud.google.com/go/speech v1.14.1/go.mod h1:gEosVRPJ9waG7zqqnsHpYTOoAS4KouMRLDFMekpJ0J0= cloud.google.com/go/speech v1.15.0/go.mod h1:y6oH7GhqCaZANH7+Oe0BhgIogsNInLlz542tg3VqeYI= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s= cloud.google.com/go/storage v1.28.1/go.mod h1:Qnisd4CqDdo6BGs2AD5LLnEsmSQ80wQ5ogcBBKhU86Y= cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4= cloud.google.com/go/storage v1.50.0 h1:3TbVkzTooBvnZsk7WaAQfOsNrdoM8QHusXA1cpk6QJs= cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv482poyafY= cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w= cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I= cloud.google.com/go/storagetransfer v1.7.0/go.mod h1:8Giuj1QNb1kfLAiWM1bN6dHzfdlDAVC9rv9abHot2W4= cloud.google.com/go/storagetransfer v1.8.0/go.mod h1:JpegsHHU1eXg7lMHkvf+KE5XDJ7EQu0GwNJbbVGanEw= cloud.google.com/go/talent v1.1.0/go.mod h1:Vl4pt9jiHKvOgF9KoZo6Kob9oV4lwd/ZD5Cto54zDRw= cloud.google.com/go/talent v1.2.0/go.mod 
h1:MoNF9bhFQbiJ6eFD3uSsg0uBALw4n4gaCaEjBw9zo8g= cloud.google.com/go/talent v1.3.0/go.mod h1:CmcxwJ/PKfRgd1pBjQgU6W3YBwiewmUzQYH5HHmSCmM= cloud.google.com/go/talent v1.4.0/go.mod h1:ezFtAgVuRf8jRsvyE6EwmbTK5LKciD4KVnHuDEFmOOA= cloud.google.com/go/talent v1.5.0/go.mod h1:G+ODMj9bsasAEJkQSzO2uHQWXHHXUomArjWQQYkqK6c= cloud.google.com/go/texttospeech v1.4.0/go.mod h1:FX8HQHA6sEpJ7rCMSfXuzBcysDAuWusNNNvN9FELDd8= cloud.google.com/go/texttospeech v1.5.0/go.mod h1:oKPLhR4n4ZdQqWKURdwxMy0uiTS1xU161C8W57Wkea4= cloud.google.com/go/texttospeech v1.6.0/go.mod h1:YmwmFT8pj1aBblQOI3TfKmwibnsfvhIBzPXcW4EBovc= cloud.google.com/go/tpu v1.3.0/go.mod h1:aJIManG0o20tfDQlRIej44FcwGGl/cD0oiRyMKG19IQ= cloud.google.com/go/tpu v1.4.0/go.mod h1:mjZaX8p0VBgllCzF6wcU2ovUXN9TONFLd7iz227X2Xg= cloud.google.com/go/tpu v1.5.0/go.mod h1:8zVo1rYDFuW2l4yZVY0R0fb/v44xLh3llq7RuV61fPM= cloud.google.com/go/trace v1.3.0/go.mod h1:FFUE83d9Ca57C+K8rDl/Ih8LwOzWIV1krKgxg6N0G28= cloud.google.com/go/trace v1.4.0/go.mod h1:UG0v8UBqzusp+z63o7FK74SdFE+AXpCLdFb1rshXG+Y= cloud.google.com/go/trace v1.8.0/go.mod h1:zH7vcsbAhklH8hWFig58HvxcxyQbaIqMarMg9hn5ECA= cloud.google.com/go/trace v1.9.0/go.mod h1:lOQqpE5IaWY0Ixg7/r2SjixMuc6lfTFeO4QGM4dQWOk= cloud.google.com/go/translate v1.3.0/go.mod h1:gzMUwRjvOqj5i69y/LYLd8RrNQk+hOmIXTi9+nb3Djs= cloud.google.com/go/translate v1.4.0/go.mod h1:06Dn/ppvLD6WvA5Rhdp029IX2Mi3Mn7fpMRLPvXT5Wg= cloud.google.com/go/translate v1.5.0/go.mod h1:29YDSYveqqpA1CQFD7NQuP49xymq17RXNaUDdc0mNu0= cloud.google.com/go/translate v1.6.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= cloud.google.com/go/translate v1.7.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= cloud.google.com/go/video v1.8.0/go.mod h1:sTzKFc0bUSByE8Yoh8X0mn8bMymItVGPfTuUBUyRgxk= cloud.google.com/go/video v1.9.0/go.mod h1:0RhNKFRF5v92f8dQt0yhaHrEuH95m068JYOvLZYnJSw= cloud.google.com/go/video v1.12.0/go.mod h1:MLQew95eTuaNDEGriQdcYn0dTwf9oWiA4uYebxM5kdg= cloud.google.com/go/video v1.13.0/go.mod h1:ulzkYlYgCp15N2AokzKjy7MQ9ejuynOJdf1tR5lGthk= cloud.google.com/go/video v1.14.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= cloud.google.com/go/video v1.15.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= cloud.google.com/go/videointelligence v1.6.0/go.mod h1:w0DIDlVRKtwPCn/C4iwZIJdvC69yInhW0cfi+p546uU= cloud.google.com/go/videointelligence v1.7.0/go.mod h1:k8pI/1wAhjznARtVT9U1llUaFNPh7muw8QyOUpavru4= cloud.google.com/go/videointelligence v1.8.0/go.mod h1:dIcCn4gVDdS7yte/w+koiXn5dWVplOZkE+xwG9FgK+M= cloud.google.com/go/videointelligence v1.9.0/go.mod h1:29lVRMPDYHikk3v8EdPSaL8Ku+eMzDljjuvRs105XoU= cloud.google.com/go/videointelligence v1.10.0/go.mod h1:LHZngX1liVtUhZvi2uNS0VQuOzNi2TkY1OakiuoUOjU= cloud.google.com/go/vision v1.2.0/go.mod h1:SmNwgObm5DpFBme2xpyOyasvBc1aPdjvMk2bBk0tKD0= cloud.google.com/go/vision/v2 v2.2.0/go.mod h1:uCdV4PpN1S0jyCyq8sIM42v2Y6zOLkZs+4R9LrGYwFo= cloud.google.com/go/vision/v2 v2.3.0/go.mod h1:UO61abBx9QRMFkNBbf1D8B1LXdS2cGiiCRx0vSpZoUo= cloud.google.com/go/vision/v2 v2.4.0/go.mod h1:VtI579ll9RpVTrdKdkMzckdnwMyX2JILb+MhPqRbPsY= cloud.google.com/go/vision/v2 v2.5.0/go.mod h1:MmaezXOOE+IWa+cS7OhRRLK2cNv1ZL98zhqFFZaaH2E= cloud.google.com/go/vision/v2 v2.6.0/go.mod h1:158Hes0MvOS9Z/bDMSFpjwsUrZ5fPrdwuyyvKSGAGMY= cloud.google.com/go/vision/v2 v2.7.0/go.mod h1:H89VysHy21avemp6xcf9b9JvZHVehWbET0uT/bcuY/0= cloud.google.com/go/vmmigration v1.2.0/go.mod h1:IRf0o7myyWFSmVR1ItrBSFLFD/rJkfDCUTO4vLlJvsE= cloud.google.com/go/vmmigration v1.3.0/go.mod h1:oGJ6ZgGPQOFdjHuocGcLqX4lc98YQ7Ygq8YQwHh9A7g= 
cloud.google.com/go/vmmigration v1.5.0/go.mod h1:E4YQ8q7/4W9gobHjQg4JJSgXXSgY21nA5r8swQV+Xxc= cloud.google.com/go/vmmigration v1.6.0/go.mod h1:bopQ/g4z+8qXzichC7GW1w2MjbErL54rk3/C843CjfY= cloud.google.com/go/vmwareengine v0.1.0/go.mod h1:RsdNEf/8UDvKllXhMz5J40XxDrNJNN4sagiox+OI208= cloud.google.com/go/vmwareengine v0.2.2/go.mod h1:sKdctNJxb3KLZkE/6Oui94iw/xs9PRNC2wnNLXsHvH8= cloud.google.com/go/vmwareengine v0.3.0/go.mod h1:wvoyMvNWdIzxMYSpH/R7y2h5h3WFkx6d+1TIsP39WGY= cloud.google.com/go/vpcaccess v1.4.0/go.mod h1:aQHVbTWDYUR1EbTApSVvMq1EnT57ppDmQzZ3imqIk4w= cloud.google.com/go/vpcaccess v1.5.0/go.mod h1:drmg4HLk9NkZpGfCmZ3Tz0Bwnm2+DKqViEpeEpOq0m8= cloud.google.com/go/vpcaccess v1.6.0/go.mod h1:wX2ILaNhe7TlVa4vC5xce1bCnqE3AeH27RV31lnmZes= cloud.google.com/go/webrisk v1.4.0/go.mod h1:Hn8X6Zr+ziE2aNd8SliSDWpEnSS1u4R9+xXZmFiHmGE= cloud.google.com/go/webrisk v1.5.0/go.mod h1:iPG6fr52Tv7sGk0H6qUFzmL3HHZev1htXuWDEEsqMTg= cloud.google.com/go/webrisk v1.6.0/go.mod h1:65sW9V9rOosnc9ZY7A7jsy1zoHS5W9IAXv6dGqhMQMc= cloud.google.com/go/webrisk v1.7.0/go.mod h1:mVMHgEYH0r337nmt1JyLthzMr6YxwN1aAIEc2fTcq7A= cloud.google.com/go/webrisk v1.8.0/go.mod h1:oJPDuamzHXgUc+b8SiHRcVInZQuybnvEW72PqTc7sSg= cloud.google.com/go/websecurityscanner v1.3.0/go.mod h1:uImdKm2wyeXQevQJXeh8Uun/Ym1VqworNDlBXQevGMo= cloud.google.com/go/websecurityscanner v1.4.0/go.mod h1:ebit/Fp0a+FWu5j4JOmJEV8S8CzdTkAS77oDsiSqYWQ= cloud.google.com/go/websecurityscanner v1.5.0/go.mod h1:Y6xdCPy81yi0SQnDY1xdNTNpfY1oAgXUlcfN3B3eSng= cloud.google.com/go/workflows v1.6.0/go.mod h1:6t9F5h/unJz41YqfBmqSASJSXccBLtD1Vwf+KmJENM0= cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoISEXH2bcHC3M= cloud.google.com/go/workflows v1.8.0/go.mod h1:ysGhmEajwZxGn1OhGOGKsTXc5PyxOc0vfKf5Af+to4M= cloud.google.com/go/workflows v1.9.0/go.mod h1:ZGkj1aFIOd9c8Gerkjjq7OW7I5+l6cSvT3ujaO/WwSA= cloud.google.com/go/workflows v1.10.0/go.mod h1:fZ8LmRmZQWacon9UCX1r/g/DfAXx5VcPALq2CxzdePw= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8= git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc= github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys= 
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2/go.mod h1:SqINnQ9lVVdRlyC8cd1lCI0SdX4n2paeABd2K8ggfnE= github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY= github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0 h1:PiSrjRPpkQNjrM8H0WwKMnZUdu1RGMtd/LdGKUrOo+c= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0/go.mod h1:oDrbWx4ewMylP7xHivfgixbfGBT6APAwsSoHRKotnIc= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1 h1:MyVTgWR8qd/Jw1Le0NZebGBUCLbtak3bJ3z1OlqZBpw= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1/go.mod h1:GpPjLhVR9dnUoJMyHWSPy71xY9/lcmpzIPZXmF0FCVY= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0 h1:UXT0o77lXQrikd1kgwIPQOUect7EoR/+sbP4wQKdzxM= github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0/go.mod h1:cTvi54pg19DoT07ekoeMgE/taAwNtCShVeZqA+Iv2xI= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM= github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE= github.com/AzureAD/microsoft-authentication-library-for-go v1.4.1 h1:8BKxhZZLX/WosEeoCvWysmKUscfa9v8LIPEEU0JjE2o= github.com/AzureAD/microsoft-authentication-library-for-go v1.4.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/ClickHouse/ch-go v0.65.1 h1:SLuxmLl5Mjj44/XbINsK2HFvzqup0s6rwKLFH347ZhU= github.com/ClickHouse/ch-go v0.65.1/go.mod h1:bsodgURwmrkvkBe5jw1qnGDgyITsYErfONKAHn05nv4= github.com/ClickHouse/clickhouse-go/v2 v2.32.2 h1:Y8fAXt0CpLhqNXMLlSddg+cMfAr7zHBWqXLpih6ozCY= github.com/ClickHouse/clickhouse-go/v2 v2.32.2/go.mod h1:/vE8N/+9pozLkIiTMWbNUGviccDv/czEGS1KACvpXIk= github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/DataDog/zstd v1.5.6 h1:LbEglqepa/ipmmQJUDnSsfvA8e8IStVcGaFWDuxvGOY= github.com/DataDog/zstd v1.5.6/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.2 h1:DBjmt6/otSdULyJdVg2BlG0qGZO5tKL4VzOs0jpvw5Q= 
github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.2/go.mod h1:dppbR7CwXD4pgtV9t3wD1812RaLDcBjtblcDF5f1vI0= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.49.0 h1:o90wcURuxekmXrtxmYWTyNla0+ZEHhud6DI1ZTxd1vI= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.49.0/go.mod h1:6fTWu4m3jocfUZLYF5KsZC1TUfRvEjs7lM4crme/irw= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.49.0 h1:GYUJLfvd++4DMuMhCFLgLXvFwofIxh/qOwoGuS/LTew= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.49.0/go.mod h1:wRbFgBQUVm1YXrvWKofAEmq9HNJTDphbAaJSSX01KUI= github.com/IBM/nzgo/v12 v12.0.9 h1:SwzYFU5ooXsTZsQhU6OsbUhs/fQyLvCtlJYSEZ58mN0= github.com/IBM/nzgo/v12 v12.0.9/go.mod h1:4pvfEkfsrAdqlljsp8HNwv/uzNKy2fzoXBB1aRIssJg= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/MichaelS11/go-cql-driver v0.1.1 h1:ntFKov/39Tl36HckP4tzld3XMeyDYHHO00MiZNdoL1A= github.com/MichaelS11/go-cql-driver v0.1.1/go.mod h1:rMwGk5bMWiYI/If6r6dbqEfZG6nQLvqJHTplv5yTDaw= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/SAP/go-hdb v1.13.3 h1:T7ArXTKNytDWms4+NvmtIt9HFiaz4hSjvBIikfgVZis= github.com/SAP/go-hdb v1.13.3/go.mod h1:ghSRSuu6n65+M6exMu/P7jwkt6HHnc/Wstrf6ai2Qas= github.com/UNO-SOFT/zlog v0.8.1 h1:TEFkGJHtUfTRgMkLZiAjLSHALjwSBdw6/zByMC5GJt4= github.com/UNO-SOFT/zlog v0.8.1/go.mod h1:yqFOjn3OhvJ4j7ArJqQNA+9V+u6t9zSAyIZdWdMweWc= github.com/VoltDB/voltdb-client-go v1.0.16 h1:lnemSbNt+ceZ8/S/NAuQHDRmK1aSSR/s8UK9apszfmA= github.com/VoltDB/voltdb-client-go v1.0.16/go.mod h1:mMhb5zwkT46Ef3NvkFqt+kX0j+ltQ2Sdqj9+ICq+Yto= github.com/ahmetb/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:3YVZUqkoev4mL+aCwVOSWV4M7pN+NURHL38Z2zq5JKA= github.com/ahmetb/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:ymXt5bw5uSNu4jveerFxE0vNYxF8ncqbptntMaFMg3k= github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= github.com/alecthomas/assert/v2 v2.11.0 
h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= github.com/alecthomas/chroma/v2 v2.15.0 h1:LxXTQHFoYrstG2nnV9y2X5O94sOBzf0CIUpSTbpxvMc= github.com/alecthomas/chroma/v2 v2.15.0/go.mod h1:gUhVLrPDXPtp/f+L1jo9xepo9gL4eLwRuGAunSZMkio= github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/alexbrainman/odbc v0.0.0-20250224181725-329517659778 h1:HRnGkGB6LhiN7hYrDqUasEXp7EiiY3N52mTqAHdRiDc= github.com/alexbrainman/odbc v0.0.0-20250224181725-329517659778/go.mod h1:c5eyz5amZqTKvY3ipqerFO/74a/8CYmXOahSr40c+Ww= github.com/aliyun/aliyun-tablestore-go-sdk v1.7.3/go.mod h1:PWqq46gZJf7mnYTAuTmxKgx6EwJu3oBpOs1s2V0EZPM= github.com/aliyun/aliyun-tablestore-go-sdk v1.7.17 h1:88DbDTaKw+M8NI1ok57p7peVS7pwkDqeJWX1x4IjqYc= github.com/aliyun/aliyun-tablestore-go-sdk v1.7.17/go.mod h1:JzOJMpBPGN+4cuYnrGO5wdwphEyqbeGVY2vCaiAcNW8= github.com/aliyun/aliyun-tablestore-go-sql-driver v0.0.0-20220418015234-4d337cb3eed9 h1:DpsLZRlqHH1b2QyoLDK1/MtUtm7zuiQweA6hsTY97do= github.com/aliyun/aliyun-tablestore-go-sql-driver v0.0.0-20220418015234-4d337cb3eed9/go.mod h1:4yTI9ZSYNi4eENMKL8VWP22MzoDKeqDT4j7Fd103BVQ= github.com/amsokol/ignite-go-client v0.12.2 h1:q4Mr+UUiKVnR7ykjR1YARVS5jp+ZU6ekCIs0V4WgFDo= github.com/amsokol/ignite-go-client v0.12.2/go.mod h1:K3tKJGcLQORFD+ds7f0f9fl88tv0KZcpfuNhzRyuLVE= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apache/arrow-go/v18 v18.1.0 h1:agLwJUiVuwXZdwPYVrlITfx7bndULJ/dggbnLFgDp/Y= github.com/apache/arrow-go/v18 v18.1.0/go.mod h1:tigU/sIgKNXaesf5d7Y95jBBKS5KsxTqYBKXFsvKzo0= github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= github.com/apache/arrow/go/v12 v12.0.1 h1:JsR2+hzYYjgSUkBSaahpqCetqZMr76djX80fF/DiJbg= github.com/apache/arrow/go/v12 v12.0.1/go.mod h1:weuTY7JvTG/HDPtMQxEUp7pU73vkLWMLpY67QwZ/WWw= github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE= github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA= github.com/apache/arrow/go/v16 v16.1.0 h1:dwgfOya6s03CzH9JrjCBx6bkVb4yPD4ma3haj9p7FXI= github.com/apache/arrow/go/v16 v16.1.0/go.mod h1:9wnc9mn6vEDTRIm4+27pEjQpRKuTvBaessPoEXQzxWA= github.com/apache/arrow/go/v17 v17.0.0 h1:RRR2bdqKcdbss9Gxy2NS/hK8i4LDMh23L6BbkN5+F54= github.com/apache/arrow/go/v17 v17.0.0/go.mod h1:jR7QHkODl15PfYyjM2nU+yTLScZ/qfj7OSUZmJ8putc= github.com/apache/calcite-avatica-go/v5 v5.3.0 h1:7Gooh7opt3TObRe7WstTWbQGaA16ERjzoGeB26l3s/w= github.com/apache/calcite-avatica-go/v5 v5.3.0/go.mod h1:xgozzeFAHCh2ZZ7NCrD4CHx9waunSMOMXLDZRj9Gn3s= github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= github.com/apache/thrift v0.21.0 h1:tdPmh/ptjE1IJnhbhrcl2++TauVjy242rkV/UzJChnE= github.com/apache/thrift v0.21.0/go.mod h1:W1H8aR/QRtYNvrPeFXBtobyRkd0/YVhTc6i07XIAgDw= github.com/avast/retry-go v3.0.0+incompatible h1:4SOWQ7Qs+oroOTQOYnAHqelpCO0biHSxpiH9JdtuBj0= github.com/avast/retry-go v3.0.0+incompatible/go.mod 
h1:XtSnn+n/sHqQIpZ10K1qAevBhOOCWBLXXy3hyiqqBrY= github.com/aws/aws-sdk-go v1.37.32/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= github.com/aws/aws-sdk-go v1.55.6 h1:cSg4pvZ3m8dgYcgqB97MrcdjUmZ1BeMYKUxMMB89IPk= github.com/aws/aws-sdk-go v1.55.6/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM= github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 h1:zAybnyUQXIZ5mok5Jqwlf58/TFE7uvd3IAsa1aF9cXs= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10/go.mod h1:qqvMj6gHLR/EXWZw4ZbqlPbQUyenf4h82UQUlKc+l14= github.com/aws/aws-sdk-go-v2/config v1.29.8 h1:RpwAfYcV2lr/yRc4lWhUM9JRPQqKgKWmou3LV7UfWP4= github.com/aws/aws-sdk-go-v2/config v1.29.8/go.mod h1:t+G7Fq1OcO8cXTPPXzxQSnj/5Xzdc9jAAD3Xrn9/Mgo= github.com/aws/aws-sdk-go-v2/credentials v1.17.61 h1:Hd/uX6Wo2iUW1JWII+rmyCD7MMhOe7ALwQXN6sKDd1o= github.com/aws/aws-sdk-go-v2/credentials v1.17.61/go.mod h1:L7vaLkwHY1qgW0gG1zG0z/X0sQ5tpIY5iI13+j3qI80= github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.18.6 h1:5MXQb+ASlUe0SgSmPt8V0l4EFRKLyr0krAnMqMvlAjQ= github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.18.6/go.mod h1:V+IXONaymKaUpRMGVqdjaXhZwYFHAgFwxmJi6/132tE= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.64 h1:RTko0AQ0i1vWXDM97DkuW6zskgOxFxm4RqC0kmBJFkE= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.64/go.mod h1:ty968MpOa5CoQ/ALWNB8Gmfoehof2nRHDR/DZDPfimE= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34 h1:ZNTqv4nIdE/DiBfUUfXcLZ/Spcuz+RjeziUtNJackkM= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34/go.mod h1:zf7Vcd1ViW7cPqYWEHLHJkS50X0JS2IKz9Cgaj6ugrs= github.com/aws/aws-sdk-go-v2/service/dynamodb v1.41.0 h1:kSMAk72LZ5eIdY/W+tVV6VdokciajcDdVClEBVNWNP0= github.com/aws/aws-sdk-go-v2/service/dynamodb v1.41.0/go.mod h1:yYaWRnVSPyAmexW5t7G3TcuYoalYfT+xQwzWsvtUQ7M= github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.25.0 h1:iTFqGH+Eel+KPW0cFvsA6JVP9/86MEbENVz60dbHxIs= github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.25.0/go.mod h1:lUqWdw5/esjPTkITXhN4C66o1ltwDq2qQ12j3SOzhVg= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3/go.mod h1:0yKJC/kb8sAnmlYa6Zs3QVYqaC8ug2AbnNChv5Ox3uA= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.6.2 h1:t/gZFyrijKuSU0elA5kRngP/oU3mc0I+Dvp8HwRE4c0= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.6.2/go.mod h1:iu6FSzgt+M2/x3Dk8zhycdIcHjEFb36IS8HVUVFoMg0= 
github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.15 h1:M1R1rud7HzDrfCdlBQ7NjnRsDNEhXO/vGhuD189Ggmk= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.15/go.mod h1:uvFKBSq9yMPV4LGAi7N4awn4tLY+hKE35f8THes2mzQ= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 h1:dM9/92u2F1JbDaGooxTq18wmmFzbJRfXfVfy96/1CXM= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15 h1:moLQUoVq91LiqT1nbvzDukyqAlCv89ZmwaHw/ZFlFZg= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15/go.mod h1:ZH34PJUc8ApjBIfgQCFvkWcUDBtl/WTD+uiYHjd8igA= github.com/aws/aws-sdk-go-v2/service/s3 v1.78.0 h1:EBm8lXevBWe+kK9VOU/IBeOI189WPRwPUc3LvJK9GOs= github.com/aws/aws-sdk-go-v2/service/s3 v1.78.0/go.mod h1:4qzsZSzB/KiX2EzDjs9D7A8rI/WGJxZceVJIHqtJjIU= github.com/aws/aws-sdk-go-v2/service/sso v1.25.0 h1:2U9sF8nKy7UgyEeLiZTRg6ShBS22z8UnYpV6aRFL0is= github.com/aws/aws-sdk-go-v2/service/sso v1.25.0/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.0 h1:wjAdc85cXdQR5uLx5FwWvGIHm4OPJhTyzUHU8craXtE= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.0/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs= github.com/aws/aws-sdk-go-v2/service/sts v1.33.16 h1:BHEK2Q/7CMRMCb3nySi/w8UbIcPhKvYP5s1xf8/izn0= github.com/aws/aws-sdk-go-v2/service/sts v1.33.16/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k= github.com/aws/smithy-go v1.22.3/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= github.com/beltran/gohive v1.8.0 h1:Z6XqI4XHQeDCGkizAz2D7yqtUXPd0YRIpVJa7JLir9Y= github.com/beltran/gohive v1.8.0/go.mod h1:DnMBzukPbPMGi9a8Wm3eBbLfO9LGc7WEvqZLrdLVH0U= github.com/beltran/gosasl v1.0.0 h1:iiRtLxkvKhrNv3Ohh/n2NiyyfwIo/UbMzy/dZWiUHXE= github.com/beltran/gosasl v1.0.0/go.mod h1:Qx8cW6jkI8riyzmklj80kAIkv+iezFUTBiGU0qHhHes= github.com/beltran/gssapi v0.0.0-20200324152954-d86554db4bab h1:ayfcn60tXOSYy5zUN1AMSTQo4nJCf7hrdzAVchpPst4= github.com/beltran/gssapi v0.0.0-20200324152954-d86554db4bab/go.mod h1:GLe4UoSyvJ3cVG+DVtKen5eAiaD8mAJFuV5PT3Eeg9Q= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bippio/go-impala v2.1.0+incompatible h1:H/N1Ms5KhVa2IoRZ6NO9ZzBryfTGhNiLG/zmNXS0CHY= github.com/bippio/go-impala v2.1.0+incompatible/go.mod h1:lcyV/9s/ri5lFj3zdyyneQwDso8/Fd62fELt05Wts8g= github.com/bitfield/gotestdox v0.2.2 h1:x6RcPAbBbErKLnapz1QeAlf3ospg8efBsedU93CDsnE= github.com/bitfield/gotestdox v0.2.2/go.mod h1:D+gwtS0urjBrzguAkTM2wodsTQYFHdpx8eqRJ3N+9pY= github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932 h1:mXoPYz/Ul5HYEDvkta6I8/rnYM5gSdSV2tJ6XbZuEtY= github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= 
github.com/btnguyen2k/consu/checksum v1.1.1 h1:kdIJGk3yl83Nn1HxZRk3bXJM0xvlwTcTYUmZ8BiloPU= github.com/btnguyen2k/consu/checksum v1.1.1/go.mod h1:/zZ8EXdphDYEkBFua51hK9y3rODCPIkiZYnCDlHT670= github.com/btnguyen2k/consu/g18 v0.1.0 h1:IoS5w5QlOfkcrNOHJyICD6PgqLh+J5fIDqy3vRBVcVM= github.com/btnguyen2k/consu/g18 v0.1.0/go.mod h1:gTPcr87XdCLDISusRQyDey22/ZOw6bLh6EChxTLx6/c= github.com/btnguyen2k/consu/gjrc v0.2.2 h1:CAY8xPgvtWc7EMTE9gxam/BxMgTRRpc4Hs9QEyYxRUc= github.com/btnguyen2k/consu/gjrc v0.2.2/go.mod h1:Sc0NehbI0i8V6FAY9qX1we9XXbWNnrMOb9jNpYqGBWk= github.com/btnguyen2k/consu/olaf v0.1.3 h1:0dWWmN5nOB/9pJdo7o1S3wR2+l3kG7pXHv3Vwki8uNM= github.com/btnguyen2k/consu/olaf v0.1.3/go.mod h1:6ybEnJcdcK/PNiSfkKnMoxYuKyH2vJPBvHRuuZpPvD8= github.com/btnguyen2k/consu/reddo v0.1.7/go.mod h1:pdY5oIVX3noZIaZu3nvoKZ59+seXL/taXNGWh9xJDbg= github.com/btnguyen2k/consu/reddo v0.1.8/go.mod h1:pdY5oIVX3noZIaZu3nvoKZ59+seXL/taXNGWh9xJDbg= github.com/btnguyen2k/consu/reddo v0.1.9 h1:NZyEzRcDXzksNMnvZVZyJmGN6ZQQmHg4hIPCPbfsCBE= github.com/btnguyen2k/consu/reddo v0.1.9/go.mod h1:pdY5oIVX3noZIaZu3nvoKZ59+seXL/taXNGWh9xJDbg= github.com/btnguyen2k/consu/semita v0.1.5 h1:fu71xNJTbCV8T+6QPJdJu3bxtmLWvTjCepkvujF74+I= github.com/btnguyen2k/consu/semita v0.1.5/go.mod h1:fksCe3L4kxiJVnKKhUXKI8mcFdB9974mtedwUVVFu1M= github.com/btnguyen2k/consu/semver v0.2.1 h1:le0FzrM7u0IOR4MnOyBySHpZ/p3vV4JjofAhPB7edWE= github.com/btnguyen2k/consu/semver v0.2.1/go.mod h1:jxK/nwIWTXcWlcWcfkhPfLWq9b5dVzAtJLycySBFHTc= github.com/btnguyen2k/gocosmos v1.1.0 h1:16OIhDTAK6ChyjQMjG+yHEO/MGdO8UsCeJ/2xiY9eRE= github.com/btnguyen2k/gocosmos v1.1.0/go.mod h1:g599FZ7hAt6XZ108baotFrBr4U/r5xoyyQ8VyAZerL8= github.com/btnguyen2k/godynamo v1.3.0 h1:8Ri9gVWMvBWlD5P04AEVrl2QcmMQR7KgC3zSCl/YLWw= github.com/btnguyen2k/godynamo v1.3.0/go.mod h1:vNE48BoUAZS4F5ohrZ7suhw61DmCiXSClKrJfr2maTo= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chaisql/chai v0.16.1-0.20240218103834-23e406360fd2 h1:vLtnPMqbTuTQcnfn5EDipZI0PISMSn7AMmz6HhAu+GU= github.com/chaisql/chai v0.16.1-0.20240218103834-23e406360fd2/go.mod h1:ix/NVvPO+dv6wsmln8R5H22yJb0iIj6KrGpouHaJmrE= github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE= github.com/chzyer/logex 
v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4= github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= github.com/cockroachdb/errors v1.11.3 h1:5bA+k2Y6r+oz/6Z/RFlNeVCesGARKuC6YymtcDrbC/I= github.com/cockroachdb/errors v1.11.3/go.mod h1:m4UIW4CDjx+R5cybPsNrRbreomiFqt8o1h1wUVazSd8= github.com/cockroachdb/fifo v0.0.0-20240816210425-c5d0cb0b6fc0 h1:pU88SPhIFid6/k0egdR5V6eALQYq2qbSmukrkgIh/0A= github.com/cockroachdb/fifo v0.0.0-20240816210425-c5d0cb0b6fc0/go.mod h1:9/y3cnZ5GKakj/H4y9r9GTjCvAFta7KLgSHPJJYc52M= github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 h1:ASDL+UJcILMqgNeV5jiqR4j+sTuvQNHdf2chuKj1M5k= github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506/go.mod h1:Mw7HqKr2kdtu6aYGn3tPmAftiP3QPX63LdK/zcariIo= github.com/cockroachdb/pebble v1.1.4 h1:5II1uEP4MyHLDnsrbv/EZ36arcb9Mxg3n+owhZ3GrG8= github.com/cockroachdb/pebble v1.1.4/go.mod h1:4exszw1r40423ZsmkG/09AFEG83I0uDgfujJdbL6kYU= github.com/cockroachdb/redact v1.1.6 h1:zXJBwDZ84xJNlHl1rMyCojqyIxv+7YUpQiJLQ7n4314= github.com/cockroachdb/redact v1.1.6/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= 
github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8= github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ= github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= github.com/coreos/go-oidc/v3 v3.12.0 h1:sJk+8G2qq94rDI6ehZ71Bol3oUHy63qNYmkiSjrc/Jo= github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/couchbase/go-couchbase v0.1.1 h1:ClFXELcKj/ojyoTYbsY34QUrrYCBi/1G749sXSCkdhk= github.com/couchbase/go-couchbase v0.1.1/go.mod h1:+/bddYDxXsf9qt0xpDUtRR47A2GjaXmGGAqQ/k3GJ8A= github.com/couchbase/go_n1ql v0.0.0-20220303011133-0ed4bf93e31d h1:jOxYt3U9z+tj2WDvacvBhXmHXDt+EUR5Hbu56wTw6QY= github.com/couchbase/go_n1ql v0.0.0-20220303011133-0ed4bf93e31d/go.mod h1:Rn19fO9CVfhJkqyIED9ixL5Kh5XuH7hXgDTxyfGY7hM= github.com/couchbase/gomemcached v0.3.2 h1:08rxiOoNcv0x5LTxgcYhnx1aPvV7iEtfeyUgqsJyPk0= github.com/couchbase/gomemcached v0.3.2/go.mod h1:mxliKQxOv84gQ0bJWbI+w9Wxdpt9HjDvgW9MjCym5Vo= github.com/couchbase/goutils v0.1.2 h1:gWr8B6XNWPIhfalHNog3qQKfGiYyh4K4VhO3P2o9BCs= github.com/couchbase/goutils v0.1.2/go.mod h1:h89Ek/tiOxxqjz30nPPlwZdQbdB8BwgnuBxeoUe/ViE= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM= github.com/cyphar/filepath-securejoin v0.3.6/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0= github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8= github.com/databricks/databricks-sql-go v1.6.1 h1:SOAwVdw/N3AZ5ECJYI49SBUncNy61WzOpzlJFZ17O5g= github.com/databricks/databricks-sql-go v1.6.1/go.mod h1:/FB8hVRN/KGnWStEyz19r2r7TmfBsK8nUv6yMid//tU= github.com/datafuselabs/databend-go v0.7.3 h1:Sf5lzmHUrzvN67HI9w3sTSKHIKxy4Wh+xtFiWY70xtk= github.com/datafuselabs/databend-go v0.7.3/go.mod h1:h/sGUBZs7EqJgqnZ3XB0KHfyUlpGvfNrw2lWcdDJVIw= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ= 
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dnephin/pflag v1.0.7 h1:oxONGlWxhmUct0YzKTgrpQv9AUA1wtPBn7zuSjJqptk= github.com/dnephin/pflag v1.0.7/go.mod h1:uxE91IoWURlOiTUIA8Mq5ZZkAv3dPUfZNaT80Zm7OQE= github.com/docker/cli v26.1.4+incompatible h1:I8PHdc0MtxEADqYJZvhBrW9bo8gawKwwenxRM7/rLu8= github.com/docker/cli v26.1.4+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/docker v27.5.1+incompatible h1:4PYU5dnBYqRQi0294d1FBECqT9ECWeQAIfE8q4YnPY8= github.com/docker/docker v27.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/dvsekhvalnov/jose2go v1.8.0 h1:LqkkVKAlHFfH9LOEl5fe4p/zL02OhWE7pCufMBG2jLA= github.com/dvsekhvalnov/jose2go v1.8.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= github.com/edsrzf/mmap-go v1.1.0/go.mod h1:19H/e8pUPLicwkyNgOykDXkJ9F0MHE+Z52B8EIth78Q= github.com/edsrzf/mmap-go v1.2.0 h1:hXLYlkbaPzt1SaQk+anYwKSRNhufIDCchSPkUD6dD84= github.com/edsrzf/mmap-go v1.2.0/go.mod h1:19H/e8pUPLicwkyNgOykDXkJ9F0MHE+Z52B8EIth78Q= github.com/elastic/go-sysinfo v1.8.1/go.mod h1:JfllUnzoQV/JRYymbH3dO1yggI3mV2oTKSXsDHM+uIM= github.com/elastic/go-sysinfo v1.15.1 h1:zBmTnFEXxIQ3iwcQuk7MzaUotmKRp3OabbbWM8TdzIQ= github.com/elastic/go-sysinfo v1.15.1/go.mod h1:jPSuTgXG+dhhh0GKIyI2Cso+w5lPJ5PvVqKlL8LV/Hk= github.com/elastic/go-windows v1.0.0/go.mod h1:TsU0Nrp7/y3+VwE82FoZF8gC/XFg/Elz6CcloAxnPgU= github.com/elastic/go-windows v1.0.2 h1:yoLLsAsV5cfg9FLhZ9EXZ2n2sQFKeDYrHenkcivY4vI= github.com/elastic/go-windows v1.0.2/go.mod h1:bGcDpBzXgYSqM0Gx3DM4+UxFj300SZLixie9u9ixLM8= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/go-control-plane v0.10.3/go.mod h1:fJJn/j26vwOu972OllsvAgJJM//w9BV6Fxbg2LuVd34= 
github.com/envoyproxy/go-control-plane v0.11.1-0.20230524094728-9239064ad72f/go.mod h1:sfYdkwUW4BA3PbKjySwjJy+O4Pu0h62rlqCMHNk+K+Q= github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.6.7/go.mod h1:dyJXwwfPK2VSqiB9Klm1J6romD608Ba7Hij42vrOBCo= github.com/envoyproxy/protoc-gen-validate v0.9.1/go.mod h1:OKNgG7TCp5pF4d6XftA0++PMirau2/yoOwVac3AbF2w= github.com/envoyproxy/protoc-gen-validate v0.10.1/go.mod h1:DRjgyB0I43LtJapqN6NiRwroiAU2PaFuvk/vjgh61ss= github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= github.com/exasol/error-reporting-go v0.2.0 h1:nKIe4zYiTHbYrKJRlSNJcmGjTJCZredDh5akVHfIbRs= github.com/exasol/error-reporting-go v0.2.0/go.mod h1:lUzRJqKLiSuYpqRUN2LVyj08WeHzhMEC/8Gmgtuqh1Y= github.com/exasol/exasol-driver-go v1.0.12 h1:IL34rFC0o250uE1YSAzXttOFo9Qxp4KLRKQWnMKyvVU= github.com/exasol/exasol-driver-go v1.0.12/go.mod h1:ppL/hbx7LVJuAcs9jFTY/VSC9BOZeAHlycY8dxIs2dA= github.com/exasol/exasol-test-setup-abstraction-server/go-client v0.3.10 h1:qeEHBUnq2JB2kWkV+ttVV5ZBWQ/KEJ0jwfICGbFdrDw= github.com/exasol/exasol-test-setup-abstraction-server/go-client v0.3.10/go.mod h1:8YAw37IjSTGpzZ81UR9USPd64/tbH5D1D0X8O68r9zc= github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= github.com/getsentry/sentry-go v0.31.1 h1:ELVc0h7gwyhnXHDouXkhqTFSO5oslsRDk0++eyE0KJ4= github.com/getsentry/sentry-go v0.31.1/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY= github.com/ghodss/yaml 
v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw= github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw= github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg= github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo= github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= github.com/go-fonts/liberation v0.2.0/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY= github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gorp/gorp v2.2.0+incompatible h1:xAUh4QgEeqPPhK3vxZN+bzrim1z5Av6q837gtjUlshc= github.com/go-gorp/gorp v2.2.0+incompatible/go.mod h1:7IfkAQnO7jfT/9IQ3R9wL1dFhukN6aQxzKTHnkxzA/E= github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U= github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk= github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4= github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-sql-driver/mysql v1.9.0 h1:Y0zIbQXhQKmQgTp44Y1dp3wTXcn804QoTptLZT1vtvo= github.com/go-sql-driver/mysql v1.9.0/go.mod h1:pDetrLJeA3oMujJuvXc8RJoasr589B6A9fwzD3QMrqw= github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= 
github.com/go-zookeeper/zk v1.0.4 h1:DPzxraQx7OrPyXq2phlGlNSIyWEsAox0RJmjTseMV6I= github.com/go-zookeeper/zk v1.0.4/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw= github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/gocql/gocql v0.0.0-20200815110948-5378c8f664e9/go.mod h1:DL0ekTmBSTdlNF25Orwt/JMzqIq3EJ4MVa/J/uK64OY= github.com/gocql/gocql v1.7.0 h1:O+7U7/1gSN7QTEAaMEsJc1Oq2QHXvCWoF3DFK9HDHus= github.com/gocql/gocql v1.7.0/go.mod h1:vnlvXyFZeLBF0Wy+RS8hrOdbn0UWsWtdg07XJnFxZ+4= github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godror/godror v0.47.0 h1:GZsaMOIvLqgTPPVXFIavRI4mqwNIhmcFfEZbzWeabGE= github.com/godror/godror v0.47.0/go.mod h1:44hxVDzvFSwc+yGyRM+riCLNAY5SwZkUfLzVTh5MXCg= github.com/godror/knownpb v0.2.0 h1:RJLntksFiKUHoUz3wCCJ8+DBjxSLYHYDNl1xRz0/gXI= github.com/godror/knownpb v0.2.0/go.mod h1:kRahRJBwqTenpVPleymQ4k433Xz2Wuy7dOeFSuEpmkI= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/gohxs/readline v0.0.0-20171011095936-a780388e6e7c h1:yE35fKFwcelIte3q5q1/cPiY7pI7vvf5/j/0ddxNCKs= github.com/gohxs/readline v0.0.0-20171011095936-a780388e6e7c/go.mod h1:9S/fKAutQ6wVHqm1jnp9D9sc5hu689s9AaTWFS92LaU= github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-module/carbon/v2 v2.4.1 h1:cYUD8T+rHeX+qIybGYpnJ8I90F10dvyEF67VNOO+zZM= github.com/golang-module/carbon/v2 v2.4.1/go.mod h1:1jP9AZ4k2+lmfgY/wZgmtsN52VcHC5YuPM6varKDTkM= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP3NQ= github.com/golang/glog v1.2.3 h1:oDTdz9f5VGVVNGu/Q7UXKWYsD0873HXLHdJUNBsSEKM= github.com/golang/glog v1.2.3/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/mock v1.7.0-rc.1 h1:YojYx61/OLFsiv6Rw1Z96LpldJIy31o+UHmwAUMJ6/U= github.com/golang/mock v1.7.0-rc.1/go.mod h1:s42URUywIqd+OcERslBJvOjepvNymP31m3q8d/GkuRs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 
h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers/go v0.0.0-20230110200425-62e4d2e5b215 h1:HA3/6NDG1q6qLD2sCqDFT/ua/1/wctoLo7leuFcFdSE= github.com/google/flatbuffers/go v0.0.0-20230110200425-62e4d2e5b215/go.mod h1:qmRCJW6OqZkfBt584Cmq1im0f4367CLrdABrq5lMOWo= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/goexpect v0.0.0-20210430020637-ab937bf7fd6f h1:7MmqygqdeJtziBUpm4Z9ThROFZUaVGaePMfcDnluf1E= github.com/google/goexpect v0.0.0-20210430020637-ab937bf7fd6f/go.mod h1:n1ej5+FqyEytMt/mugVDZLIiqTMO+vsrgY+kM6ohzN0= github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4= github.com/google/goterm v0.0.0-20200907032337-555d40f16ae2 h1:CVuJwN34x4xM2aT4sIKhmeib40NeBPhRihNjQmpJsA4= github.com/google/goterm v0.0.0-20200907032337-555d40f16ae2/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/martian/v3 v3.3.2/go.mod 
h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20241021161924-4cf4322d492d h1:dcUSYLuKITgwgLZJZpB+CKecsC8mXHhErghMX9ohbf4= github.com/google/pprof v0.0.0-20241021161924-4cf4322d492d/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.1.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= github.com/googleapis/enterprise-certificate-proxy v0.2.1/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= 
github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/enterprise-certificate-proxy v0.3.5 h1:VgzTY2jogw3xt39CusEnFJWm7rlsq5yL5q9XdLOuP5g= github.com/googleapis/enterprise-certificate-proxy v0.3.5/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqEF02fYlzkUCyo= github.com/googleapis/gax-go/v2 v2.6.0/go.mod h1:1mjbznJAPHFpesgE5ucqfYEscaz5kMdcIDwU/6+DDoY= github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= github.com/googleapis/gax-go/v2 v2.7.1/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= github.com/googleapis/go-sql-spanner v1.11.1 h1:z3ThtKV5HFvaNv9UGc26+ggS+lS0dsCAkaFduKL7vws= github.com/googleapis/go-sql-spanner v1.11.1/go.mod h1:fuA5q4yMS3SZiVfRr5bvksPNk7zUn/irbQW62H/ffZw= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8= github.com/hailocab/go-hostpool 
v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/icholy/digest v1.1.0 h1:HfGg9Irj7i+IX1o1QAmPfIBNu/Q5A5Tu3n/MED9k9H4= github.com/icholy/digest v1.1.0/go.mod h1:QNrsSGQ5v7v9cReDI0+eyjsXGUoRSUZQHeQ5C4XLa0Y= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI= github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ= github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= github.com/jcmturner/gofork 
v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8= github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= github.com/jeandeaual/go-locale v0.0.0-20241217141322-fcc2cadd6f08 h1:wMeVzrPO3mfHIWLZtDcSaGAe2I4PW9B/P5nMkRSwCAc= github.com/jeandeaual/go-locale v0.0.0-20241217141322-fcc2cadd6f08/go.mod h1:ZDXo8KHryOWSIqnsb/CiDq7hQUYryCgdVnxbj8tDG7o= github.com/jedib0t/go-pretty/v6 v6.2.7/go.mod h1:FMkOpgGD3EZ91cW8g/96RfxoV7bdeJyzXPYgz1L1ln0= github.com/jedib0t/go-pretty/v6 v6.6.7 h1:m+LbHpm0aIAPLzLbMfn8dc3Ht8MW7lsSO4MPItz/Uuo= github.com/jedib0t/go-pretty/v6 v6.6.7/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmrobles/h2go v0.5.0 h1:r+V3J1+8z5tExKHcVc8u0tXJfov391zEffJYALWKhA0= github.com/jmrobles/h2go v0.5.0/go.mod h1:p7Vjfu/9f7g2RI1CkpwXnwqskV+47HviBg4C4FlW8eI= github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak= github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I= github.com/jonboulle/clockwork v0.5.0/go.mod h1:3mZlmanh0g2NDKO5TWZVJAfofYk64M7XN3SzBPjZF60= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 h1:iQTw/8FWTuc7uiaSepXwyf3o52HaUYcV+Tu66S3F5GA= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kenshaw/colors v0.2.1 h1:faVXggEiC12dah/CEec8qvbRGbyp6Zx+OwaH3Y/6sB4= 
github.com/kenshaw/colors v0.2.1/go.mod h1:Aok7+9KpR+qEwgCxDEoLBS6IGFhY1iRJIzbcv5ijewI= github.com/kenshaw/rasterm v0.1.12 h1:NfxlvB+HI02tOIUHv+4gV2rC/MstIQb0wBCC5mL8It0= github.com/kenshaw/rasterm v0.1.12/go.mod h1:oKL76NdzLgqRc+d4cHAi5btI1kx5JhnLm11GIzxRlkE= github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6 h1:IsMZxCuZqKuao2vNdfD82fjjgPLfyHLpR41Z88viRWs= github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6/go.mod h1:3VeWNIJaW+O5xpRQbPp0Ybqu1vJd/pm7s2F473HRrkw= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE= github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/marcboeker/go-duckdb v1.8.5 h1:tkYp+TANippy0DaIOP5OEfBEwbUINqiFqgwMQ44jME0= github.com/marcboeker/go-duckdb v1.8.5/go.mod h1:6mK7+WQE4P4u5AFLvVBmhFxY5fvhymFptghgJX6B+/8= github.com/mattn/go-adodb v0.0.1 
h1:g/pk3V8m/WFX2IQRI58wAC24OQUFFXEiNsvs7dQ1WKg= github.com/mattn/go-adodb v0.0.1/go.mod h1:jaSTRde4bohMuQgYQPxW3xRTPtX/cZKyxPrFVseJULo= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-sixel v0.0.5 h1:55w2FR5ncuhKhXrM5ly1eiqMQfZsnAHIpYNGZX03Cv8= github.com/mattn/go-sixel v0.0.5/go.mod h1:h2Sss+DiUEHy0pUqcIB6PFXo5Cy8sTQEFr3a9/5ZLNw= github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM= github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/microsoft/go-mssqldb v1.8.0 h1:7cyZ/AT7ycDsEoWPIXibd+aVKFtteUNhDGf3aobP+tw= github.com/microsoft/go-mssqldb v1.8.0/go.mod h1:6znkekS3T2vp0waiMhen4GPU1BiAsrP+iXHcE7a7rFo= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mithrandie/csvq v1.18.1 h1:f7NB2scbb7xx2ffPduJ2VtZ85RpWXfvanYskAkGlCBU= github.com/mithrandie/csvq v1.18.1/go.mod h1:MRJj7AtcXfk7jhNGxLuJGP3LORmh4lpiPWxQ7VyCRn8= github.com/mithrandie/csvq-driver v1.7.0 h1:ejiavXNWwTPMyr3fJFnhcqd1L1cYudA0foQy9cZrqhw= github.com/mithrandie/csvq-driver v1.7.0/go.mod h1:HcN3xL9UCJnBYA/AIQOOB/KlyfXAiYr5yxDmiwrGk5o= github.com/mithrandie/go-file/v2 v2.1.0 h1:XA5Tl+73GXMDvgwSE3Sg0uC5FkLr3hnXs8SpUas0hyg= github.com/mithrandie/go-file/v2 v2.1.0/go.mod h1:9YtTF3Xo59GqC1Pxw6KyGVcM/qubAMlxVsqI/u9r++c= github.com/mithrandie/go-text v1.6.0 h1:8gOXTMPbMY8DJbKMTv8kHhADcJlDWXqS/YQH4SyWO6s= github.com/mithrandie/go-text v1.6.0/go.mod h1:xCgj1xiNbI/d4xA9sLVvXkjh5B2tNx2ZT2/3rpmh8to= github.com/mithrandie/ternary v1.1.1 h1:k/joD6UGVYxHixYmSR8EGgDFNONBMqyD373xT4QRdC4= 
github.com/mithrandie/ternary v1.1.1/go.mod h1:0D9Ba3+09K2TdSZO7/bFCC0GjSXetCvYuYq0u8FY/1g= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/nakagami/chacha20 v0.1.0 h1:2fbf5KeVUw7oRpAe6/A7DqvBJLYYu0ka5WstFbnkEVo= github.com/nakagami/chacha20 v0.1.0/go.mod h1:xpoujepNFA7MvYLvX5xKHzlOHimDrLI9Ll8zfOJ0l2E= github.com/nakagami/firebirdsql v0.9.14 h1:y4tK+dZtlGuvIecOsbX/8ak1381FpK9Tp/mShRtnxBk= github.com/nakagami/firebirdsql v0.9.14/go.mod h1:bZKRs3rpHAjJgXAoc9YiPobTz3R22i41Zjo+llIS2B0= github.com/nathan-fiscaletti/consolesize-go v0.0.0-20220204101620-317176b6684d h1:NqRhLdNVlozULwM1B3VaHhcXYSgrOAv8V5BE65om+1Q= github.com/nathan-fiscaletti/consolesize-go v0.0.0-20220204101620-317176b6684d/go.mod h1:cxIIfNMTwff8f/ZvRouvWYF6wOoO7nj99neWSx2q/Es= github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/oklog/ulid/v2 v2.0.2 h1:r4fFzBm+bv0wNKNh5eXTwU7i85y5x+uwkxCUTNVQqLc= github.com/oklog/ulid/v2 v2.0.2/go.mod h1:mtBL0Qe/0HAx6/a4Z30qxVIAL1eQDweXq5lxOEiwQ68= github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/opencontainers/runc v1.1.13 h1:98S2srgG9vw0zWcDpFMn5TRrh8kLxa/5OFUstuUhmRs= 
github.com/opencontainers/runc v1.1.13/go.mod h1:R016aXacfp/gwQBYw2FDGa9m+n6atbLWrYY8hNMT/sA= github.com/ory/dockertest/v3 v3.11.0 h1:OiHcxKAvSDUwsEVh2BjxQQc/5EHz9n0va9awCtNGuyA= github.com/ory/dockertest/v3 v3.11.0/go.mod h1:VIPxS1gwT9NpPOrfD3rACs8Y9Z7yhzO4SB194iUDnUI= github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU= github.com/paulmach/orb v0.11.1/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU= github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY= github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prestodb/presto-go-client v0.0.0-20240426182841-905ac40a1783 h1:1/uuAh1vatqywFmudA7PHVUc/Iu5W4iFft1r7MVubf8= github.com/prestodb/presto-go-client v0.0.0-20240426182841-905ac40a1783/go.mod h1:9mH1KvIoMeUe/OIs6WCJGvrR15FvC0y+SSMkIQQkF3M= github.com/prometheus/client_golang v1.21.0 h1:DIsaGmiaBkSangBgMtWdNfxbMNdku5IK6iNhrEqWvdA= github.com/prometheus/client_golang v1.21.0/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.3.0/go.mod 
h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io= github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I= github.com/prometheus/procfs v0.0.0-20190425082905-87a4384529e0/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/proullon/ramsql v0.1.4 h1:yTFRTn46gFH/kPbzCx+mGjuFlyTBUeDr3h2ldwxddl0= github.com/proullon/ramsql v0.1.4/go.mod h1:CFGqeQHQpdRfWqYmWD3yXqPTEaHkF4zgXy1C6qDWc9E= github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= github.com/rekby/fixenv v0.6.1 h1:jUFiSPpajT4WY2cYuc++7Y1zWrnCxnovGCIX72PZniM= github.com/rekby/fixenv v0.6.1/go.mod h1:/b5LRc06BYJtslRtHKxsPWFT/ySpHV+rWvzTg+XWk4c= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= github.com/sagikazarmark/locafero v0.6.0 h1:ON7AQg37yzcRPU69mt7gwhFEBwxI6P9T4Qu3N51bwOk= github.com/sagikazarmark/locafero v0.6.0/go.mod h1:77OmuIc6VTraTXKXIs/uvUxKGUXjE1GbemJYHqdNjX0= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/segmentio/asm v1.2.0 
h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys= github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/sijms/go-ora/v2 v2.8.24 h1:TODRWjWGwJ1VlBOhbTLat+diTYe8HXq2soJeB+HMjnw= github.com/sijms/go-ora/v2 v2.8.24/go.mod h1:QgFInVi3ZWyqAiJwzBQA+nbKYKH77tdp1PYoCqhR2dU= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/snowflakedb/gosnowflake v1.13.0 h1:NQoy4mnHUmBuruJhzAGVRO9YLpFxayYTCLf+dxvG7bk= github.com/snowflakedb/gosnowflake v1.13.0/go.mod h1:nwiPNHaS3EGxnW1rr10ascVYFLA4EKrqMX2TxPt0+N4= github.com/soniakeys/quant v1.0.0 h1:N1um9ktjbkZVcywBVAAYpZYSHxEfJGzshHCxx/DaI0Y= github.com/soniakeys/quant v1.0.0/go.mod h1:HI1k023QuVbD4H8i9YdfZP2munIHU4QpjsImz6Y6zds= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w= github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/test-go/testify v1.1.4 h1:Tf9lntrKUMHiXQ07qBScBTSA0dhYQlu83hswqelv1iE= github.com/test-go/testify v1.1.4/go.mod h1:rH7cfJo/47vWGdi4GPj16x3/t1xGOj2YxzmNQzk2ghU= github.com/thda/tds v0.1.7 h1:s29kbnJK0agL3ps85A/sb9XS2uxgKF5UJ6AZjbyqXX4= github.com/thda/tds v0.1.7/go.mod h1:isLIF1oZdXfkqVMJM8RyNrsjlHPlTKnPlnsBs7ngZcM= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/trinodb/trino-go-client v0.321.0 h1:ViwiBxLNlJARWLCH4Q6MOjWFu/WrsznOM7QzRG/kRlY= github.com/trinodb/trino-go-client v0.321.0/go.mod h1:F+7TZRD0+0M8XqYsgXT8+EJT1pSlbxTECVD1BDzCc70= github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg= github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= github.com/uber-go/tally v3.3.17+incompatible/go.mod h1:YDTIBxdXyOU/sCWilKB4bgyufu1cEi0jdVnRdxvjnmU= github.com/uber-go/tally v3.5.10+incompatible h1:PDkAMvnVYOdJtvyCZQURmHtOakCiWAWFVKejrrvOMBo= github.com/uber-go/tally v3.5.10+incompatible/go.mod h1:YDTIBxdXyOU/sCWilKB4bgyufu1cEi0jdVnRdxvjnmU= github.com/uber/athenadriver v1.1.15 h1:z/hivAcXmGgUCVoXgVvwwIzc4auTeF3TCmwyFTtd8NE= github.com/uber/athenadriver v1.1.15/go.mod h1:RnKD7+9Aup8iuFfhK+I26U+z137IXWeoLaEZDepd0Eg= github.com/vertica/vertica-sql-go v1.3.3 h1:fL+FKEAEy5ONmsvya2WH5T8bhkvY27y/Ik3ReR2T+Qw= github.com/vertica/vertica-sql-go v1.3.3/go.mod h1:jnn2GFuv+O2Jcjktb7zyc4Utlbu9YVqpHH/lx63+1M4= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 
h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xo/dburl v0.23.3 h1:s9tUyKAkcgRfNQ7ut5gaDWC9s5ROafY3hmNOrGbNXtE= github.com/xo/dburl v0.23.3/go.mod h1:uazlaAQxj4gkshhfuuYyvwCBouOmNnG2aDxTCFZpmL4= github.com/xo/tblfmt v0.0.0-20190609041254-28c54ec42ce8/go.mod h1:3U5kKQdIhwACye7ml3acccHmjGExY9WmUGU7rnDWgv0= github.com/xo/tblfmt v0.15.0 h1:yZjoeEP6uELgCe1iO+Dq/rswqkWY2rWu5oeAZ9FkKyE= github.com/xo/tblfmt v0.15.0/go.mod h1:RVeyRDz6wHQESwSUG2hLDcEhilUapBW8H3jYQXib8U8= github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= github.com/xwb1989/sqlparser v0.0.0-20180606152119-120387863bf2 h1:zzrxE1FKn5ryBNl9eKOeqQ58Y/Qpo3Q9QNxKHX5uzzQ= github.com/xwb1989/sqlparser v0.0.0-20180606152119-120387863bf2/go.mod h1:hzfGeIUDq/j97IG+FhNqkowIyEcD88LrW6fyU3K3WqY= github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= github.com/ydb-platform/ydb-go-genproto v0.0.0-20241112172322-ea1f63298f77 h1:LY6cI8cP4B9rrpTleZk95+08kl2gF4rixG7+V/dwL6Q= github.com/ydb-platform/ydb-go-genproto v0.0.0-20241112172322-ea1f63298f77/go.mod h1:Er+FePu1dNUieD+XTMDduGpQuCPssK5Q4BjF+IIXJ3I= github.com/ydb-platform/ydb-go-sdk/v3 v3.100.2 h1:huEi8ihTfSroMdsrXxt+J/jUxUtaUdMX04xEa2nDa0A= github.com/ydb-platform/ydb-go-sdk/v3 v3.100.2/go.mod h1:knXehPLqrF/uBrYY0EbDtAMR+Ve8sAwIm/pNsfvbs7E= github.com/yookoala/realpath v1.0.0 h1:7OA9pj4FZd+oZDsyvXWQvjn5oBdcHRTV44PpdMSuImQ= github.com/yookoala/realpath v1.0.0/go.mod h1:gJJMA9wuX7AcqLy1+ffPatSCySA1FQ2S8Ya9AIoYBpE= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= github.com/ziutek/telnet v0.0.0-20180329124119-c3b780dc415b/go.mod h1:IZpXDfkJ6tWD3PhBK5YzgQT+xJWh7OsdwiG8hA2MkO4= gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b h1:7gd+rd8P3bqcn/96gOZa3F5dpJr/vEiDQYlNb/y2uNs= gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE= go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g= go.opencensus.io v0.21.0/go.mod 
h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= go.opentelemetry.io/contrib/detectors/gcp v1.34.0 h1:JRxssobiPg23otYU5SbWtQC//snGVIM3Tx6QRzlQBao= go.opentelemetry.io/contrib/detectors/gcp v1.34.0/go.mod h1:cV4BMFcscUR/ckqLkbfQmF0PRsq8w/lMGzdbCSveBHo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 h1:rgMkmiGfix9vFJDcDi1PK8WEQP4FLQwLDfhp5ZLpFeE= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0/go.mod h1:ijPqXp5P6IRRByFVVg9DY8P5HkxkHE5ARIa+86aXPf4= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 h1:CV7UdSGJt/Ao6Gp4CXckLxVRRsRgDHoI8XjbL3PDl8s= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0/go.mod h1:FRmFuRJfag1IZ2dPkHnEoSFVgTVPUd2qf5Vi69hLb8I= go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY= go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ= go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE= go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A= go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU= go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk= go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w= go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k= go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod 
h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/config v1.4.0/go.mod h1:aCyrMHmUAc/s2h9sv1koP84M9ZF/4K+g2oleyESO/Ig= go.uber.org/dig v1.9.0/go.mod h1:X34SnWGr8Fyla9zQNO2GSO2D+TIuqB14OS8JhYocIyw= go.uber.org/fx v1.12.0/go.mod h1:egT3Kyg1JFYQkvKLZ3EsykxkNrZxgXS+gKoKo7abERY= go.uber.org/goleak v0.10.0/go.mod h1:VCZuO8V8mFPlL0F5J5GK1rtHV3DrFcQ1R8ryq7FK0aI= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.6.0/go.mod 
h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= golang.org/x/exp v0.0.0-20191002040644-a1355ae1e2c3/go.mod h1:NOZ3BPKG0ec/BKJQgnvsSFpcKLM5xXVWnvZS97DWHgE= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 h1:aWwlzYV971S4BXRS9AmqwDLAD85ouC6X+pocatKY58c= golang.org/x/exp v0.0.0-20250228200357-dead58393ab7/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20200119044424-58c23975cae1/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= 
golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= golang.org/x/image v0.0.0-20220302094943-723b81ca9867/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM= golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net 
v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net 
v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.0.0-20221012135044-0b7e1fb9d458/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.0.0-20221014081412-f15817d10f9b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 
v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= golang.org/x/oauth2 v0.0.0-20220622183110-fd043fe589d2/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.0.0-20221006150949-b44042a4b9c1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec= golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180816055513-1c9583448a9c/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210304124612-50617c2ba197/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20221013171732-95e765b1cc43/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.10.0 h1:3usCWA8tQn0L8+hFJQNgzpWbd89begxN66o1Ojdn5L4= golang.org/x/time v0.10.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190802003818-e9bb7d36c060/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191030062658-86caa796c7ab/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191104232314-dc038396d1f0/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191114200427-caa0b0f7d508/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools 
v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= 
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.11.0/go.mod h1:anzJrxPjNtfgiYQYirP2CPGzGLxrH2u2QBhn6Bf3qY8= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.19.0/go.mod h1:qoJWxmGSIBmAeriMx19ogtrEPrGtDbPK634QFIcLAhc= golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY= golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA= gonum.org/v1/gonum v0.15.1 h1:FNy7N6OUZVUaWG9pTiD+jlhdQ3lMP+/LcTpJ6+a8sQ0= gonum.org/v1/gonum v0.15.1/go.mod h1:eZTZuRFrzu5pcyjN5wJhcIhnUdNijYxX1T2IcrOGY0o= gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY= gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= 
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= google.golang.org/api v0.77.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o= google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g= google.golang.org/api v0.90.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= google.golang.org/api v0.93.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= google.golang.org/api v0.95.0/go.mod 
h1:eADj+UBuxkh5zlrSntJghuNeg8HwQ1w5lTKkuqaETEI= google.golang.org/api v0.96.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= google.golang.org/api v0.97.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= google.golang.org/api v0.98.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= google.golang.org/api v0.99.0/go.mod h1:1YOf74vkVndF7pG6hIHuINsM7eWwpVTAfNMNiL91A08= google.golang.org/api v0.100.0/go.mod h1:ZE3Z2+ZOr87Rx7dqFsdRQkRBk36kDtp/h+QpHbB7a70= google.golang.org/api v0.102.0/go.mod h1:3VFl6/fzoA+qNuS1N1/VfXY4LjoXN/wzeIp7TweWwGo= google.golang.org/api v0.103.0/go.mod h1:hGtW6nK1AC+d9si/UBhw8Xli+QMOf6xyNAyJw4qU9w0= google.golang.org/api v0.106.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= google.golang.org/api v0.107.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0= google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= google.golang.org/api v0.223.0 h1:JUTaWEriXmEy5AhvdMgksGGPEFsYfUKaPEYXd4c3Wvc= google.golang.org/api v0.223.0/go.mod h1:C+RS7Z+dDwds2b+zoAk5hN/eSfsiCn0UDrYof/M4d2M= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod 
h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= google.golang.org/genproto v0.0.0-20220329172620-7be39ac1afc7/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod 
h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220502173005-c8bf987b8c21/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= google.golang.org/genproto v0.0.0-20220628213854-d9e0b6570c03/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= google.golang.org/genproto v0.0.0-20220722212130-b98a9ff5e252/go.mod h1:GkXuJDJ6aQ7lnJcRF+SJVgFdQhypqgl3LB1C9vabdRE= google.golang.org/genproto v0.0.0-20220801145646-83ce21fca29f/go.mod h1:iHe1svFLAZg9VWz891+QbRMwUv9O/1Ww+/mngYeThbc= google.golang.org/genproto v0.0.0-20220815135757-37a418bb8959/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= google.golang.org/genproto v0.0.0-20220817144833-d7fd3f11b9b1/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= google.golang.org/genproto v0.0.0-20220822174746-9e6da59bd2fc/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= google.golang.org/genproto v0.0.0-20220829144015-23454907ede3/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= google.golang.org/genproto v0.0.0-20220829175752-36a9c930ecbf/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= google.golang.org/genproto v0.0.0-20220913154956-18f8339a66a5/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= google.golang.org/genproto v0.0.0-20220914142337-ca0e39ece12f/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= google.golang.org/genproto v0.0.0-20220915135415-7fd63a7952de/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= google.golang.org/genproto v0.0.0-20220916172020-2692e8806bfa/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= google.golang.org/genproto v0.0.0-20220919141832-68c03719ef51/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= google.golang.org/genproto v0.0.0-20220920201722-2b89144ce006/go.mod h1:ht8XFiar2npT/g4vkk7O0WYS1sHOHbdujxbEp7CJWbw= google.golang.org/genproto v0.0.0-20220926165614-551eb538f295/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI= google.golang.org/genproto v0.0.0-20220926220553-6981cbe3cfce/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI= google.golang.org/genproto v0.0.0-20221010155953-15ba04fc1c0e/go.mod h1:3526vdqwhZAwq4wsRUaVG555sVgsNmIjRtO7t/JH29U= google.golang.org/genproto v0.0.0-20221014173430-6e2ab493f96b/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= google.golang.org/genproto v0.0.0-20221024153911-1573dae28c9c/go.mod 
h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c/go.mod h1:CGI5F/G+E5bKwmfYo09AXuVN4dD894kIKUFmVbP2/Fo= google.golang.org/genproto v0.0.0-20221109142239-94d6d90a7d66/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221114212237-e4508ebdbee1/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221117204609-8f9c96812029/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221201204527-e3fa12d562f3/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221202195650-67e5cbc046fd/go.mod h1:cTsE614GARnxrLsqKREzmNYJACSWWpAWdNMwnD7c2BE= google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230112194545-e10362b5ecf9/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230113154510-dbe35b8444a5/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230123190316-2c411cf9d197/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230124163310-31e0e69b6fc2/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230125152338-dcaf20b6aeaa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230127162408-596548ed4efa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230209215440-0dfe4f8abfcc/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230216225411-c8e22ba71e44/go.mod h1:8B0gmkoRebU8ukX6HP+4wrVQUY1+6PkQ44BSyIlflHA= google.golang.org/genproto v0.0.0-20230222225845-10f96fb3dbec/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= google.golang.org/genproto v0.0.0-20230223222841-637eb2293923/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= google.golang.org/genproto v0.0.0-20230303212802-e74f57abe488/go.mod h1:TvhZT5f700eVlTNwND1xoEZQeWTB2RY/65kplwl/bFA= google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= google.golang.org/genproto v0.0.0-20230320184635-7606e756e683/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= google.golang.org/genproto v0.0.0-20230323212658-478b75c54725/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= google.golang.org/genproto v0.0.0-20230330154414-c0448cd141ea/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= 
google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb h1:p31xT4yrYrSM/G4Sn2+TNUkVhFCbG9y8itM2S6Th950= google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:jbe3Bkdp+Dh2IrslsFCklNhweNTBgSYanP1UXhJDhKg= google.golang.org/genproto/googleapis/rpc v0.0.0-20250303144028-a0af3efb3deb h1:TLPQVbx1GJ8VKZxz52VAxl1EBgKXXbTiU9Fc5fZeLn4= google.golang.org/genproto/googleapis/rpc v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc 
v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= google.golang.org/grpc v1.52.3/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5vorUY= google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/grpc v1.70.0 h1:pWFv03aZoHzlRKHWicjsZytKAiYCtNS0dHbXnIdq7jQ= google.golang.org/grpc v1.70.0/go.mod h1:ofIJqVKDXx/JiXrwr2IG4/zwdH9txy3IlF40RmcJSQw= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod 
h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/jcmturner/aescts.v1 v1.0.1 h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw= gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= gopkg.in/jcmturner/dnsutils.v1 v1.0.1 h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM= gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= gopkg.in/jcmturner/goidentity.v3 v3.0.0 h1:1duIyWiTaYvVx3YX2CYtpJbUFd7/UuPYCfgXtQ3VTbI= gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4= gopkg.in/jcmturner/gokrb5.v6 v6.1.1 h1:n0KFjpbuM5pFMN38/Ay+Br3l91netGSVqHPHEXeWUqk= gopkg.in/jcmturner/gokrb5.v6 v6.1.1/go.mod h1:NFjHNLrHQiruory+EmqDXCGv6CrjkeYeA+bR9mIfNFk= gopkg.in/jcmturner/rpc.v1 v1.1.0 h1:QHIUxTX1ISuAv9dD2wJ9HWQVuWDX/Zc0PfeC2tjc4rU= gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/driver/bigquery v1.2.0 h1:E94oEXErYb4uImcR8oiCjE1SP2VdnrL5f3d78PtFWNk= gorm.io/driver/bigquery v1.2.0/go.mod h1:/5kcyb6RVIk/seff6YANAjB5aisE4oqY35x0Ix9iwXY= gorm.io/driver/postgres v1.5.2 h1:ytTDxxEv+MplXOfFe3Lzm7SjG09fcdb3Z/c056DTBx0= gorm.io/driver/postgres v1.5.2/go.mod h1:fmpX0m2I1PKuR7mKZiEluwrP3hbs+ps7JIGMUBpCgl8= gorm.io/gorm v1.24.0/go.mod h1:DVrVomtaYTbqs7gB/x2uVvqnXzv0nqjB396B8cG4dBA= gorm.io/gorm v1.25.11 h1:/Wfyg1B/je1hnDx3sMkX+gAlxrlZpn6X0BXRlwXlvHg= gorm.io/gorm v1.25.11/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ= gotest.tools/gotestsum v1.12.0 h1:CmwtaGDkHxrZm4Ib0Vob89MTfpc3GrEFMJKovliPwGk= gotest.tools/gotestsum v1.12.0/go.mod h1:fAvqkSptospfSbQw26CTYzNwnsE/ztqLeyhP0h67ARY= gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod 
h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= howett.net/plist v0.0.0-20181124034731-591f970eefbb/go.mod h1:vMygbs4qMhSZSc4lCUl2OEE+rDiIIJAIdR4m7MiMcm0= howett.net/plist v1.0.1 h1:37GdZ8tP09Q35o9ych3ehygcsL+HqKSwzctveSlarvM= howett.net/plist v1.0.1/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g= lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= modernc.org/b v1.1.0 h1:sFmr2MlofAtx5R0NC0btblNww5dqIHxXyT0SEiaTSIk= modernc.org/b v1.1.0/go.mod h1:yF+wmBAFjebNdVqZNTeNfmnLaLqq91wozvDLcuXz+ck= modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= modernc.org/cc/v3 v3.36.2/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= modernc.org/cc/v3 v3.36.3/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= modernc.org/cc/v4 v4.24.4 h1:TFkx1s6dCkQpd6dKurBNmpo+G8Zl4Sq/ztJ+2+DEsh0= modernc.org/cc/v4 v4.24.4/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0= modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc= modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw= modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= modernc.org/ccgo/v3 v3.16.8/go.mod h1:zNjwkizS+fIFDrDjIAgBSCLkWbJuHF+ar3QRn+Z9aws= modernc.org/ccgo/v3 v3.16.9/go.mod h1:zNMzC9A9xeNUepy6KuZBbugn3c0Mc9TeiJO4lgvkJDo= modernc.org/ccgo/v4 v4.23.16 h1:Z2N+kk38b7SfySC1ZkpGLN2vthNJP1+ZzGZIlH7uBxo= modernc.org/ccgo/v4 v4.23.16/go.mod h1:nNma8goMTY7aQZQNTyN9AIoJfxav4nvTnvKThAeMDdo= modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ= modernc.org/db v1.0.13 h1:FeXvJZSrlWZbNrtpJba8HP+ExIPZmhcXj9S1dOjY/MU= modernc.org/db v1.0.13/go.mod h1:Rrl6+uLHHRIMbYpprlTyDXpHy9SsLQA2x4b8CCR8zIQ= modernc.org/file v1.0.9 h1:M+gDkDJ+bF7CWXqiLImo2PNcQwck1kOlnDTgqnCc4pc= modernc.org/file v1.0.9/go.mod h1:l9sqqgN86VmQ1J677GbwUGWntcQJ2OSWBrXUDSzVtlI= modernc.org/fileutil v1.1.2/go.mod h1:HdjlliqRHrMAI4nVOvvpYVzVgvRSK7WnoCiG0GUWJNo= modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE= modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ= modernc.org/gc/v2 v2.6.3 h1:aJVhcqAte49LF+mGveZ5KPlsp4tdGdAOT4sipJXADjw= modernc.org/gc/v2 v2.6.3/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito= modernc.org/golex v1.1.0 h1:dmSaksHMd+y6NkBsRsCShNPRaSNCNH+abrVm5/gZic8= modernc.org/golex v1.1.0/go.mod h1:2pVlfqApurXhR1m0N+WDYu6Twnc4QuvO4+U8HnwoiRA= modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM= modernc.org/internal v1.0.8/go.mod h1:km71QBJPWkc1+LUldg2U9TJsKT6Q2QKHIykdEeCy/jw= modernc.org/internal v1.1.1 h1:P2UQBRYyFmJ+48cPPZiAYhryIU9bRRBd7iUKI4or/0E= modernc.org/internal v1.1.1/go.mod h1:T6BJ6EKi7nL7DSVgml05v5MNxHYdokVq8pns5+Hg0ac= modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A= modernc.org/libc v1.16.1/go.mod h1:JjJE0eu4yeK7tab2n4S1w8tlWd9MxXLRzheaRnAKymU= modernc.org/libc v1.16.17/go.mod 
h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU= modernc.org/libc v1.16.19/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= modernc.org/libc v1.17.0/go.mod h1:XsgLldpP4aWlPlsjqKRdHPqCxCjISdHfM/yeWC5GyW0= modernc.org/libc v1.17.1/go.mod h1:FZ23b+8LjxZs7XtFMbSzL/EhPxNbfZbErxEHc7cbD9s= modernc.org/libc v1.61.13 h1:3LRd6ZO1ezsFiX1y+bHd1ipyEHIJKvuprv0sLTBwLW8= modernc.org/libc v1.61.13/go.mod h1:8F/uJWL/3nNil0Lgt1Dpz+GgkApWh04N3el3hxJcA6E= modernc.org/lldb v1.0.8 h1:gM0Lpmgtw0h/ylWQSxABvzJ++TZKhf1Q/uPAGBAM6aU= modernc.org/lldb v1.0.8/go.mod h1:ybOcsZ/RNZo3q8fiGadQFRnD+1Jc+RWGcTPdeilCnUk= modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= modernc.org/memory v1.2.0/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= modernc.org/memory v1.2.1/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= modernc.org/memory v1.8.2 h1:cL9L4bcoAObu4NkxOlKWBWtNHIsnnACGF/TbqQ6sbcI= modernc.org/memory v1.8.2/go.mod h1:ZbjSvMO5NQ1A2i3bWeDiVMxIorXwdClKE/0SZ+BMotU= modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8= modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns= modernc.org/ql v1.4.11 h1:xMrJTGaTdobpBekjMfQzoDMYuvwIHgH4jYwFlGH/2xc= modernc.org/ql v1.4.11/go.mod h1:FH+w746kJCCjHQZcRxRBZau4DKgmf+/8rNNYt/LgBEA= modernc.org/sortutil v1.1.1/go.mod h1:DTj/8BqjEBLZFVPYvEGDfFFg94SsfPxQ70R+SQJ98qA= modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w= modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE= modernc.org/sqlite v1.18.1/go.mod h1:6ho+Gow7oX5V+OiOQ6Tr4xeqbx13UZ6t+Fw9IRUG4d4= modernc.org/sqlite v1.36.0 h1:EQXNRn4nIS+gfsKeUTymHIz1waxuv5BzU7558dHSfH8= modernc.org/sqlite v1.36.0/go.mod h1:7MPwH7Z6bREicF9ZVUR78P1IKuxfZ8mRIDHD0iD+8TU= modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw= modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0= modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A= modernc.org/tcl v1.13.1/go.mod h1:XOLfOwzhkljL4itZkK6T72ckMgvj0BDsnKNdZVUOecw= modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y= modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/z v1.5.1/go.mod h1:eWFB510QWW5Th9YGZT81s+LwvaAs3Q2yr4sP0rmLkv8= modernc.org/zappy v1.0.9/go.mod h1:y2c4Hv5jzyBP179SxNmx5H/BM6cVgNIXPQv2bCeR6IM= modernc.org/zappy v1.1.0 h1:cAf9HrymATNo2hYMc9c37y0tiZJYuKM2xa1ZAP8THUw= modernc.org/zappy v1.1.0/go.mod h1:cxC0dWAgZuyMsJ+KL3ZBgo3twyKGBB/0By/umSZE2bQ= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod 
h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sqlflow.org/gohive v0.0.0-20240730014249-8960223660e2 h1:zvkshqW4meDpDadU5rhwejwJrZVCfxo5qpJ4NoZeGbE= sqlflow.org/gohive v0.0.0-20240730014249-8960223660e2/go.mod h1:OAU0/vkmdKfZ363QgGTChI35KIBsS63sZWDNWcFFcBM= sqlflow.org/gomaxcompute v0.0.0-20210805062559-c14ae028b44c h1:Zo3qlfUn/rlMx9vWHpGE/luEtweuXHwrYbrFZwTG978= sqlflow.org/gomaxcompute v0.0.0-20210805062559-c14ae028b44c/go.mod h1:MxRFJp6UEk1OfnnVOIL3Jc7ROBH0dOpwF/J14A9LNdM= usql-0.19.19/handler/000077500000000000000000000000001476173253300143075ustar00rootroot00000000000000usql-0.19.19/handler/handler.go000066400000000000000000001172521476173253300162630ustar00rootroot00000000000000// Package handler provides a input process handler implementation for usql. package handler import ( "bufio" "bytes" "context" "database/sql" "errors" "fmt" "io" "log" "net/url" "os" "os/exec" "os/signal" "os/user" "path" "path/filepath" "regexp" "sort" "strconv" "strings" "syscall" "time" "unicode" "github.com/alecthomas/chroma/v2" "github.com/alecthomas/chroma/v2/formatters" "github.com/alecthomas/chroma/v2/styles" "github.com/go-git/go-billy/v5" "github.com/xo/dburl" "github.com/xo/dburl/passfile" "github.com/xo/tblfmt" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/completer" "github.com/xo/usql/drivers/metadata" "github.com/xo/usql/env" "github.com/xo/usql/metacmd" "github.com/xo/usql/rline" "github.com/xo/usql/stmt" ustyles "github.com/xo/usql/styles" "github.com/xo/usql/text" ) // Handler is a input process handler. // // Glues together usql's components to provide a "read-eval-print loop" (REPL) // for usql's interactive command-line and manages most of the core/high-level logic. // // Manages the active statement buffer, application IO, executing/querying SQL // statements, and handles backslash (\) commands encountered in the input // stream. type Handler struct { l rline.IO user *user.User // wd is the working directoyr. wd string // charts is the charts filesystem. charts billy.Filesystem // nopw indicates not asking for password. nopw bool // timing of every command executed timing bool // singleLineMode is single line mode singleLineMode bool // query statement buffer buf *stmt.Stmt // last statement last string lastPrefix string lastRaw string // batch batch bool batchEnd string // bind are bound values for exec statements bind []interface{} // connection u *dburl.URL db *sql.DB tx *sql.Tx // out file or pipe out io.WriteCloser } // New creates a new input handler. func New(l rline.IO, user *user.User, wd string, charts billy.Filesystem, nopw bool) *Handler { f, iactive := l.Next, l.Interactive() if iactive { f = func() ([]rune, error) { // next line r, err := l.Next() if err != nil { return nil, err } // save history _ = l.Save(string(r)) return r, nil } } h := &Handler{ l: l, user: user, wd: wd, charts: charts, nopw: nopw, buf: stmt.New(f), } if iactive { l.SetOutput(h.outputHighlighter) l.Completer(completer.NewDefaultCompleter(completer.WithConnStrings(h.connStrings()))) } return h } // SetSingleLineMode sets the single line mode toggle. func (h *Handler) SetSingleLineMode(singleLineMode bool) { h.singleLineMode = singleLineMode } // GetTiming gets the timing toggle. func (h *Handler) GetTiming() bool { return h.timing } // SetTiming sets the timing toggle. func (h *Handler) SetTiming(timing bool) { h.timing = timing } // outputHighlighter returns s as a highlighted string, based on the current // buffer and syntax highlighting settings. 
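// Illustrative sketch (assumptions noted): the highlighter is driven by
// variables read via env.All(), in particular SYNTAX_HL, SYNTAX_HL_STYLE,
// SYNTAX_HL_FORMAT, and SYNTAX_HL_OVERRIDE_BG (all referenced below and in
// Highlight). In an interactive session they can be toggled roughly like:
//
//	\set SYNTAX_HL true
//	\set SYNTAX_HL_STYLE monokai
//	\set SYNTAX_HL_OVERRIDE_BG false
//
// The \set invocation and the "monokai" style name are assumed examples;
// only the variable names come from the surrounding code.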
func (h *Handler) outputHighlighter(s string) string { // bail when string is empty (ie, contains no printable, non-space // characters) or if syntax highlighting is not enabled if empty(s) || env.All()["SYNTAX_HL"] != "true" { return s } // count end lines var endl string if m := linetermRE.FindStringSubmatch(s); m != nil { s = strings.TrimSuffix(s, m[0]) endl += m[0] } // leading whitespace var leading string // capture current query statement buffer orig := h.buf.RawString() full := orig if full != "" { full += "\n" } else { // get leading whitespace if i := strings.IndexFunc(s, func(r rune) bool { return !stmt.IsSpaceOrControl(r) }); i != -1 { leading = s[:i] } } full += s // setup statement parser st := drivers.NewStmt(h.u, func() func() ([]rune, error) { y := strings.Split(orig, "\n") if y[0] == "" { y[0] = s } else { y = append(y, s) } return func() ([]rune, error) { if len(y) > 0 { z := y[0] y = y[1:] return []rune(z), nil } return nil, io.EOF } }()) // accumulate all "active" statements in buffer, breaking either at // EOF or when a \ cmd has been encountered var err error var cmd, final string loop: for { cmd, _, err = st.Next(env.Unquote(h.user, false, env.All())) switch { case err != nil && err != io.EOF: return s + endl case err == io.EOF: break loop } if st.Ready() || cmd != "" { final += st.RawString() st.Reset(nil) // grab remaining whitespace to add to final l := len(final) // find first non empty character if i := strings.IndexFunc(full[l:], func(r rune) bool { return !stmt.IsSpaceOrControl(r) }); i != -1 { final += full[l : l+i] } } } if !st.Ready() && cmd == "" { final += st.RawString() } final = leading + final // determine whatever is remaining after "active" var remaining string if fnl := len(final); fnl < len(full) { remaining = full[fnl:] } // this happens when a read line is empty and/or has only // whitespace and a \ cmd if s == remaining { return s + endl } // highlight entire final accumulated buffer b := new(bytes.Buffer) if err := h.Highlight(b, final); err != nil { return s + endl } colored := b.String() // return only last line plus whatever remaining string (ie, after // a \ cmd) and the end line count ss := strings.Split(colored, "\n") return lastcolor(colored) + ss[len(ss)-1] + remaining + endl } // helpQuitExitRE is a regexp to use to match help, quit, or exit messages. var helpQuitExitRE = regexp.MustCompile(fmt.Sprintf(`(?im)^(%s|%s|%s)\s*$`, text.HelpPrefix, text.QuitPrefix, text.ExitPrefix)) // Run executes queries and commands. 
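// Illustrative note: Run is the main read-eval-print loop. It reads the next
// statement or backslash command from the statement buffer, decodes meta
// commands with metacmd.Decode, intercepts bare help/quit/exit input,
// accumulates batch queries when the driver requires it, and hands completed
// statements to Execute, honoring ON_ERROR_STOP for non-interactive input.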
func (h *Handler) Run() error { stdout, stderr, iactive := h.l.Stdout(), h.l.Stderr(), h.l.Interactive() // display welcome info if iactive && env.Get("QUIET") == "off" { // logo if typ := env.TermGraphics(); typ.Available() { if err := typ.Encode(stdout, text.Logo); err != nil { return err } } // welcome text fmt.Fprintln(stdout, text.WelcomeDesc) fmt.Fprintln(stdout) } var lastErr error for { var execute bool // set prompt if iactive { h.l.Prompt(h.Prompt(env.Get("PROMPT1"))) } // read next statement/command cmd, paramstr, err := h.buf.Next(env.Unquote(h.user, false, env.All())) switch { case h.singleLineMode && err == nil: execute = h.buf.Len != 0 case err == rline.ErrInterrupt: h.buf.Reset(nil) continue case err != nil: if err == io.EOF { return lastErr } return err } var opt metacmd.Option if cmd != "" { cmd = strings.TrimPrefix(cmd, `\`) params := stmt.DecodeParams(paramstr) // decode r, err := metacmd.Decode(cmd, params) if err != nil { lastErr = WrapErr(cmd, err) switch { case err == text.ErrUnknownCommand: fmt.Fprintln(stderr, fmt.Sprintf(text.InvalidCommand, cmd)) case err == text.ErrMissingRequiredArgument: fmt.Fprintln(stderr, fmt.Sprintf(text.MissingRequiredArg, cmd)) default: fmt.Fprintln(stderr, "error:", err) } continue } // run opt, err = r.Run(h) if err != nil && err != rline.ErrInterrupt { lastErr = WrapErr(cmd, err) fmt.Fprintln(stderr, "error:", err) continue } // print unused command parameters for { ok, arg, err := params.Get(func(s string, isvar bool) (bool, string, error) { return true, s, nil }) if err != nil { fmt.Fprintln(stderr, "error:", err) } if !ok { break } fmt.Fprintln(stdout, fmt.Sprintf(text.ExtraArgumentIgnored, cmd, arg)) } } // help, exit, quit intercept if iactive && len(h.buf.Buf) >= 4 { i, first := stmt.RunesLastIndex(h.buf.Buf, '\n'), false if i == -1 { i, first = 0, true } if s := strings.ToLower(helpQuitExitRE.FindString(string(h.buf.Buf[i:]))); s != "" { switch s { case "help": s = text.HelpDescShort if first { s = text.HelpDesc h.buf.Reset(nil) } case "quit", "exit": s = text.QuitDesc if first { return nil } } fmt.Fprintln(stdout, s) } } // quit if opt.Quit { if h.out != nil { h.out.Close() } return nil } // execute buf if execute || h.buf.Ready() || opt.Exec != metacmd.ExecNone { // intercept batch query if h.u != nil { typ, end, batch := drivers.IsBatchQueryPrefix(h.u, h.buf.Prefix) switch { case h.batch && batch: err = fmt.Errorf("cannot perform %s in existing batch", typ) lastErr = WrapErr(h.buf.String(), err) fmt.Fprintln(stderr, "error:", err) continue // cannot use \g* while accumulating statements for batch queries case h.batch && typ != h.batchEnd && opt.Exec != metacmd.ExecNone: err = errors.New("cannot force batch execution") lastErr = WrapErr(h.buf.String(), err) fmt.Fprintln(stderr, "error:", err) continue case batch: h.batch, h.batchEnd = true, end case h.batch: var lend string if len(h.last) != 0 { lend = "\n" } // append to last h.last += lend + h.buf.String() h.lastPrefix = h.buf.Prefix h.lastRaw += lend + h.buf.RawString() h.buf.Reset(nil) // break if h.batchEnd != typ { continue } h.lastPrefix = h.batchEnd h.batch, h.batchEnd = false, "" } } if h.buf.Len != 0 { h.last, h.lastPrefix, h.lastRaw = h.buf.String(), h.buf.Prefix, h.buf.RawString() h.buf.Reset(nil) } // log.Printf(">> PROCESS EXECUTE: (%s) `%s`", h.lastPrefix, h.last) if !h.batch && h.last != "" && h.last != ";" { // force a transaction for batched queries for certain drivers var forceBatch bool if h.u != nil { _, _, forceBatch = drivers.IsBatchQueryPrefix(h.u, 
stmt.FindPrefix(h.last, true, true, true)) forceBatch = forceBatch && drivers.BatchAsTransaction(h.u) } // execute out := stdout if h.out != nil { out = h.out } ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt) if err = h.Execute(ctx, out, opt, h.lastPrefix, h.last, forceBatch, h.unbind()...); err != nil { lastErr = WrapErr(h.last, err) if env.All()["ON_ERROR_STOP"] == "on" { if iactive { fmt.Fprintln(stderr, "error:", err) h.buf.Reset([]rune{}) // empty the buffer so no other statements are run continue } else { stop() return err } } else { fmt.Fprintln(stderr, "error:", err) } } stop() } } } } // Execute executes a query against the connected database. func (h *Handler) Execute(ctx context.Context, w io.Writer, opt metacmd.Option, prefix, sqlstr string, forceTrans bool, bind ...interface{}) error { if h.db == nil { return text.ErrNotConnected } // determine type and pre process string prefix, sqlstr, qtyp, err := drivers.Process(h.u, prefix, sqlstr) if err != nil { return drivers.WrapErr(h.u.Driver, err) } // start a transaction if forced if forceTrans { if err = h.BeginTx(ctx, nil); err != nil { return err } } f := h.doExecSingle switch opt.Exec { case metacmd.ExecExec: f = h.doExecExec case metacmd.ExecSet: f = h.doExecSet case metacmd.ExecWatch: f = h.doExecWatch } if err = drivers.WrapErr(h.u.Driver, f(ctx, w, opt, prefix, sqlstr, qtyp, bind)); err != nil { if forceTrans { defer h.tx.Rollback() h.tx = nil } return err } if forceTrans { return h.Commit() } return nil } // Reset resets the handler's query statement buffer. func (h *Handler) Reset(r []rune) { h.buf.Reset(r) h.last, h.lastPrefix, h.lastRaw, h.batch, h.batchEnd = "", "", "", false, "" } // Bind sets the bind parameters for the next query execution. func (h *Handler) Bind(bind []interface{}) { h.bind = bind } // unbind returns the bind parameters. func (h *Handler) unbind() []interface{} { v := h.bind h.bind = nil return v } // Prompt parses a prompt. // // NOTE: the documentation below is INCORRECT, as it is just copied from // https://www.postgresql.org/docs/current/app-psql.html#APP-PSQL-PROMPTING // // TODO/FIXME: complete this functionality (from psql documentation): // // %M - The full host name (with domain name) of the database server, or // [local] if the connection is over a Unix domain socket, or // [local:/dir/name], if the Unix domain socket is not at the compiled in // default location. // // %m - The host name of the database server, truncated at the first dot, or // [local] if the connection is over a Unix domain socket. // // %> - The port number at which the database server is listening. // // %n - The database session user name. (The expansion of this value might // change during a database session as the result of the command SET SESSION // AUTHORIZATION.) // // %/ - The name of the current database. // // %~ - Like %/, but the output is ~ (tilde) if the database is your default // database. // // %# - If the session user is a database superuser, then a #, otherwise a >. // (The expansion of this value might change during a database session as the // result of the command SET SESSION AUTHORIZATION.) // // %p - The process ID of the backend currently connected to. // // %R - In prompt 1 normally =, but @ if the session is in an inactive branch // of a conditional block, or ^ if in single-line mode, or ! if the session is // disconnected from the database (which can happen if \connect fails). 
In // prompt 2 %R is replaced by a character that depends on why psql expects // more input: - if the command simply wasn't terminated yet, but * if there // is an unfinished /* ... */ comment, a single quote if there is an // unfinished quoted string, a double quote if there is an unfinished quoted // identifier, a dollar sign if there is an unfinished dollar-quoted string, // or ( if there is an unmatched left parenthesis. In prompt 3 %R doesn't // produce anything. // // %x - Transaction status: an empty string when not in a transaction block, // or * when in a transaction block, or ! when in a failed transaction block, // or ? when the transaction state is indeterminate (for example, because // there is no connection). // // %l - The line number inside the current statement, starting from 1. // // %digits - The character with the indicated octal code is substituted. // // %:name: - The value of the psql variable name. See Variables, above, for // details. // // %`command` - The output of command, similar to ordinary “back-tick” // substitution. // // %[ ... %] - Prompts can contain terminal control characters which, for // example, change the color, background, or style of the prompt text, or // change the title of the terminal window. In order for the line editing // features of Readline to work properly, these non-printing control // characters must be designated as invisible by surrounding them with %[ and // %]. Multiple pairs of these can occur within the prompt. For example: // // testdb=> \set PROMPT1 '%[%033[1;33;40m%]%n@%/%R%[%033[0m%]%# ' // // results in a boldfaced (1;) yellow-on-black (33;40) prompt on // VT100-compatible, color-capable terminals. // // %w - Whitespace of the same width as the most recent output of PROMPT1. // This can be used as a PROMPT2 setting, so that multi-line statements are // aligned with the first line, but there is no visible secondary prompt. // // To insert a percent sign into your prompt, write %%. The default prompts are // '%/%R%x%# ' for prompts 1 and 2, and '>> ' for prompt 3. func (h *Handler) Prompt(prompt string) string { r, connected := []rune(prompt), h.db != nil end := len(r) var buf []byte for i := 0; i < end; i++ { if r[i] != '%' { buf = append(buf, string(r[i])...) continue } switch grab(r, i+1, end) { case '%': // literal buf = append(buf, '%') case 'S': // short driver name if connected { s := dburl.ShortAlias(h.u.Scheme) if s == "" { s = dburl.ShortAlias(h.u.Driver) } if s == "" { s = text.UnknownShortAlias } buf = append(buf, s+":"...) } else { buf = append(buf, text.NotConnected...) } case 'u': // dburl short if connected { buf = append(buf, h.u.Short()...) } else { buf = append(buf, text.NotConnected...) } case 'M': // full host name with domain if connected { buf = append(buf, h.u.Hostname()...) } case 'm': // host name truncated at first dot, or [local] if it's a domain socket if connected { s := h.u.Hostname() if i := strings.Index(s, "."); i != -1 { s = s[:i] } buf = append(buf, s...) } case '>': // the port number if connected { s := h.u.Port() if s != "" { s = ":" + s } buf = append(buf, s...) } case 'N': // database user if connected && h.u.User != nil { s := h.u.User.Username() if s != "" { buf = append(buf, s+"@"...) } } case 'n': // database user if connected && h.u.User != nil { buf = append(buf, h.u.User.Username()...) } case '/': // database name switch { case connected && h.u.Opaque != "": buf = append(buf, h.u.Opaque...) case connected && h.u.Path != "" && h.u.Path != "/": buf = append(buf, h.u.Path...) 
} case 'O': if connected { buf = append(buf, h.u.Opaque...) } case 'o': if connected { buf = append(buf, filepath.Base(h.u.Opaque)...) } case 'P': if connected { buf = append(buf, h.u.Path...) } case 'p': if connected { buf = append(buf, path.Base(h.u.Path)...) } case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': j := i + 1 base := 10 if grab(r, j, end) == '0' { j++ base = 8 } if grab(r, j, end) == 'x' { j++ base = 16 } i = j for unicode.IsDigit(grab(r, i+1, end)) { i++ } n, err := strconv.ParseInt(string(r[j:i+1]), base, 16) if err == nil { buf = append(buf, byte(n)) } i-- case '~': // like %/ but ~ when default database case '#': // when superuser, a #, otherwise > if h.tx != nil || h.batch { buf = append(buf, '~') } else { buf = append(buf, '>') } // case 'p': // the process id of the connected backend -- never going to be supported case 'R': // statement state buf = append(buf, h.buf.State()...) case 'x': // empty when not in a transaction block, * in transaction block, ! in failed transaction block, or ? when indeterminate case 'l': // line number case ':': // variable value case '`': // value of the evaluated command case '[', ']': case 'w': } i++ } return string(buf) } // IO returns the io for the handler. func (h *Handler) IO() rline.IO { return h.l } // User returns the user for the handler. func (h *Handler) User() *user.User { return h.user } // URL returns the URL for the handler. func (h *Handler) URL() *dburl.URL { return h.u } // DB returns the sql.DB for the handler. func (h *Handler) DB() drivers.DB { if h.tx != nil { return h.tx } return h.db } // Last returns the last executed statement. func (h *Handler) Last() string { return h.last } // LastRaw returns the last raw (non-interpolated) executed statement. func (h *Handler) LastRaw() string { return h.lastRaw } // Buf returns the current query statement buffer. func (h *Handler) Buf() *stmt.Stmt { return h.buf } // Highlight highlights using the current environment settings. func (h *Handler) Highlight(w io.Writer, buf string) error { vars := env.All() // create lexer, formatter, styler l := chroma.Coalesce(drivers.Lexer(h.u)) f := formatters.Get(vars["SYNTAX_HL_FORMAT"]) s := styles.Get(vars["SYNTAX_HL_STYLE"]) // override background if vars["SYNTAX_HL_OVERRIDE_BG"] != "false" { s = ustyles.Get(vars["SYNTAX_HL_STYLE"]) } // tokenize stream it, err := l.Tokenise(nil, buf) if err != nil { return err } // write formatted output return f.Format(w, s, it) } // Open handles opening a specified database URL, passing either a single // string in the form of a URL, or more than one string, in which case the // first string is treated as a driver name, and the remaining strings are // joined (with a space) and passed as a DSN to sql.Open. // // If there is only one parameter, and it is not a well formatted URL, but // appears to be a file on disk, then an attempt will be made to open it with // an appropriate driver (mysql, postgres, sqlite3) depending on the type (unix // domain socket, directory, or regular file, respectively). 
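// Illustrative sketch (hypothetical connection strings): the two calling
// forms described above look roughly like
//
//	// single URL parameter
//	err := h.Open(ctx, "postgres://user:pass@localhost:5432/mydb")
//
//	// driver name followed by DSN parts, joined with a space
//	err = h.Open(ctx, "sqlserver", "server=localhost;user id=sa")
//
// Both connection strings are assumed examples only.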
func (h *Handler) Open(ctx context.Context, params ...string) error { if len(params) == 0 || params[0] == "" { return nil } if h.tx != nil { return text.ErrPreviousTransactionExists } if len(params) == 1 { if v, ok := env.Cget(params[0]); ok { params = v } } if len(params) < 2 { dsn := params[0] // parse dsn u, err := dburl.Parse(dsn) if err != nil { return err } h.u = u // force parameters h.forceParams(h.u) } else { h.u = &dburl.URL{ Driver: params[0], DSN: strings.Join(params[1:], " "), } } // open connection var err error h.db, err = drivers.Open(ctx, h.u, h.GetOutput, h.l.Stderr) if err != nil && !drivers.IsPasswordErr(h.u, err) { defer h.Close() return err } // set buffer options drivers.ConfigStmt(h.u, h.buf) // force error/check connection if err == nil { if err = drivers.Ping(ctx, h.u, h.db); err == nil { return h.Version(ctx) } } // bail without getting password if h.nopw || !drivers.IsPasswordErr(h.u, err) || len(params) > 1 || !h.l.Interactive() { defer h.Close() return err } // print the error fmt.Fprintln(h.l.Stderr(), "error:", err) // otherwise, try to collect a password ... dsn, err := h.Password(params[0]) if err != nil { // close connection defer h.Close() return err } // reconnect return h.Open(ctx, dsn) } func (h *Handler) connStrings() []string { entries, err := passfile.Entries(h.user.HomeDir, text.PassfileName) if err != nil { // ignore the error as this is only used for completer // and it'll be reported again when trying to force params before opening a conn entries = nil } available := drivers.Available() names := make([]string, 0, len(available)+len(entries)) for schema := range available { _, aliases := dburl.SchemeDriverAndAliases(schema) // TODO should we create all combinations of space, :, :// and +transport ? names = append(names, schema) names = append(names, aliases...) } for _, entry := range entries { if entry.Protocol == "*" { continue } user, host, port, dbname := "", "", "", "" if entry.Username != "*" { user = entry.Username + "@" if entry.Host != "*" { host = entry.Host if entry.Port != "*" { port = ":" + entry.Port } if entry.DBName != "*" { dbname = "/" + entry.DBName } } } names = append(names, fmt.Sprintf("%s://%s%s%s%s", entry.Protocol, user, host, port, dbname)) } for name := range env.Call() { names = append(names, name) } sort.Strings(names) return names } // forceParams forces connection parameters on a database URL, adding any // driver specific required parameters, and the username/password when a // matching entry exists in the PASS file. func (h *Handler) forceParams(u *dburl.URL) { // force driver parameters drivers.ForceParams(u) // see if password entry is present user, err := passfile.Match(u, h.user.HomeDir, text.PassfileName) switch { case err != nil: fmt.Fprintln(h.l.Stderr(), "error:", err) case user != nil: u.User = user } // copy back to u z, _ := dburl.Parse(u.String()) *u = *z } // Password collects a password from input, and returns a modified DSN // including the collected password. 
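// Illustrative sketch (hypothetical DSN): given a DSN such as
//
//	postgres://user@localhost/mydb
//
// Password prompts for the password and returns the same DSN with the
// collected credentials embedded, e.g.
//
//	postgres://user:secret@localhost/mydb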
func (h *Handler) Password(dsn string) (string, error) { switch v, ok := env.Cget(dsn); { case dsn == "": return "", text.ErrMissingDSN case ok && len(v) < 2: return "", text.ErrNamedConnectionIsNotAURL case ok: dsn = v[0] } u, err := dburl.Parse(dsn) if err != nil { return "", err } user := h.user.Username if u.User != nil { user = u.User.Username() } pass, err := h.l.Password(text.EnterPassword) if err != nil { return "", err } u.User = url.UserPassword(user, pass) return u.String(), nil } // Close closes the database connection if it is open. func (h *Handler) Close() error { if h.tx != nil { return text.ErrPreviousTransactionExists } if h.db != nil { err := h.db.Close() drv := h.u.Driver h.db, h.u = nil, nil return drivers.WrapErr(drv, err) } return nil } // ReadVar reads a variable from the interactive prompt, saving it to // environment variables. func (h *Handler) ReadVar(typ, prompt string) (string, error) { var masked bool // check type switch typ { case "password": masked = true case "string", "int", "uint", "float", "bool": default: return "", text.ErrInvalidType } var v string var err error if masked { if prompt == "" { prompt = text.EnterPassword } v, err = h.l.Password(prompt) } else { h.l.Prompt(prompt) var r []rune r, err = h.l.Next() v = string(r) } switch typ { case "int": _, err = strconv.ParseInt(v, 10, 64) case "uint": _, err = strconv.ParseUint(v, 10, 64) case "float": _, err = strconv.ParseFloat(v, 64) case "bool": var b bool b, err = strconv.ParseBool(v) if err == nil { v = fmt.Sprintf("%v", b) } } if err != nil { errstr := err.Error() if i := strings.LastIndex(errstr, ":"); i != -1 { errstr = strings.TrimSpace(errstr[i+1:]) } return "", fmt.Errorf(text.InvalidValue, typ, v, errstr) } return v, nil } // ChangePassword changes a password for the user. func (h *Handler) ChangePassword(user string) (string, error) { if h.db == nil { return "", text.ErrNotConnected } if !h.l.Interactive() { return "", text.ErrNotInteractive } var err error if err = drivers.CanChangePassword(h.u); err != nil { return "", err } var newpw, newpw2, oldpw string // ask for previous password if user == "" && drivers.RequirePreviousPassword(h.u) { oldpw, err = h.l.Password(text.EnterPreviousPassword) if err != nil { return "", err } } // attempt to get passwords for i := 0; i < 3; i++ { if newpw, err = h.l.Password(text.NewPassword); err != nil { return "", err } if newpw2, err = h.l.Password(text.ConfirmPassword); err != nil { return "", err } if newpw == newpw2 { break } fmt.Fprintln(h.l.Stderr(), text.PasswordsDoNotMatch) } // verify passwords match if newpw != newpw2 { return "", text.ErrPasswordAttemptsExhausted } return drivers.ChangePassword(h.u, h.DB(), user, newpw, oldpw) } // Version prints the database version information after a successful connection. func (h *Handler) Version(ctx context.Context) error { if env.Get("SHOW_HOST_INFORMATION") != "true" || !h.l.Interactive() { return nil } if h.db == nil { return text.ErrNotConnected } ver, err := drivers.Version(ctx, h.u, h.DB()) switch { case err != nil: ver = fmt.Sprintf("", err) case ver == "": ver = "" } h.Print(text.ConnInfo, h.u.Driver, ver) return nil } // Print formats according to a format specifier and writes to handler's standard output. func (h *Handler) Print(s string, v ...interface{}) { if env.Get("QUIET") == "on" { return } fmt.Fprintln(h.l.Stdout(), fmt.Sprintf(s, v...)) } // doExecWatch repeatedly executes a query against the database. 
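// Illustrative note (assumed usage): opt.Watch holds the repeat interval, so
// watch-style execution (for example a \watch-like meta command with a 5s
// interval) re-runs the statement, printing a timestamp header before each
// run, until the context is canceled. The meta command syntax is assumed;
// only opt.Watch and the cancellation behavior come from the code below.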
func (h *Handler) doExecWatch(ctx context.Context, w io.Writer, opt metacmd.Option, prefix, sqlstr string, qtyp bool, bind []interface{}) error { for { // the actual output that psql has: "Mon Jan 2006 3:04:05 PM MST" -- which is _slightly_ different than RFC1123 // fmt.Fprintf(w, "%s (every %fs)\n\n", time.Now().Format("Mon Jan 2006 3:04:05 PM MST"), float64(opt.Watch)/float64(time.Second)) fmt.Fprintf(w, "%s (every %v)\n", time.Now().Format(time.RFC1123), opt.Watch) fmt.Fprintln(w) if err := h.doExecSingle(ctx, w, opt, prefix, sqlstr, qtyp, bind); err != nil { return err } select { case <-ctx.Done(): if err := ctx.Err(); err != nil && !errors.Is(err, context.Canceled) { return err } return nil case <-time.After(opt.Watch): } } } // doExecSingle executes a single query against the database based on its query type. func (h *Handler) doExecSingle(ctx context.Context, w io.Writer, opt metacmd.Option, prefix, sqlstr string, qtyp bool, bind []interface{}) error { // exec or query f := h.doExec if qtyp { f = h.doQuery } // exec start := time.Now() if err := f(ctx, w, opt, prefix, sqlstr, bind); err != nil { return err } if h.timing { d := time.Since(start) s := text.TimingDesc v := []interface{}{float64(d.Microseconds()) / 1000} if d > 1*time.Second { s += " (%v)" v = append(v, d.Round(1*time.Millisecond)) } fmt.Fprintln(h.l.Stdout(), fmt.Sprintf(s, v...)) } return nil } // doExecSet executes a SQL query, setting all returned columns as variables. func (h *Handler) doExecSet(ctx context.Context, w io.Writer, opt metacmd.Option, prefix, sqlstr string, _ bool, bind []interface{}) error { // query rows, err := h.DB().QueryContext(ctx, sqlstr, bind...) if err != nil { return err } // get cols cols, err := drivers.Columns(h.u, rows) if err != nil { return err } // process row(s) var i int var row []string clen, tfmt := len(cols), env.GoTime() for rows.Next() { if i == 0 { row, err = h.scan(rows, clen, tfmt) if err != nil { return err } } i++ } if i > 1 { return text.ErrTooManyRows } // set vars for i, c := range cols { n := opt.Params["prefix"] + c if err = env.ValidIdentifier(n); err != nil { return fmt.Errorf(text.CouldNotSetVariable, n) } _ = env.Set(n, row[i]) } return nil } // doExecExec executes a query and re-executes all columns of all rows as if they // were their own queries. func (h *Handler) doExecExec(ctx context.Context, w io.Writer, _ metacmd.Option, prefix, sqlstr string, qtyp bool, bind []interface{}) error { // query rows, err := h.DB().QueryContext(ctx, sqlstr, bind...) if err != nil { return err } // exec resulting rows if err := h.doExecRows(ctx, w, rows); err != nil { return err } // check for additional result sets ... for rows.NextResultSet() { if err := h.doExecRows(ctx, w, rows); err != nil { return err } } return nil } // doQuery executes a doQuery against the database. func (h *Handler) doQuery(ctx context.Context, w io.Writer, opt metacmd.Option, typ, sqlstr string, bind []interface{}) error { // run query rows, err := h.DB().QueryContext(ctx, sqlstr, bind...) 
if err != nil { return err } defer rows.Close() params := env.Pall() params["time"] = env.GoTime() for k, v := range opt.Params { params[k] = v } var pipe io.WriteCloser var cmd *exec.Cmd if pipeName := params["pipe"]; pipeName != "" || h.out != nil { if params["expanded"] == "auto" && params["columns"] == "" { // don't rely on terminal size when piping output to a file or cmd params["expanded"] = "off" } if pipeName != "" { if pipeName[0] == '|' { pipe, cmd, err = env.Pipe(h.l.Stdout(), h.l.Stderr(), pipeName[1:]) } else { pipe, err = os.OpenFile(pipeName, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, 0o644) } if err != nil { return err } w = pipe } } else if opt.Exec != metacmd.ExecWatch { params["pager_cmd"] = env.All()["PAGER"] } // set up column type config var extra []tblfmt.Option switch f := drivers.ColumnTypes(h.u); { case f != nil: extra = append(extra, tblfmt.WithColumnTypesFunc(f)) case drivers.UseColumnTypes(h.u): extra = append(extra, tblfmt.WithUseColumnTypes(true)) } resultSet := tblfmt.ResultSet(rows) // wrap query with crosstab if opt.Exec == metacmd.ExecCrosstab { var err error if resultSet, err = tblfmt.NewCrosstabView(rows, append(extra, tblfmt.WithParams(opt.Crosstab...))...); err != nil { return err } extra = nil } if drivers.LowerColumnNames(h.u) { params["lower_column_names"] = "true" } // encode and handle error conditions switch err := tblfmt.EncodeAll(w, resultSet, params, extra...); { case err != nil && cmd != nil && errors.Is(err, syscall.EPIPE): // broken pipe means pager quit before consuming all data, which might be expected return nil case err != nil && h.u.Driver == "sqlserver" && err == tblfmt.ErrResultSetHasNoColumns && strings.HasPrefix(typ, "EXEC"): // sqlserver EXEC statements sometimes do not have results, fake that // it was executed as a exec and not a query fmt.Fprintln(w, typ) case err != nil: return err case params["format"] == "aligned": fmt.Fprintln(w) } if pipe != nil { pipe.Close() if cmd != nil { cmd.Wait() } } return err } // doExecRows executes all the columns in the row. func (h *Handler) doExecRows(ctx context.Context, w io.Writer, rows *sql.Rows) error { // get columns cols, err := drivers.Columns(h.u, rows) if err != nil { return err } // process rows res := metacmd.Option{Exec: metacmd.ExecOnly} clen, tfmt := len(cols), env.GoTime() for rows.Next() { if clen != 0 { row, err := h.scan(rows, clen, tfmt) if err != nil { return err } // execute for _, sqlstr := range row { if err = h.Execute(ctx, w, res, stmt.FindPrefix(sqlstr, true, true, true), sqlstr, false); err != nil { return err } } } } return nil } // scan scans a row. 
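// Illustrative note: scan stringifies each column using the driver
// conversion funcs obtained below: []byte values go through
// drivers.ConvertBytes, maps and slices through drivers.ConvertMap and
// drivers.ConvertSlice, time.Time values are formatted with the tfmt layout
// from env.GoTime(), and anything else falls back to drivers.ConvertDefault.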
func (h *Handler) scan(rows *sql.Rows, clen int, tfmt string) ([]string, error) { // scan to []interface{} r := make([]interface{}, clen) for i := range r { r[i] = new(interface{}) } if err := rows.Scan(r...); err != nil { return nil, err } // get conversion funcs cb, cm, cs, cd := drivers.ConvertBytes(h.u), drivers.ConvertMap(h.u), drivers.ConvertSlice(h.u), drivers.ConvertDefault(h.u) row := make([]string, clen) for n, z := range r { j := z.(*interface{}) switch x := (*j).(type) { case []byte: if x != nil { var err error if row[n], err = cb(x, tfmt); err != nil { return nil, err } } case string: row[n] = x case time.Time: row[n] = x.Format(tfmt) case fmt.Stringer: row[n] = x.String() case map[string]interface{}: if x != nil { var err error if row[n], err = cm(x); err != nil { return nil, err } } case []interface{}: if x != nil { var err error if row[n], err = cs(x); err != nil { return nil, err } } default: if x != nil { var err error if row[n], err = cd(x); err != nil { return nil, err } } } } return row, nil } // doExec does a database exec. func (h *Handler) doExec(ctx context.Context, w io.Writer, _ metacmd.Option, typ, sqlstr string, bind []interface{}) error { res, err := h.DB().ExecContext(ctx, sqlstr, bind...) if err != nil { _ = env.Set("ROW_COUNT", "0") return err } // get affected count, err := drivers.RowsAffected(h.u, res) if err != nil { _ = env.Set("ROW_COUNT", "0") return err } // print name if env.Get("QUIET") == "off" { fmt.Fprint(w, typ) // print count if count > 0 { fmt.Fprint(w, " ", count) } fmt.Fprintln(w) } return env.Set("ROW_COUNT", strconv.FormatInt(count, 10)) } // Begin begins a transaction. func (h *Handler) Begin(txOpts *sql.TxOptions) error { return h.BeginTx(context.Background(), txOpts) } // Begin begins a transaction in a context. func (h *Handler) BeginTx(ctx context.Context, txOpts *sql.TxOptions) error { if h.db == nil { return text.ErrNotConnected } if h.tx != nil { return text.ErrPreviousTransactionExists } var err error h.tx, err = h.db.BeginTx(ctx, txOpts) if err != nil { return drivers.WrapErr(h.u.Driver, err) } return nil } // Commit commits a transaction. func (h *Handler) Commit() error { if h.db == nil { return text.ErrNotConnected } if h.tx == nil { return text.ErrNoPreviousTransactionExists } tx := h.tx h.tx = nil if err := tx.Commit(); err != nil { return drivers.WrapErr(h.u.Driver, err) } return nil } // Rollback rollbacks a transaction. func (h *Handler) Rollback() error { if h.db == nil { return text.ErrNotConnected } if h.tx == nil { return text.ErrNoPreviousTransactionExists } tx := h.tx h.tx = nil if err := tx.Rollback(); err != nil { return drivers.WrapErr(h.u.Driver, err) } return nil } // Include includes the specified path. 
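// Illustrative sketch (hypothetical path): Include resolves relative paths
// against the handler's working directory and runs the file through a nested
// handler that shares the current connection, e.g.
//
//	// relative to the current working directory
//	err := h.Include("scripts/setup.sql", true)
//
// The script path is an assumed example.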
func (h *Handler) Include(path string, relative bool) error { if relative && !filepath.IsAbs(path) { path = filepath.Join(h.wd, path) } // open path, f, err := env.OpenFile(h.user, path, relative) if err != nil { return err } defer f.Close() r := bufio.NewReader(f) // setup rline l := &rline.Rline{ N: func() ([]rune, error) { buf := new(bytes.Buffer) var b []byte var isPrefix bool var err error for { // read b, isPrefix, err = r.ReadLine() // when not EOF if err != nil && err != io.EOF { return nil, err } // append if _, werr := buf.Write(b); werr != nil { return nil, werr } // end of line if !isPrefix || err != nil { break } } // peek and read possible line ending \n or \r\n if err != io.EOF { if err := peekEnding(buf, r); err != nil { return nil, err } } return []rune(buf.String()), err }, Out: h.l.Stdout(), Err: h.l.Stderr(), Pw: h.l.Password, } p := New(l, h.user, filepath.Dir(path), h.charts, h.nopw) p.db, p.u = h.db, h.u drivers.ConfigStmt(p.u, p.buf) err = p.Run() h.db, h.u = p.db, p.u return err } // MetadataWriter loads the metadata writer for the func (h *Handler) MetadataWriter(ctx context.Context) (metadata.Writer, error) { if h.db == nil { return nil, text.ErrNotConnected } return drivers.NewMetadataWriter(ctx, h.u, h.db, h.l.Stdout(), readerOpts()...) } // GetOutput gets the output writer. func (h *Handler) GetOutput() io.Writer { if h.out == nil { return h.l.Stdout() } return h.out } // SetOutput sets the output writer. func (h *Handler) SetOutput(o io.WriteCloser) { if h.out != nil { h.out.Close() } h.out = o } // FS is the filesystem interface. type FS interface{} // Error wraps handler errors. type Error struct { Buf string Err error } // WrapErr wraps an [error] using the specified driver when err is not nil. func WrapErr(buf string, err error) error { if err == nil { return nil } // avoid double wrapping error if _, ok := err.(*Error); ok { return err } return &Error{buf, err} } // Error satisfies the [error] interface, returning the original error message. func (e *Error) Error() string { return e.Err.Error() } // Unwrap returns the original error. func (e *Error) Unwrap() error { return e.Err } func readerOpts() []metadata.ReaderOption { var opts []metadata.ReaderOption envs := env.All() if envs["ECHO_HIDDEN"] == "on" || envs["ECHO_HIDDEN"] == "noexec" { if envs["ECHO_HIDDEN"] == "noexec" { opts = append(opts, metadata.WithDryRun(true)) } opts = append( opts, metadata.WithLogger(log.New(os.Stdout, "DEBUG: ", log.LstdFlags)), metadata.WithTimeout(30*time.Second), ) } return opts } // peekEnding peeks to see if the next successive bytes in r is \n or \r\n, // writing to w if it is. Does not advance r if the next bytes are not \n or // \r\n. func peekEnding(w io.Writer, r *bufio.Reader) error { // peek first byte buf, err := r.Peek(1) switch { case err != nil && err != io.EOF: return err case err == nil && buf[0] == '\n': if _, rerr := r.ReadByte(); err != nil && err != io.EOF { return rerr } _, werr := w.Write([]byte{'\n'}) return werr case err == nil && buf[0] != '\r': return nil } // peek second byte buf, err = r.Peek(1) switch { case err != nil && err != io.EOF: return err case err == nil && buf[0] != '\n': return nil } if _, rerr := r.ReadByte(); err != nil && err != io.EOF { return rerr } _, werr := w.Write([]byte{'\n'}) return werr } // grab grabs i from r, or returns 0 if i >= end. func grab(r []rune, i, end int) rune { if i < end { return r[i] } return 0 } // linetermRE is the end of line terminal. 
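// Illustrative note: the regexp below matches one or more trailing line
// terminators (\n or \r\n) at the end of the input, which outputHighlighter
// strips before highlighting and re-appends afterwards.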
var linetermRE = regexp.MustCompile(`(?:\r?\n)+$`) // empty reports whether s contains at least one printable, non-space character. func empty(s string) bool { i := strings.IndexFunc(s, func(r rune) bool { return unicode.IsPrint(r) && !unicode.IsSpace(r) }) return i == -1 } var ansiRE = regexp.MustCompile(`\x1b[[0-9]+([:;][0-9]+)*m`) // lastcolor returns the last defined color in s, if any. func lastcolor(s string) string { if i := strings.LastIndex(s, "\n"); i != -1 { s = s[:i] } if i := strings.LastIndex(s, "\x1b[0m"); i != -1 { s = s[i+4:] } return strings.Join(ansiRE.FindAllString(s, -1), "") } usql-0.19.19/internal/000077500000000000000000000000001476173253300145065ustar00rootroot00000000000000usql-0.19.19/internal/adodb.go000066400000000000000000000002701476173253300161050ustar00rootroot00000000000000//go:build (all || most || adodb) && !no_adodb package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/adodb" // Microsoft ADODB driver ) usql-0.19.19/internal/athena.go000066400000000000000000000002661476173253300163010ustar00rootroot00000000000000//go:build (all || most || athena) && !no_athena package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/athena" // AWS Athena driver ) usql-0.19.19/internal/avatica.go000066400000000000000000000002751476173253300164510ustar00rootroot00000000000000//go:build (all || most || avatica) && !no_avatica package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/avatica" // Apache Avatica driver ) usql-0.19.19/internal/bigquery.go000066400000000000000000000003011476173253300166560ustar00rootroot00000000000000//go:build (all || most || bigquery) && !no_bigquery package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/bigquery" // Google BigQuery driver ) usql-0.19.19/internal/cassandra.go000066400000000000000000000002761476173253300170010ustar00rootroot00000000000000//go:build (all || most || cassandra) && !no_cassandra package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/cassandra" // Cassandra driver ) usql-0.19.19/internal/chai.go000066400000000000000000000002551476173253300157430ustar00rootroot00000000000000//go:build (all || most || chai) && !no_chai package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/chai" // ChaiSQL driver ) usql-0.19.19/internal/clickhouse.go000066400000000000000000000002771476173253300171740ustar00rootroot00000000000000//go:build (!no_base || clickhouse) && !no_clickhouse package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/clickhouse" // ClickHouse driver ) usql-0.19.19/internal/cosmos.go000066400000000000000000000002721476173253300163410ustar00rootroot00000000000000//go:build (all || most || cosmos) && !no_cosmos package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/cosmos" // Azure CosmosDB driver ) usql-0.19.19/internal/couchbase.go000066400000000000000000000002761476173253300167760ustar00rootroot00000000000000//go:build (all || most || couchbase) && !no_couchbase package internal // Code generated by gen.go. DO NOT EDIT. 
import ( _ "github.com/xo/usql/drivers/couchbase" // Couchbase driver ) usql-0.19.19/internal/csvq.go000066400000000000000000000002471476173253300160140ustar00rootroot00000000000000//go:build (!no_base || csvq) && !no_csvq package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/csvq" // CSVQ driver ) usql-0.19.19/internal/databend.go000066400000000000000000000002721476173253300166000ustar00rootroot00000000000000//go:build (all || most || databend) && !no_databend package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/databend" // Databend driver ) usql-0.19.19/internal/databricks.go000066400000000000000000000003021476173253300171370ustar00rootroot00000000000000//go:build (all || most || databricks) && !no_databricks package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/databricks" // Databricks driver ) usql-0.19.19/internal/duckdb.go000066400000000000000000000002621476173253300162710ustar00rootroot00000000000000//go:build (all || most || duckdb) && !no_duckdb package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/duckdb" // DuckDB driver ) usql-0.19.19/internal/dynamodb.go000066400000000000000000000002721476173253300166330ustar00rootroot00000000000000//go:build (all || most || dynamodb) && !no_dynamodb package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/dynamodb" // DynamoDb driver ) usql-0.19.19/internal/exasol.go000066400000000000000000000002621476173253300163300ustar00rootroot00000000000000//go:build (all || most || exasol) && !no_exasol package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/exasol" // Exasol driver ) usql-0.19.19/internal/firebird.go000066400000000000000000000002721476173253300166240ustar00rootroot00000000000000//go:build (all || most || firebird) && !no_firebird package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/firebird" // Firebird driver ) usql-0.19.19/internal/flightsql.go000066400000000000000000000002761476173253300170370ustar00rootroot00000000000000//go:build (all || most || flightsql) && !no_flightsql package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/flightsql" // FlightSQL driver ) usql-0.19.19/internal/godror.go000066400000000000000000000002701476173253300163300ustar00rootroot00000000000000//go:build (all || godror) && !no_godror package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/godror" // GO DRiver for ORacle driver ) usql-0.19.19/internal/h2.go000066400000000000000000000002511476173253300153440ustar00rootroot00000000000000//go:build (all || most || h2) && !no_h2 package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/h2" // Apache H2 driver ) usql-0.19.19/internal/hive.go000066400000000000000000000002611476173253300157670ustar00rootroot00000000000000//go:build (all || most || hive) && !no_hive package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/hive" // Apache Hive driver ) usql-0.19.19/internal/ignite.go000066400000000000000000000002711476173253300163140ustar00rootroot00000000000000//go:build (all || most || ignite) && !no_ignite package internal // Code generated by gen.go. DO NOT EDIT. 
import ( _ "github.com/xo/usql/drivers/ignite" // Apache Ignite driver ) usql-0.19.19/internal/impala.go000066400000000000000000000002611476173253300162770ustar00rootroot00000000000000//go:build (bad || impala) && !no_impala package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/impala" // Apache Impala driver ) usql-0.19.19/internal/internal.go000066400000000000000000000067301476173253300166570ustar00rootroot00000000000000// Package internal provides a way to obtain information about which database // drivers were included at build. package internal // Code generated by gen.go. DO NOT EDIT. // KnownBuildTags returns a map of known driver names to its respective build // tags. func KnownBuildTags() map[string]string { return map[string]string{ "adodb": "adodb", // github.com/mattn/go-adodb "athena": "awsathena", // github.com/uber/athenadriver/go "avatica": "avatica", // github.com/apache/calcite-avatica-go/v5 "bigquery": "bigquery", // gorm.io/driver/bigquery/driver "cassandra": "cql", // github.com/MichaelS11/go-cql-driver "chai": "chai", // github.com/chaisql/chai/driver "clickhouse": "clickhouse", // github.com/ClickHouse/clickhouse-go/v2 "cosmos": "cosmos", // github.com/btnguyen2k/gocosmos "couchbase": "n1ql", // github.com/couchbase/go_n1ql "csvq": "csvq", // github.com/mithrandie/csvq-driver "databend": "databend", // github.com/datafuselabs/databend-go "databricks": "databricks", // github.com/databricks/databricks-sql-go "duckdb": "duckdb", // github.com/marcboeker/go-duckdb "dynamodb": "dynamodb", // github.com/btnguyen2k/godynamo "exasol": "exasol", // github.com/exasol/exasol-driver-go "firebird": "firebirdsql", // github.com/nakagami/firebirdsql "flightsql": "flightsql", // github.com/apache/arrow/go/v17/arrow/flight/flightsql/driver "godror": "godror", // github.com/godror/godror "h2": "h2", // github.com/jmrobles/h2go "hive": "hive", // sqlflow.org/gohive "ignite": "ignite", // github.com/amsokol/ignite-go-client/sql "impala": "impala", // github.com/bippio/go-impala "maxcompute": "maxcompute", // sqlflow.org/gomaxcompute "moderncsqlite": "moderncsqlite", // modernc.org/sqlite "mymysql": "mymysql", // github.com/ziutek/mymysql/godrv "mysql": "mysql", // github.com/go-sql-driver/mysql "netezza": "nzgo", // github.com/IBM/nzgo/v12 "odbc": "odbc", // github.com/alexbrainman/odbc "oracle": "oracle", // github.com/sijms/go-ora/v2 "ots": "ots", // github.com/aliyun/aliyun-tablestore-go-sql-driver "pgx": "pgx", // github.com/jackc/pgx/v5/stdlib "postgres": "postgres", // github.com/lib/pq "presto": "presto", // github.com/prestodb/presto-go-client/presto "ql": "ql", // modernc.org/ql "ramsql": "ramsql", // github.com/proullon/ramsql/driver "sapase": "tds", // github.com/thda/tds "saphana": "hdb", // github.com/SAP/go-hdb/driver "snowflake": "snowflake", // github.com/snowflakedb/gosnowflake "spanner": "spanner", // github.com/googleapis/go-sql-spanner "sqlite3": "sqlite3", // github.com/mattn/go-sqlite3 "sqlserver": "sqlserver", // github.com/microsoft/go-mssqldb "trino": "trino", // github.com/trinodb/trino-go-client/trino "vertica": "vertica", // github.com/vertica/vertica-sql-go "voltdb": "voltdb", // github.com/VoltDB/voltdb-client-go/voltdbclient "ydb": "ydb", // github.com/ydb-platform/ydb-go-sdk/v3 } } usql-0.19.19/internal/maxcompute.go000066400000000000000000000003121476173253300172130ustar00rootroot00000000000000//go:build (all || most || maxcompute) && !no_maxcompute package internal // Code generated by gen.go. DO NOT EDIT. 
import ( _ "github.com/xo/usql/drivers/maxcompute" // Alibaba MaxCompute driver ) usql-0.19.19/internal/moderncsqlite.go000066400000000000000000000003201476173253300177010ustar00rootroot00000000000000//go:build (all || most || moderncsqlite) && !no_moderncsqlite package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/moderncsqlite" // ModernC SQLite3 driver ) usql-0.19.19/internal/mymysql.go000066400000000000000000000002741476173253300165530ustar00rootroot00000000000000//go:build (all || most || mymysql) && !no_mymysql package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/mymysql" // MySQL MyMySQL driver ) usql-0.19.19/internal/mysql.go000066400000000000000000000002531476173253300162020ustar00rootroot00000000000000//go:build (!no_base || mysql) && !no_mysql package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/mysql" // MySQL driver ) usql-0.19.19/internal/netezza.go000066400000000000000000000002661476173253300165210ustar00rootroot00000000000000//go:build (all || most || netezza) && !no_netezza package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/netezza" // Netezza driver ) usql-0.19.19/internal/odbc.go000066400000000000000000000002421476173253300157420ustar00rootroot00000000000000//go:build (all || odbc) && !no_odbc package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/odbc" // ODBC driver ) usql-0.19.19/internal/oracle.go000066400000000000000000000002701476173253300163010ustar00rootroot00000000000000//go:build (!no_base || oracle) && !no_oracle package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/oracle" // Oracle Database driver ) usql-0.19.19/internal/ots.go000066400000000000000000000002651476173253300156450ustar00rootroot00000000000000//go:build (all || most || ots) && !no_ots package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/ots" // Alibaba Tablestore driver ) usql-0.19.19/internal/pgx.go000066400000000000000000000002611476173253300156320ustar00rootroot00000000000000//go:build (all || most || pgx) && !no_pgx package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/pgx" // PostgreSQL PGX driver ) usql-0.19.19/internal/postgres.go000066400000000000000000000002711476173253300167030ustar00rootroot00000000000000//go:build (!no_base || postgres) && !no_postgres package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/postgres" // PostgreSQL driver ) usql-0.19.19/internal/presto.go000066400000000000000000000002621476173253300163510ustar00rootroot00000000000000//go:build (all || most || presto) && !no_presto package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/presto" // Presto driver ) usql-0.19.19/internal/ql.go000066400000000000000000000002501476173253300154460ustar00rootroot00000000000000//go:build (all || most || ql) && !no_ql package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/ql" // Cznic QL driver ) usql-0.19.19/internal/ramsql.go000066400000000000000000000002621476173253300163340ustar00rootroot00000000000000//go:build (all || most || ramsql) && !no_ramsql package internal // Code generated by gen.go. DO NOT EDIT. 
import ( _ "github.com/xo/usql/drivers/ramsql" // RamSQL driver ) usql-0.19.19/internal/sapase.go000066400000000000000000000002631476173253300163120ustar00rootroot00000000000000//go:build (all || most || sapase) && !no_sapase package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/sapase" // SAP ASE driver ) usql-0.19.19/internal/saphana.go000066400000000000000000000002671476173253300164550ustar00rootroot00000000000000//go:build (all || most || saphana) && !no_saphana package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/saphana" // SAP HANA driver ) usql-0.19.19/internal/snowflake.go000066400000000000000000000002761476173253300170330ustar00rootroot00000000000000//go:build (all || most || snowflake) && !no_snowflake package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/snowflake" // Snowflake driver ) usql-0.19.19/internal/spanner.go000066400000000000000000000002751476173253300165070ustar00rootroot00000000000000//go:build (all || most || spanner) && !no_spanner package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/spanner" // Google Spanner driver ) usql-0.19.19/internal/sqlite3.go000066400000000000000000000002631476173253300164220ustar00rootroot00000000000000//go:build (!no_base || sqlite3) && !no_sqlite3 package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/sqlite3" // SQLite3 driver ) usql-0.19.19/internal/sqlserver.go000066400000000000000000000003061476173253300170620ustar00rootroot00000000000000//go:build (!no_base || sqlserver) && !no_sqlserver package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/sqlserver" // Microsoft SQL Server driver ) usql-0.19.19/internal/trino.go000066400000000000000000000002561476173253300161730ustar00rootroot00000000000000//go:build (all || most || trino) && !no_trino package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/trino" // Trino driver ) usql-0.19.19/internal/vertica.go000066400000000000000000000002661476173253300164760ustar00rootroot00000000000000//go:build (all || most || vertica) && !no_vertica package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/vertica" // Vertica driver ) usql-0.19.19/internal/voltdb.go000066400000000000000000000002621476173253300163270ustar00rootroot00000000000000//go:build (all || most || voltdb) && !no_voltdb package internal // Code generated by gen.go. DO NOT EDIT. import ( _ "github.com/xo/usql/drivers/voltdb" // VoltDB driver ) usql-0.19.19/internal/ydb.go000066400000000000000000000002461476173253300156150ustar00rootroot00000000000000//go:build (all || most || ydb) && !no_ydb package internal // Code generated by gen.go. DO NOT EDIT. 
import ( _ "github.com/xo/usql/drivers/ydb" // YDB driver ) usql-0.19.19/internal/z.go000066400000000000000000000013401476173253300153040ustar00rootroot00000000000000package internal import ( "runtime" "github.com/xo/dburl" "github.com/xo/usql/drivers" ) func init() { if runtime.GOOS == "windows" { // if no odbc driver, but we have adodb, add 'odbc' (and related // aliases) as alias for oleodbc if drivers.Registered("adodb") && !drivers.Registered("odbc") { old := dburl.Unregister("odbc") dburl.RegisterAlias("oleodbc", "odbc") for _, alias := range old.Aliases { dburl.RegisterAlias("oleodbc", alias) } } } if drivers.Registered("moderncsqlite") && !drivers.Registered("sqlite3") { old := dburl.Unregister("sqlite3") dburl.RegisterAlias("moderncsqlite", "sqlite3") for _, alias := range old.Aliases { dburl.RegisterAlias("moderncsqlite", alias) } } } usql-0.19.19/main.go000066400000000000000000000035051476173253300141500ustar00rootroot00000000000000// Command usql is the universal command-line interface for SQL databases. package main //go:generate go run gen.go import ( "context" "errors" "fmt" "io" "os" "strings" "github.com/xo/usql/drivers" "github.com/xo/usql/handler" "github.com/xo/usql/internal" "github.com/xo/usql/rline" "github.com/xo/usql/text" ) func main() { // get available drivers and known build tags available, known := drivers.Available(), internal.KnownBuildTags() // report if database is supported if len(os.Args) == 2 && strings.HasPrefix(os.Args[1], "--has-") && strings.HasSuffix(os.Args[1], "-support") { n := os.Args[1][6 : len(os.Args[1])-8] if v, ok := known[n]; ok { n = v } var out int if _, ok := available[n]; ok { out = 1 } fmt.Fprint(os.Stdout, out) return } // run if err := New(os.Args).ExecuteContext(context.Background()); err != nil && err != io.EOF && err != rline.ErrInterrupt { var he *handler.Error if !errors.As(err, &he) { fmt.Fprintf(os.Stderr, "error: %v\n", err) } var e *drivers.Error if errors.As(err, &e) && e.Err == text.ErrDriverNotAvailable { m := make(map[string]string, len(known)) for k, v := range known { m[v] = k } tag := e.Driver if t, ok := m[tag]; ok { tag = t } rev := "latest" if text.CommandVersion == "0.0.0-dev" || strings.Contains(text.CommandVersion, "-") { rev = "master" } fmt.Fprintf(os.Stderr, "\ntry:\n\n go install -tags 'most %s' github.com/xo/usql@%s\n\n", tag, rev) } switch estr := err.Error(); { case err == text.ErrWrongNumberOfArguments, strings.HasPrefix(estr, "unknown flag:"), strings.HasPrefix(estr, "unknown shorthand flag:"), strings.HasPrefix(estr, "bad flag syntax:"), strings.HasPrefix(estr, "flag needs an argument:"): fmt.Fprintln(os.Stderr, text.CommandHelpHint) } os.Exit(1) } } usql-0.19.19/main_test.go000066400000000000000000000000711476173253300152020ustar00rootroot00000000000000package main import ( _ "github.com/google/goexpect" ) usql-0.19.19/metacmd/000077500000000000000000000000001476173253300143045ustar00rootroot00000000000000usql-0.19.19/metacmd/charts/000077500000000000000000000000001476173253300155705ustar00rootroot00000000000000usql-0.19.19/metacmd/charts/charts.go000066400000000000000000000130371476173253300174070ustar00rootroot00000000000000package charts import ( "encoding/json" "fmt" "image/color" "math" "strconv" "strings" "github.com/kenshaw/colors" "github.com/xo/usql/text" ) type ChartConfig struct { Title string Subtitle string W, H int Background color.Color Type string Prec int File string } func ParseArgs(opts map[string]string) (ChartConfig, error) { cfg := ChartConfig{ Title: opts["title"], Subtitle: 
opts["subtitle"], W: 800, H: 600, Background: color.White, Type: opts["type"], } if size, ok := opts["size"]; ok { b, a, ok := strings.Cut(size, "x") if !ok { return ChartConfig{}, fmt.Errorf(text.ChartParseFailed, "size", "provide size as NxN") } var err error cfg.W, err = strconv.Atoi(b) if err != nil { return ChartConfig{}, fmt.Errorf(text.ChartParseFailed, "size", err) } cfg.H, err = strconv.Atoi(a) if err != nil { return ChartConfig{}, fmt.Errorf(text.ChartParseFailed, "size", err) } } if c, ok := opts["bg"]; ok { var err error cfg.Background, err = colors.Parse(c) if err != nil { return ChartConfig{}, fmt.Errorf(text.ChartParseFailed, "bg", err) } } if prec, ok := opts["prec"]; ok { p, err := strconv.Atoi(prec) if err != nil { return ChartConfig{}, fmt.Errorf(text.ChartParseFailed, "prec", err) } cfg.Prec = p } if file, ok := opts["file"]; ok { cfg.File = file } return cfg, nil } type Chart struct { Title string Subtitle string Legend []string XAxis Series YAxis Series Series []Series } type Series struct { Name string Type string Data any } func MakeChart(cfg ChartConfig, cols []string, transposed [][]string) (*Chart, error) { numCols := make([][]float64, len(cols)) for i, col := range transposed { for _, v := range col { f, err := parseFloat(v, cfg.Prec) if err != nil { numCols[i] = nil break } if numCols[i] == nil { // don't allocate slice unless we have at least some valid data numCols[i] = make([]float64, 0, len(col)) } numCols[i] = append(numCols[i], f) } } firstReg, firstNumeric := -1, -1 for i, c := range numCols { if firstReg == -1 && c == nil { firstReg = i } if firstNumeric == -1 && c != nil { firstNumeric = i } } c := &Chart{ Title: cfg.Title, Subtitle: cfg.Subtitle, } var x int var chartType string switch { case firstNumeric == -1: return nil, text.ErrNoNumericColumns case firstReg >= 0: x = firstReg chartType = "bar" default: x = firstNumeric chartType = "line" } if cfg.Type != "" { chartType = cfg.Type } c.XAxis = Series{ Name: cols[x], Type: "category", Data: transposed[x], } c.YAxis = Series{ Type: "value", } for i, col := range cols { if i == x { continue } c.Legend = append(c.Legend, col) c.Series = append(c.Series, Series{ Name: col, Type: chartType, Data: numCols[i], }) } return c, nil } /* echarts */ type echarts struct { Title *echartsTitle `json:"title,omitempty"` Legend *echartsLegend `json:"legend,omitempty"` XAxis *echartsAxis `json:"xAxis,omitempty"` YAxis *echartsAxis `json:"yAxis,omitempty"` Series []echartsAxis `json:"series,omitempty"` } type echartsTitle struct { Title string `json:"text,omitempty"` Subtext string `json:"subtext,omitempty"` } type echartsLegend struct { Data []string `json:"data,omitempty"` } type echartsAxis struct { Name string `json:"name,omitempty"` Type string `json:"type,omitempty"` Data any `json:"data,omitempty"` } func (c Chart) ToEcharts() (string, error) { ec := echarts{} if c.Title != "" || c.Subtitle != "" { ec.Title = &echartsTitle{c.Title, c.Subtitle} } if len(c.Legend) > 0 { ec.Legend = &echartsLegend{c.Legend} } if c.XAxis.Data != nil || c.YAxis.Type != "" { ec.XAxis = &echartsAxis{ Name: c.XAxis.Name, Type: c.XAxis.Type, Data: c.XAxis.Data, } } if c.YAxis.Data != nil || c.YAxis.Type != "" { ec.YAxis = &echartsAxis{ Name: c.YAxis.Name, Type: c.YAxis.Type, Data: c.YAxis.Data, } } if len(c.Series) > 0 { ec.Series = make([]echartsAxis, 0, len(c.Series)) for _, s := range c.Series { ec.Series = append(ec.Series, echartsAxis{ Name: s.Name, Type: s.Type, Data: s.Data, }) } } buf, err := json.Marshal(ec) if err != nil { 
return "", err } return string(buf), nil } func parseFloat(v string, prec int) (f float64, err error) { f, err = strconv.ParseFloat(v, 64) if err != nil || prec == 0 { return } r := math.Pow(10, float64(prec)) return math.Round(f*r) / r, nil } const basicBarTemplate = ` { "title": { "text": {{ printf "%q" .Title }}, "subtext": {{ printf "%q" .Subtitle }} }, {{- if .Legend }} "legend": { "data": [ {{ range .Legend }}{{ printf "%q" . }}{{ end }} ] }, {{- end }} "xAxis": [ { "type": "category", "data": [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ] } ], "yAxis": [ { "type": "value" } ], "series": [ { "name": "Rainfall", "type": "bar", "data": [ 2, 4.9, 7, 23.2, 25.6, 76.7, 135.6, 162.2, 32.6, 20, 6.4, 3.3 ], }, { "name": "Evaporation", "type": "bar", "data": [ 2.6, 5.9, 9, 26.4, 28.7, 70.7, 175.6, 182.2, 48.7, 18.8, 6, 2.3 ], } ] } ` usql-0.19.19/metacmd/cmds.go000066400000000000000000000632571476173253300155760ustar00rootroot00000000000000package metacmd import ( "bytes" "context" "database/sql" "fmt" "io" "os" "os/exec" "os/signal" "sort" "strconv" "strings" "time" "github.com/xo/dburl" "github.com/xo/usql/drivers" "github.com/xo/usql/env" "github.com/xo/usql/text" "golang.org/x/exp/maps" ) // Cmd is a command implementation. type Cmd struct { Section Section Descs []Desc Process func(*Params) error } // cmds is the set of commands. var cmds []Cmd // cmdMap is the map of commands and their aliases. var cmdMap map[string]Metacmd // sectMap is the map of sections to its respective commands. var sectMap map[Section][]Metacmd func init() { cmds = []Cmd{ Question: { Section: SectionHelp, Descs: []Desc{ {"?", "[commands]", "show help on backslash commands"}, {"?", "options", "show help on " + text.CommandName + " command-line options"}, {"?", "variables", "show help on special " + text.CommandName + " variables"}, }, Process: func(p *Params) error { name, err := p.Get(false) if err != nil { return err } stdout, stderr := p.Handler.IO().Stdout(), p.Handler.IO().Stderr() var cmd *exec.Cmd var wc io.WriteCloser if pager := env.Get("PAGER"); p.Handler.IO().Interactive() && pager != "" { if wc, cmd, err = env.Pipe(stdout, stderr, pager); err != nil { return err } stdout = wc } switch name = strings.TrimSpace(strings.ToLower(name)); { case name == "options": Usage(stdout, true) case name == "variables": env.Listing(stdout) default: Listing(stdout) } if cmd != nil { if err := wc.Close(); err != nil { return err } return cmd.Wait() } return nil }, }, Quit: { Section: SectionGeneral, Descs: []Desc{ {"q", "", "quit " + text.CommandName}, {"quit", "", ""}, }, Process: func(p *Params) error { p.Option.Quit = true return nil }, }, Copyright: { Section: SectionGeneral, Descs: []Desc{ {"copyright", "", "show " + text.CommandName + " usage and distribution terms"}, }, Process: func(p *Params) error { stdout := p.Handler.IO().Stdout() if typ := env.TermGraphics(); typ.Available() { typ.Encode(stdout, text.Logo) } fmt.Fprintln(stdout, text.Copyright) return nil }, }, ConnectionInfo: { Section: SectionConnection, Descs: []Desc{ {"conninfo", "", "display information about the current database connection"}, }, Process: func(p *Params) error { s := text.NotConnected if db, u := p.Handler.DB(), p.Handler.URL(); db != nil && u != nil { s = fmt.Sprintf(text.ConnInfo, u.Driver, u.DSN) } fmt.Fprintln(p.Handler.IO().Stdout(), s) return nil }, }, Drivers: { Section: SectionGeneral, Descs: []Desc{ {"drivers", "", "display information about available database drivers"}, }, Process: 
func(p *Params) error { stdout, stderr := p.Handler.IO().Stdout(), p.Handler.IO().Stderr() var cmd *exec.Cmd var wc io.WriteCloser if pager := env.Get("PAGER"); p.Handler.IO().Interactive() && pager != "" { var err error if wc, cmd, err = env.Pipe(stdout, stderr, pager); err != nil { return err } stdout = wc } available := drivers.Available() names := make([]string, len(available)) var z int for k := range available { names[z] = k z++ } sort.Strings(names) fmt.Fprintln(stdout, text.AvailableDrivers) for _, n := range names { s := " " + n driver, aliases := dburl.SchemeDriverAndAliases(n) if driver != n { s += " (" + driver + ")" } if len(aliases) > 0 { if len(aliases) > 0 { s += " [" + strings.Join(aliases, ", ") + "]" } } fmt.Fprintln(stdout, s) } if cmd != nil { if err := wc.Close(); err != nil { return err } return cmd.Wait() } return nil }, }, Connect: { Section: SectionConnection, Descs: []Desc{ {"c", "DSN", "connect to database url"}, {"c", "DRIVER PARAMS...", "connect to database with driver and parameters"}, {"connect", "", ""}, }, Process: func(p *Params) error { vals, err := p.GetAll(true) if err != nil { return err } ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt) defer cancel() return p.Handler.Open(ctx, vals...) }, }, SetConnVar: { Section: SectionConnection, Descs: []Desc{ {"cset", "[NAME [DSN]]", "set named connection, or list all if no parameters"}, {"cset", "NAME DRIVER PARAMS...", "define named connection for database driver"}, }, Process: func(p *Params) error { ok, n, err := p.GetOK(true) switch { case err != nil: return err case ok: vals, err := p.GetAll(true) if err != nil { return err } return env.Cset(n, vals...) } vals := env.Call() keys := maps.Keys(vals) sort.Strings(keys) out := p.Handler.IO().Stdout() for _, k := range keys { fmt.Fprintln(out, k, "=", "'"+strings.Join(vals[k], " ")+"'") } return nil }, }, Disconnect: { Section: SectionConnection, Descs: []Desc{ {"Z", "", "close database connection"}, {"disconnect", "", ""}, }, Process: func(p *Params) error { return p.Handler.Close() }, }, Password: { Section: SectionConnection, Descs: []Desc{ {"password", "[USERNAME]", "change the password for a user"}, {"passwd", "", ""}, }, Process: func(p *Params) error { username, err := p.Get(true) if err != nil { return err } user, err := p.Handler.ChangePassword(username) switch { case err == text.ErrPasswordNotSupportedByDriver || err == text.ErrNotConnected: return err case err != nil: return fmt.Errorf(text.PasswordChangeFailed, user, err) } // p.Handler.Print(text.PasswordChangeSucceeded, user) return nil }, }, Exec: { Section: SectionQueryExecute, Descs: []Desc{ {"g", "[(OPTIONS)] [FILE] or ;", "execute query (and send results to file or |pipe)"}, {"G", "[(OPTIONS)] [FILE]", "as \\g, but forces vertical output mode"}, {"gx", "[(OPTIONS)] [FILE]", "as \\g, but forces expanded output mode"}, {"gexec", "", "execute query and execute each value of the result"}, {"gset", "[PREFIX]", "execute query and store results in " + text.CommandName + " variables"}, {"crosstabview", "[(OPTIONS)] [COLUMNS]", "execute query and display results in crosstab"}, {"watch", "[(OPTIONS)] [DURATION]", "execute query every specified interval"}, }, Process: func(p *Params) error { p.Option.Exec = ExecOnly switch p.Name { case "g": params, err := p.GetAll(true) if err != nil { return err } p.Option.ParseParams(params, "pipe") case "gexec": p.Option.Exec = ExecExec case "gset": p.Option.Exec = ExecSet params, err := p.GetAll(true) if err != nil { return err } 
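// Illustrative sketch (not part of the usql source tree): the \? and \drivers
// handlers above pipe their listings through $PAGER when the session is
// interactive, then close the pipe and wait for the pager to exit. A minimal
// standalone version of that setup using only the standard library; the
// "sh -c" invocation, the "less" fallback, and the sample output are
// assumptions for the example (the real code goes through env.Pipe):
package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	pager := os.Getenv("PAGER")
	if pager == "" {
		pager = "less" // assumed fallback for the example
	}
	cmd := exec.Command("sh", "-c", pager)
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	wc, err := cmd.StdinPipe()
	if err != nil {
		panic(err)
	}
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	for i := 1; i <= 100; i++ {
		fmt.Fprintf(wc, "line %d\n", i)
	}
	wc.Close()     // signal EOF to the pager
	_ = cmd.Wait() // wait for the pager to exit, as the handlers above do
}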
p.Option.ParseParams(params, "prefix") case "G": params, err := p.GetAll(true) if err != nil { return err } p.Option.ParseParams(params, "pipe") p.Option.Params["format"] = "vertical" case "gx": params, err := p.GetAll(true) if err != nil { return err } p.Option.ParseParams(params, "pipe") p.Option.Params["expanded"] = "on" case "crosstabview": p.Option.Exec = ExecCrosstab for i := 0; i < 4; i++ { ok, col, err := p.GetOK(true) if err != nil { return err } p.Option.Crosstab = append(p.Option.Crosstab, col) if !ok { break } } case "watch": p.Option.Exec = ExecWatch p.Option.Watch = 2 * time.Second ok, s, err := p.GetOK(true) switch { case err != nil: return err case ok: d, err := time.ParseDuration(s) if err != nil { if f, err := strconv.ParseFloat(s, 64); err == nil { d = time.Duration(f * float64(time.Second)) } } if d == 0 { return text.ErrInvalidWatchDuration } p.Option.Watch = d } } return nil }, }, Bind: { Section: SectionQueryExecute, Descs: []Desc{ {"bind", "[PARAM]...", "set query parameters"}, }, Process: func(p *Params) error { bind, err := p.GetAll(true) if err != nil { return err } var v []interface{} if n := len(bind); n != 0 { v = make([]interface{}, len(bind)) for i := 0; i < n; i++ { v[i] = bind[i] } } p.Handler.Bind(v) return nil }, }, Edit: { Section: SectionQueryBuffer, Descs: []Desc{ {"e", "[FILE] [LINE]", "edit the query buffer (or file) with external editor"}, {"edit", "", ""}, }, Process: func(p *Params) error { // get last statement s, buf := p.Handler.Last(), p.Handler.Buf() if buf.Len != 0 { s = buf.String() } path, err := p.Get(true) if err != nil { return err } line, err := p.Get(true) if err != nil { return err } // reset if no error n, err := env.EditFile(p.Handler.User(), path, line, s) if err != nil { return err } // save edited buffer to history p.Handler.IO().Save(string(n)) buf.Reset(n) return nil }, }, Print: { Section: SectionQueryBuffer, Descs: []Desc{ {"p", "", "show the contents of the query buffer"}, {"print", "", ""}, {"raw", "", "show the raw (non-interpolated) contents of the query buffer"}, }, Process: func(p *Params) error { // get last statement var s string if p.Name == "raw" { s = p.Handler.LastRaw() } else { s = p.Handler.Last() } // use current statement buf if not empty buf := p.Handler.Buf() switch { case buf.Len != 0 && p.Name == "raw": s = buf.RawString() case buf.Len != 0: s = buf.String() } switch { case s == "": s = text.QueryBufferEmpty case p.Handler.IO().Interactive() && env.All()["SYNTAX_HL"] == "true": b := new(bytes.Buffer) if p.Handler.Highlight(b, s) == nil { s = b.String() } } fmt.Fprintln(p.Handler.IO().Stdout(), s) return nil }, }, Reset: { Section: SectionQueryBuffer, Descs: []Desc{ {"r", "", "reset (clear) the query buffer"}, {"reset", "", ""}, }, Process: func(p *Params) error { p.Handler.Reset(nil) p.Handler.Print(text.QueryBufferReset) return nil }, }, Echo: { Section: SectionInputOutput, Descs: []Desc{ {"echo", "[-n] [STRING]", "write string to standard output (-n for no newline)"}, {"qecho", "[-n] [STRING]", "write string to \\o output stream (-n for no newline)"}, {"warn", "[-n] [STRING]", "write string to standard error (-n for no newline)"}, }, Process: func(p *Params) error { ok, n, err := p.GetOptional(true) if err != nil { return err } f := fmt.Fprintln var vals []string switch { case ok && n == "n": f = fmt.Fprint case ok: vals = append(vals, "-"+n) default: vals = append(vals, n) } v, err := p.GetAll(true) if err != nil { return err } out := p.Handler.IO().Stdout() switch o := p.Handler.GetOutput(); { case 
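// Illustrative sketch (not part of the usql source tree): the \watch handler
// above accepts either a Go duration ("500ms", "2s") or a bare number of
// seconds, falling back to strconv.ParseFloat when time.ParseDuration fails.
// The same parsing as a standalone helper; the sample inputs are assumptions
// for the example:
package main

import (
	"fmt"
	"strconv"
	"time"
)

// parseWatchInterval mirrors the fallback logic used by \watch: try a Go
// duration first, then a float number of seconds; zero means invalid.
func parseWatchInterval(s string) time.Duration {
	if d, err := time.ParseDuration(s); err == nil {
		return d
	}
	if f, err := strconv.ParseFloat(s, 64); err == nil {
		return time.Duration(f * float64(time.Second))
	}
	return 0
}

func main() {
	for _, s := range []string{"2s", "500ms", "1.5", "nope"} {
		fmt.Printf("%-7q -> %v\n", s, parseWatchInterval(s))
	}
}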
p.Name == "qecho" && o != nil: out = o case p.Name == "warn": out = p.Handler.IO().Stderr() } f(out, strings.Join(append(vals, v...), " ")) return nil }, }, Write: { Section: SectionQueryBuffer, Descs: []Desc{ {"w", "FILE", "write query buffer to file"}, {"write", "", ""}, }, Process: func(p *Params) error { // get last statement s, buf := p.Handler.Last(), p.Handler.Buf() if buf.Len != 0 { s = buf.String() } file, err := p.Get(true) if err != nil { return err } return os.WriteFile(file, []byte(strings.TrimSuffix(s, "\n")+"\n"), 0o644) }, }, ChangeDir: { Section: SectionOperatingSystem, Descs: []Desc{ {"cd", "[DIR]", "change the current working directory"}, }, Process: func(p *Params) error { dir, err := p.Get(true) if err != nil { return err } return env.Chdir(p.Handler.User(), dir) }, }, GetEnv: { Section: SectionOperatingSystem, Descs: []Desc{ {"getenv", "VARNAME ENVVAR", "fetch environment variable"}, }, Process: func(p *Params) error { n, err := p.Get(true) switch { case err != nil: return err case n == "": return text.ErrMissingRequiredArgument } v, err := p.Get(true) switch { case err != nil: return err case v == "": return text.ErrMissingRequiredArgument } value, _ := env.Getenv(v) return env.Set(n, value) }, }, SetEnv: { Section: SectionOperatingSystem, Descs: []Desc{ {"setenv", "NAME [VALUE]", "set or unset environment variable"}, }, Process: func(p *Params) error { n, err := p.Get(true) if err != nil { return err } v, err := p.Get(true) if err != nil { return err } return os.Setenv(n, v) }, }, Timing: { Section: SectionOperatingSystem, Descs: []Desc{ {"timing", "[on|off]", "toggle timing of commands"}, }, Process: func(p *Params) error { v, err := p.Get(true) if err != nil { return err } if v == "" { p.Handler.SetTiming(!p.Handler.GetTiming()) } else { s, err := env.ParseBool(v, "\\timing") if err != nil { stderr := p.Handler.IO().Stderr() fmt.Fprintf(stderr, "error: %v", err) fmt.Fprintln(stderr) } var b bool if s == "on" { b = true } p.Handler.SetTiming(b) } setting := "off" if p.Handler.GetTiming() { setting = "on" } p.Handler.Print(text.TimingSet, setting) return nil }, }, Shell: { Section: SectionOperatingSystem, Descs: []Desc{ {"!", "[COMMAND]", "execute command in shell or start interactive shell"}, }, Process: func(p *Params) error { return env.Shell(p.GetRaw()) }, }, Out: { Section: SectionInputOutput, Descs: []Desc{ {"o", "[FILE]", "send all query results to file or |pipe"}, {"out", "", ""}, }, Process: func(p *Params) error { p.Handler.SetOutput(nil) params, err := p.GetAll(true) if err != nil { return err } pipe := strings.Join(params, " ") if pipe == "" { return nil } var out io.WriteCloser if pipe[0] == '|' { out, _, err = env.Pipe(p.Handler.IO().Stdout(), p.Handler.IO().Stderr(), pipe[1:]) } else { out, err = os.OpenFile(pipe, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, 0o644) } if err != nil { return err } p.Handler.SetOutput(out) return nil }, }, Include: { Section: SectionInputOutput, Descs: []Desc{ {"i", "FILE", "execute commands from file"}, {"ir", "FILE", "as \\i, but relative to location of current script"}, {"include", "", ""}, {"include_relative", "", ""}, }, Process: func(p *Params) error { path, err := p.Get(true) if err != nil { return err } relative := p.Name == "ir" || p.Name == "include_relative" if err := p.Handler.Include(path, relative); err != nil { return fmt.Errorf("%s: %v", path, err) } return nil }, }, Transact: { Section: SectionTransaction, Descs: []Desc{ {"begin", "", "begin a transaction"}, {"begin", "[-read-only] [ISOLATION]", "begin a 
transaction with isolation level"}, {"commit", "", "commit current transaction"}, {"rollback", "", "rollback (abort) current transaction"}, {"abort", "", ""}, }, Process: func(p *Params) error { switch p.Name { case "commit": return p.Handler.Commit() case "rollback", "abort": return p.Handler.Rollback() } // read begin params readOnly := false ok, n, err := p.GetOptional(true) if ok { if n != "read-only" { return fmt.Errorf(text.InvalidOption, n) } readOnly = true if n, err = p.Get(true); err != nil { return err } } // build tx options var txOpts *sql.TxOptions if readOnly || n != "" { isolation := sql.LevelDefault switch strings.ToLower(n) { case "default", "": case "read-uncommitted": isolation = sql.LevelReadUncommitted case "read-committed": isolation = sql.LevelReadCommitted case "write-committed": isolation = sql.LevelWriteCommitted case "repeatable-read": isolation = sql.LevelRepeatableRead case "snapshot": isolation = sql.LevelSnapshot case "serializable": isolation = sql.LevelSerializable case "linearizable": isolation = sql.LevelLinearizable default: return text.ErrInvalidIsolationLevel } txOpts = &sql.TxOptions{ Isolation: isolation, ReadOnly: readOnly, } } // begin return p.Handler.Begin(txOpts) }, }, Prompt: { Section: SectionVariables, Descs: []Desc{ {"prompt", "[-TYPE] VAR [PROMPT]", "prompt user to set variable"}, }, Process: func(p *Params) error { typ := "string" ok, n, err := p.GetOptional(true) if err != nil { return err } if ok { typ = n n, err = p.Get(true) if err != nil { return err } } if n == "" { return text.ErrMissingRequiredArgument } if err := env.ValidIdentifier(n); err != nil { return err } vals, err := p.GetAll(true) if err != nil { return err } v, err := p.Handler.ReadVar(typ, strings.Join(vals, " ")) if err != nil { return err } return env.Set(n, v) }, }, SetVar: { Section: SectionVariables, Descs: []Desc{ {"set", "[NAME [VALUE]]", "set internal variable, or list all if no parameters"}, }, Process: func(p *Params) error { ok, n, err := p.GetOK(true) switch { case err != nil: return err case ok: vals, err := p.GetAll(true) if err != nil { return err } return env.Set(n, strings.Join(vals, " ")) } vals := env.All() keys := maps.Keys(vals) sort.Strings(keys) out := p.Handler.IO().Stdout() for _, k := range keys { fmt.Fprintln(out, k, "=", "'"+vals[k]+"'") } return nil }, }, Unset: { Section: SectionVariables, Descs: []Desc{ {"unset", "NAME", "unset (delete) internal variable"}, }, Process: func(p *Params) error { n, err := p.Get(true) if err != nil { return err } return env.Unset(n) }, }, SetPrintVar: { Section: SectionFormatting, Descs: []Desc{ {"pset", "[NAME [VALUE]]", "set table output option"}, {"a", "", "toggle between unaligned and aligned output mode"}, {"C", "[STRING]", "set table title, or unset if none"}, {"f", "[STRING]", "show or set field separator for unaligned query output"}, {"H", "", "toggle HTML output mode"}, {"T", "[STRING]", "set HTML
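// Illustrative sketch (not part of the usql source tree): the \begin handler
// above maps a lower-cased option such as "repeatable-read" onto a
// sql.IsolationLevel and hands it to BeginTx via sql.TxOptions. The same
// mapping and call shape in isolation, covering only a subset of the levels
// handled above; no database is opened in this sketch:
package main

import (
	"context"
	"database/sql"
	"fmt"
	"strings"
)

// isolationFromString mirrors the switch used by \begin for a few levels.
func isolationFromString(s string) (sql.IsolationLevel, error) {
	switch strings.ToLower(s) {
	case "", "default":
		return sql.LevelDefault, nil
	case "read-uncommitted":
		return sql.LevelReadUncommitted, nil
	case "read-committed":
		return sql.LevelReadCommitted, nil
	case "repeatable-read":
		return sql.LevelRepeatableRead, nil
	case "serializable":
		return sql.LevelSerializable, nil
	default:
		return 0, fmt.Errorf("invalid isolation level %q", s)
	}
}

// beginReadOnly shows how the resulting options would be passed to BeginTx.
func beginReadOnly(ctx context.Context, db *sql.DB, level string) (*sql.Tx, error) {
	iso, err := isolationFromString(level)
	if err != nil {
		return nil, err
	}
	return db.BeginTx(ctx, &sql.TxOptions{Isolation: iso, ReadOnly: true})
}

func main() {
	iso, _ := isolationFromString("repeatable-read")
	fmt.Println(iso) // Repeatable Read
}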
tag attributes, or unset if none"}, {"t", "[on|off]", "show only rows"}, {"x", "[on|off|auto]", "toggle expanded output"}, }, Process: func(p *Params) error { var ok bool var val string var err error switch p.Name { case "a", "H": default: ok, val, err = p.GetOK(true) if err != nil { return err } } // display variables if p.Name == "pset" && !ok { return env.Pwrite(p.Handler.IO().Stdout()) } var field, extra string switch p.Name { case "pset": field = val ok, val, err = p.GetOK(true) if err != nil { return err } case "a": field = "format" case "C": field = "title" case "f": field = "fieldsep" case "H": field, extra = "format", "html" case "t": field = "tuples_only" case "T": field = "tableattr" case "x": field = "expanded" } if !ok { if val, err = env.Ptoggle(field, extra); err != nil { return err } } else { if val, err = env.Pset(field, val); err != nil { return err } } // special replacement name for expanded field, when 'auto' if field == "expanded" && val == "auto" { field = "expanded_auto" } // format output mask := text.FormatFieldNameSetMap[field] unsetMask := text.FormatFieldNameUnsetMap[field] switch { case strings.Contains(mask, "%d"): i, _ := strconv.Atoi(val) p.Handler.Print(mask, i) case unsetMask != "" && val == "": p.Handler.Print(unsetMask) case !strings.Contains(mask, "%"): p.Handler.Print(mask) default: if field == "time" { val = fmt.Sprintf("%q", val) if tfmt := env.GoTime(); tfmt != val { val = fmt.Sprintf("%s (%q)", val, tfmt) } } p.Handler.Print(mask, val) } return nil }, }, Describe: { Section: SectionInformational, Descs: []Desc{ {"d[S+]", "[NAME]", "list tables, views, and sequences or describe table, view, sequence, or index"}, {"da[S+]", "[PATTERN]", "list aggregates"}, {"df[S+]", "[PATTERN]", "list functions"}, {"di[S+]", "[PATTERN]", "list indexes"}, {"dm[S+]", "[PATTERN]", "list materialized views"}, {"dn[S+]", "[PATTERN]", "list schemas"}, {"dp[S]", "[PATTERN]", "list table, view, and sequence access privileges"}, {"ds[S+]", "[PATTERN]", "list sequences"}, {"dt[S+]", "[PATTERN]", "list tables"}, {"dv[S+]", "[PATTERN]", "list views"}, {"l[+]", "", "list databases"}, }, Process: func(p *Params) error { ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt) defer cancel() m, err := p.Handler.MetadataWriter(ctx) if err != nil { return err } verbose := strings.ContainsRune(p.Name, '+') showSystem := strings.ContainsRune(p.Name, 'S') name := strings.TrimRight(p.Name, "S+") pattern, err := p.Get(true) if err != nil { return err } switch name { case "d": if pattern != "" { return m.DescribeTableDetails(p.Handler.URL(), pattern, verbose, showSystem) } return m.ListTables(p.Handler.URL(), "tvmsE", pattern, verbose, showSystem) case "df", "da": return m.DescribeFunctions(p.Handler.URL(), name, pattern, verbose, showSystem) case "dt", "dtv", "dtm", "dts", "dv", "dm", "ds": return m.ListTables(p.Handler.URL(), name, pattern, verbose, showSystem) case "dn": return m.ListSchemas(p.Handler.URL(), pattern, verbose, showSystem) case "di": return m.ListIndexes(p.Handler.URL(), pattern, verbose, showSystem) case "l": return m.ListAllDbs(p.Handler.URL(), pattern, verbose) case "dp": return m.ListPrivilegeSummaries(p.Handler.URL(), pattern, showSystem) } return nil }, }, Stats: { Section: SectionInformational, Descs: []Desc{ {"ss[+]", "[TABLE|QUERY] [k]", "show stats for a table or a query"}, }, Process: func(p *Params) error { ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt) defer cancel() m, err := p.Handler.MetadataWriter(ctx) if err != 
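// Illustrative sketch (not part of the usql source tree): the describe
// handlers above encode their modifiers in the command name itself — a
// trailing '+' means verbose and 'S' means include system objects, so "dt",
// "dtS", "dt+" and "dtS+" all reach the same handler. Parsing that suffix in
// isolation; the sample names are assumptions for the example:
package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, name := range []string{"d", "dt+", "dvS", "dnS+"} {
		verbose := strings.ContainsRune(name, '+')
		showSystem := strings.ContainsRune(name, 'S')
		base := strings.TrimRight(name, "S+")
		fmt.Printf("%-5s -> base=%-3s verbose=%-5v system=%v\n",
			name, base, verbose, showSystem)
	}
}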
nil { return err } verbose := strings.ContainsRune(p.Name, '+') name := strings.TrimRight(p.Name, "+") pattern, err := p.Get(true) if err != nil { return err } k := 0 if verbose { k = 3 } if name == "ss" { name = "sswnulhmkf" } ok, val, err := p.GetOK(true) if err != nil { return err } if ok { verbose = true k, err = strconv.Atoi(val) if err != nil { return err } } return m.ShowStats(p.Handler.URL(), name, pattern, verbose, k) }, }, Copy: { Section: SectionInputOutput, Descs: []Desc{ {"copy", "SRC DST QUERY TABLE", "copy query from source url to table on destination url"}, {"copy", "SRC DST QUERY TABLE(A,...)", "copy query from source url to columns of table on destination url"}, }, Process: func(p *Params) error { ctx := context.Background() stdout, stderr := p.Handler.IO().Stdout, p.Handler.IO().Stderr srcDsn, err := p.Get(true) if err != nil { return err } srcURL, err := dburl.Parse(srcDsn) if err != nil { return err } destDsn, err := p.Get(true) if err != nil { return err } destURL, err := dburl.Parse(destDsn) if err != nil { return err } query, err := p.Get(true) if err != nil { return err } table, err := p.Get(true) if err != nil { return err } src, err := drivers.Open(ctx, srcURL, stdout, stderr) if err != nil { return err } defer src.Close() dest, err := drivers.Open(ctx, destURL, stdout, stderr) if err != nil { return err } defer dest.Close() ctx, cancel := signal.NotifyContext(ctx, os.Interrupt) defer cancel() // get the result set r, err := src.QueryContext(ctx, query) if err != nil { return err } defer r.Close() n, err := drivers.Copy(ctx, destURL, stdout, stderr, r, table) if err != nil { return err } p.Handler.Print("COPY %d", n) return nil }, }, } // set up map cmdMap = make(map[string]Metacmd, len(cmds)) sectMap = make(map[Section][]Metacmd, len(SectionOrder)) for i, c := range cmds { mc := Metacmd(i) if mc == None { continue } name := c.Descs[0].Name if pos := strings.IndexRune(name, '['); pos != -1 { mods := strings.TrimRight(name[pos+1:], "]") name = name[:pos] cmdMap[name+mods] = mc if len(mods) > 1 { for _, r := range mods { cmdMap[name+string(r)] = mc } } } cmdMap[name] = mc for _, d := range c.Descs { if pos := strings.IndexRune(d.Name, '['); pos != -1 { mods := strings.TrimRight(d.Name[pos+1:], "]") d.Name = d.Name[:pos] cmdMap[d.Name+mods] = mc if len(mods) > 1 { for _, r := range mods { cmdMap[d.Name+string(r)] = mc } } } cmdMap[d.Name] = mc } sectMap[c.Section] = append(sectMap[c.Section], mc) } } // Usage is used by the [Question] command to display command line options. var Usage = func(io.Writer, bool) { } usql-0.19.19/metacmd/metacmd.go000066400000000000000000000054301476173253300162470ustar00rootroot00000000000000// Package metacmd contains meta information and implementation for usql's // backslash (\) commands. package metacmd import ( "github.com/xo/usql/stmt" "github.com/xo/usql/text" ) // Metacmd represents a command and associated meta information about it. type Metacmd uint // Decode converts a command name (or alias) into a Runner. func Decode(name string, params *stmt.Params) (Runner, error) { mc, ok := cmdMap[name] if !ok || name == "" { return nil, text.ErrUnknownCommand } cmd := cmds[mc] return RunnerFunc(func(h Handler) (Option, error) { p := &Params{ Handler: h, Name: name, Params: params, } err := cmd.Process(p) return p.Option, err }), nil } // Command types. const ( // None is an empty command. None Metacmd = iota // Question is question meta command (\?) Question // Quit is the quit meta command (\?). 
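// Illustrative sketch (not part of the usql source tree): \copy above opens
// two database URLs, runs a query on the source, and streams the rows into a
// table on the destination. A stripped-down standalone version of that flow
// using two in-memory SQLite databases; the schema, data, and row-by-row
// insert are assumptions for the example (the real command delegates the
// insert to drivers.Copy):
package main

import (
	"database/sql"
	"fmt"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	src, _ := sql.Open("sqlite3", ":memory:")
	dst, _ := sql.Open("sqlite3", ":memory:")
	defer src.Close()
	defer dst.Close()

	must(src.Exec(`CREATE TABLE t (id INTEGER, name TEXT)`))
	must(src.Exec(`INSERT INTO t VALUES (1, 'a'), (2, 'b')`))
	must(dst.Exec(`CREATE TABLE t2 (id INTEGER, name TEXT)`))

	rows, err := src.Query(`SELECT id, name FROM t`)
	if err != nil {
		panic(err)
	}
	defer rows.Close()

	var n int64
	for rows.Next() {
		var id int
		var name string
		if err := rows.Scan(&id, &name); err != nil {
			panic(err)
		}
		res, err := dst.Exec(`INSERT INTO t2 (id, name) VALUES (?, ?)`, id, name)
		if err != nil {
			panic(err)
		}
		c, _ := res.RowsAffected()
		n += c
	}
	if err := rows.Err(); err != nil {
		panic(err)
	}
	fmt.Printf("COPY %d\n", n)
}

func must(_ sql.Result, err error) {
	if err != nil {
		panic(err)
	}
}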
Quit // Copyright is the copyright meta command (\copyright). Copyright // Connect is the connect meta command (\c, \connect). Connect // SetConnVar is the set conn var command (\cset). SetConnVar // Copy is the copy meta command (\copy). Copy // Disconnect is the disconnect meta command (\Z). Disconnect // Password is the change password meta command (\password). Password // ConnectionInfo is the connection info meta command (\conninfo). ConnectionInfo // Drivers is the driver info meta command (\drivers). Drivers // Describe is the describe meta command (\d and variants). Describe // Exec is the execute meta command (\g and variants). Exec // Bind is the bind meta command (\bind). Bind // Edit is the edit query buffer meta command (\e). Edit // Print is the print query buffer meta command (\p, \print, \raw). Print // Reset is the reset query buffer meta command (\r, \reset). Reset // Echo is the echo meta command (\echo, \warn, \qecho). Echo // Write is the write meta command (\w). Write // ChangeDir is the system change directory meta command (\cd). ChangeDir // GetEnv is the system get environment variable meta command (\getenv). GetEnv // SetEnv is the system set environment variable meta command (\setenv). SetEnv // Shell is the system shell exec meta command (\!). Shell // Out is the switch output meta command (\o). Out // Include is the system include file meta command (\i and variants). Include // Transact is the transaction meta command (\begin, \commit, \rollback). Transact // Prompt is the variable prompt meta command (\prompt). Prompt // SetVar is the set variable meta command (\set). SetVar // Unset is the variable unset meta command (\unset). Unset // SetPrintVar is the set print variable meta commands (\pset, \a, \C, \f, \H, \t, \T, \x). SetPrintVar // Timing is the timing meta command (\timing). Timing // Stats is the show stats meta command (\ss and variants). Stats ) usql-0.19.19/metacmd/section.go000066400000000000000000000047671476173253300163150ustar00rootroot00000000000000package metacmd import ( "fmt" "io" "strings" ) // Desc holds information about a command or alias description. type Desc struct { Name string Params string Desc string } // Section is a meta command section. type Section string // Meta command section types. const ( SectionGeneral Section = "General" SectionQueryExecute Section = "Query Execute" SectionQueryBuffer Section = "Query Buffer" SectionHelp Section = "Help" SectionTransaction Section = "Transaction" SectionInputOutput Section = "Input/Output" SectionInformational Section = "Informational" SectionFormatting Section = "Formatting" SectionConnection Section = "Connection" SectionOperatingSystem Section = "Operating System" SectionVariables Section = "Variables" // SectionLargeObjects Section = "Large Objects" ) // String satisfies stringer. func (s Section) String() string { return string(s) } // SectionOrder is the order of sections to display via Listing. var SectionOrder = []Section{ SectionGeneral, SectionQueryExecute, SectionQueryBuffer, SectionHelp, SectionInputOutput, SectionInformational, SectionFormatting, SectionTransaction, SectionConnection, SectionOperatingSystem, SectionVariables, } // Listing writes the formatted command listing to w, separated into different // sections for all known commands. 
func Listing(w io.Writer) { sectionDescs := make(map[Section][][]string, len(SectionOrder)) var plen int for _, section := range SectionOrder { var descs [][]string for _, c := range sectMap[section] { cmd := cmds[c] for i, d := range cmd.Descs { if d.Desc == "" && d.Params == "" { continue } s, opts := optText(cmd.Descs[i]) descs, plen = add(descs, ` \`+strings.TrimSpace(d.Name)+opts, s, plen) } } sectionDescs[section] = descs } for i, section := range SectionOrder { if i != 0 { fmt.Fprintln(w) } fmt.Fprintln(w, section) for _, line := range sectionDescs[section] { fmt.Fprintln(w, rpad(line[0], plen), "", line[1]) } } } // rpad right pads a string. func rpad(s string, l int) string { return s + strings.Repeat(" ", l-len(s)) } // add adds b, c to a, returning the max of pad or len(b). func add(a [][]string, b, c string, pad int) ([][]string, int) { return append(a, []string{b, c}), max(pad, len(b)) } // optText returns a string and the opt text. func optText(desc Desc) (string, string) { if desc.Params != "" { return desc.Desc, " " + desc.Params } return desc.Desc, desc.Params } usql-0.19.19/metacmd/types.go000066400000000000000000000132451476173253300160040ustar00rootroot00000000000000package metacmd import ( "context" "database/sql" "io" "os/user" "strings" "time" "github.com/xo/dburl" "github.com/xo/usql/drivers" "github.com/xo/usql/drivers/metadata" "github.com/xo/usql/env" "github.com/xo/usql/rline" "github.com/xo/usql/stmt" "github.com/xo/usql/text" ) // Handler is the shared interface for a command handler. type Handler interface { // IO handles the handler's IO. IO() rline.IO // User returns the current user. User() *user.User // URL returns the current database URL. URL() *dburl.URL // DB returns the current database connection. DB() drivers.DB // Last returns the last executed query. Last() string // LastRaw returns the last raw (non-interpolated) query. LastRaw() string // Buf returns the current query buffer. Buf() *stmt.Stmt // Reset resets the last and current query buffer. Reset([]rune) // Bind binds query parameters. Bind([]interface{}) // Open opens a database connection. Open(context.Context, ...string) error // Close closes the current database connection. Close() error // ChangePassword changes the password for a user. ChangePassword(string) (string, error) // ReadVar reads a variable of a specified type. ReadVar(string, string) (string, error) // Include includes a file. Include(string, bool) error // Begin begins a transaction. Begin(*sql.TxOptions) error // Commit commits the current transaction. Commit() error // Rollback aborts the current transaction. Rollback() error // Highlight highlights the statement. Highlight(io.Writer, string) error // GetTiming mode. GetTiming() bool // SetTiming mode. SetTiming(bool) // GetOutput writer. GetOutput() io.Writer // SetOutput writer. SetOutput(io.WriteCloser) // MetadataWriter retrieves the metadata writer for the handler. MetadataWriter(context.Context) (metadata.Writer, error) // Print formats according to a format specifier and writes to handler's standard output. Print(string, ...interface{}) } // Runner is a runner interface type. type Runner interface { Run(Handler) (Option, error) } // RunnerFunc is a type wrapper for a single func satisfying Runner.Run. type RunnerFunc func(Handler) (Option, error) // Run satisfies the Runner interface. func (f RunnerFunc) Run(h Handler) (Option, error) { return f(h) } // ExecType represents the type of execution requested. 
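// Illustrative sketch (not part of the usql source tree): Listing above builds
// the \? help text in two passes — it first collects name/description pairs
// per section while tracking the widest name, then right-pads each name so the
// descriptions line up. The same two-pass alignment in miniature; the entries
// are assumptions for the example:
package main

import (
	"fmt"
	"strings"
)

func main() {
	entries := [][2]string{
		{`\q`, "quit"},
		{`\copyright`, "show usage and distribution terms"},
		{`\drivers`, "display information about available database drivers"},
	}
	// pass 1: find the widest command name
	var width int
	for _, e := range entries {
		if len(e[0]) > width {
			width = len(e[0])
		}
	}
	// pass 2: right-pad and print
	for _, e := range entries {
		fmt.Println(" "+e[0]+strings.Repeat(" ", width-len(e[0])), "", e[1])
	}
}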
type ExecType int const ( // ExecNone indicates no execution. ExecNone ExecType = iota // ExecOnly indicates plain execution only (\g). ExecOnly // ExecPipe indicates execution and piping results (\g |file) ExecPipe // ExecSet indicates execution and setting the resulting columns as // variables (\gset). ExecSet // ExecExec indicates execution and executing the resulting rows (\gexec). ExecExec // ExecCrosstab indicates execution using crosstabview (\crosstabview). ExecCrosstab // ExecWatch indicates repeated execution with a fixed time interval. ExecWatch ) // Option contains parsed result options of a metacmd. type Option struct { // Quit instructs the handling code to quit. Quit bool // Exec informs the handling code of the type of execution. Exec ExecType // Params are accompanying string parameters for execution. Params map[string]string // Crosstab are the crosstab column parameters. Crosstab []string // Watch is the watch duration interval. Watch time.Duration } func (opt *Option) ParseParams(params []string, defaultKey string) error { if opt.Params == nil { opt.Params = make(map[string]string, len(params)) } formatOptions := false for i, param := range params { if len(param) == 0 { continue } if !formatOptions { if param[0] == '(' { formatOptions = true } else { opt.Params[defaultKey] = strings.Join(params[i:], " ") return nil } } parts := strings.SplitN(param, "=", 2) if len(parts) == 1 { return text.ErrInvalidFormatOption } opt.Params[strings.TrimLeft(parts[0], "(")] = strings.TrimRight(parts[1], ")") if formatOptions && param[len(param)-1] == ')' { formatOptions = false } } return nil } // Params wraps metacmd parameters. type Params struct { // Handler is the process handler. Handler Handler // Name is the name of the metacmd. Name string // Params are the actual statement parameters. Params *stmt.Params // Option contains resulting command execution options. Option Option } // Get returns the next command parameter, using env.Unquote to decode quoted // strings. func (p *Params) Get(exec bool) (string, error) { _, v, err := p.Params.Get(env.Unquote( p.Handler.User(), exec, env.All(), )) if err != nil { return "", err } return v, nil } // GetOK returns the next command parameter, using env.Unquote to decode quoted // strings. func (p *Params) GetOK(exec bool) (bool, string, error) { return p.Params.Get(env.Unquote( p.Handler.User(), exec, env.All(), )) } // GetOptional returns the next command parameter, using env.Unquote to decode // quoted strings, returns true when the value is prefixed with a "-", along // with the value sans the "-" prefix. Otherwise returns false and the value. func (p *Params) GetOptional(exec bool) (bool, string, error) { v, err := p.Get(exec) if err != nil { return false, "", err } if len(v) > 0 && v[0] == '-' { return true, v[1:], nil } return false, v, nil } // GetAll gets all remaining command parameters using env.Unquote to decode // quoted strings. func (p *Params) GetAll(exec bool) ([]string, error) { return p.Params.GetAll(env.Unquote( p.Handler.User(), exec, env.All(), )) } // GetRaw gets the remaining command parameters as a raw string. // // Note: no other processing is done to interpolate variables or to decode // string values. 
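// Illustrative sketch (not part of the usql source tree): Option.ParseParams
// above lets \g-style commands take "(key=value ...)" format options before
// their positional argument, e.g. `\g (format=csv) out.csv`. A simplified
// standalone version of that split; the tokens, the default key, and the
// parseParams helper are assumptions for the example:
package main

import (
	"fmt"
	"strings"
)

// parseParams collects "(k=v ...)" tokens into a map and joins whatever is
// left into params[defaultKey], loosely following the handler above.
func parseParams(tokens []string, defaultKey string) (map[string]string, error) {
	params := map[string]string{}
	inOpts := false
	for i, tok := range tokens {
		if tok == "" {
			continue
		}
		if !inOpts {
			if tok[0] != '(' {
				params[defaultKey] = strings.Join(tokens[i:], " ")
				return params, nil
			}
			inOpts = true
		}
		k, v, ok := strings.Cut(tok, "=")
		if !ok {
			return nil, fmt.Errorf("invalid format option %q", tok)
		}
		params[strings.TrimLeft(k, "(")] = strings.TrimRight(v, ")")
		if strings.HasSuffix(tok, ")") {
			inOpts = false
		}
	}
	return params, nil
}

func main() {
	p, err := parseParams([]string{"(format=csv", "expanded=on)", "out.csv"}, "pipe")
	if err != nil {
		panic(err)
	}
	fmt.Println(p) // map[expanded:on format:csv pipe:out.csv]
}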
func (p *Params) GetRaw() string { return p.Params.GetRaw() } usql-0.19.19/rline/000077500000000000000000000000001476173253300140035ustar00rootroot00000000000000usql-0.19.19/rline/rline.go000066400000000000000000000115701476173253300154470ustar00rootroot00000000000000// Package rline provides a readline implementation for usql. package rline import ( "errors" "io" "os" "github.com/gohxs/readline" ) var ( // ErrInterrupt is the interrupt error. ErrInterrupt = readline.ErrInterrupt // ErrPasswordNotAvailable is the password not available error. ErrPasswordNotAvailable = errors.New("password not available") ) // IO is the common input/output interface. type IO interface { // Next returns the next line of runes (excluding '\n') from the input. Next() ([]rune, error) // Close closes the IO. Close() error // Stdout is the IO's standard out. Stdout() io.Writer // Stderr is the IO's standard error out. Stderr() io.Writer // Interactive determines if the IO is an interactive terminal. Interactive() bool // Cygwin determines if the IO is a Cygwin interactive terminal. Cygwin() bool // Prompt sets the prompt for the next interactive line read. Prompt(string) // Completer sets the auto-completer. Completer(readline.AutoCompleter) // Save saves a line of history. Save(string) error // Password prompts for a password. Password(string) (string, error) // SetOutput sets the output filter func. SetOutput(func(string) string) } // Rline provides a type compatible with the IO interface. type Rline struct { Inst *readline.Instance N func() ([]rune, error) C func() error Out io.Writer Err io.Writer Int bool Cyg bool P func(string) A func(readline.AutoCompleter) S func(string) error Pw func(string) (string, error) } // Next returns the next line of runes (excluding '\n') from the input. func (l *Rline) Next() ([]rune, error) { if l.N != nil { return l.N() } return nil, io.EOF } // Close closes the IO. func (l *Rline) Close() error { if l.C != nil { return l.C() } return nil } // Stdout is the IO's standard out. func (l *Rline) Stdout() io.Writer { return l.Out } // Stderr is the IO's standard error out. func (l *Rline) Stderr() io.Writer { return l.Err } // Interactive determines if the IO is an interactive terminal. func (l *Rline) Interactive() bool { return l.Int } // Cygwin determines if the IO is a Cygwin interactive terminal. func (l *Rline) Cygwin() bool { return l.Cyg } // Prompt sets the prompt for the next interactive line read. func (l *Rline) Prompt(s string) { if l.P != nil { l.P(s) } } // Completer sets the auto-completer. func (l *Rline) Completer(a readline.AutoCompleter) { if l.A != nil { l.A(a) } } // Save saves a line of history. func (l *Rline) Save(s string) error { if l.S != nil { return l.S(s) } return nil } // Password prompts for a password. func (l *Rline) Password(prompt string) (string, error) { if l.Pw != nil { return l.Pw(prompt) } return "", ErrPasswordNotAvailable } // SetOutput sets the output format func. func (l *Rline) SetOutput(f func(string) string) { l.Inst.Config.Output = f } // New creates a new readline input/output handler. 
func New(interactive, cygwin, forceNonInteractive bool, out, histfile string) (IO, error) { var closers []func() error // configure stdin var stdin io.ReadCloser switch { case forceNonInteractive: interactive, cygwin = false, false case cygwin: stdin = os.Stdin default: stdin = readline.Stdin } // configure stdout var stdout io.WriteCloser switch { case out != "": var err error stdout, err = os.OpenFile(out, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, 0o644) if err != nil { return nil, err } closers = append(closers, stdout.Close) interactive = false case cygwin: stdout = os.Stdout default: stdout = readline.Stdout } // configure stderr var stderr io.Writer = os.Stderr if !cygwin { stderr = readline.Stderr } if interactive { // wrap it with cancelable stdin stdin = readline.NewCancelableStdin(stdin) } // create readline instance l, err := readline.NewEx(&readline.Config{ HistoryFile: histfile, DisableAutoSaveHistory: true, InterruptPrompt: "^C", HistorySearchFold: true, Stdin: stdin, Stdout: stdout, Stderr: stderr, FuncIsTerminal: func() bool { return interactive || cygwin }, FuncFilterInputRune: func(r rune) (rune, bool) { if r == readline.CharCtrlZ { return r, false } return r, true }, }) if err != nil { return nil, err } closers = append(closers, l.Close) n := l.Operation.Runes pw := func(prompt string) (string, error) { buf, err := l.ReadPassword(prompt) if err != nil { return "", err } return string(buf), nil } if forceNonInteractive { n, pw = nil, nil } return &Rline{ Inst: l, N: n, C: func() error { for _, f := range closers { _ = f() } return nil }, Out: stdout, Err: stderr, Int: interactive || cygwin, Cyg: cygwin, P: l.SetPrompt, A: func(a readline.AutoCompleter) { cfg := l.Config.Clone() cfg.AutoComplete = a l.SetConfig(cfg) }, S: l.SaveHistory, Pw: pw, }, nil } usql-0.19.19/run.go000066400000000000000000000400751476173253300140330ustar00rootroot00000000000000//go:debug x509negativeserial=1 package main import ( "context" "errors" "fmt" "io" "os" "os/user" "path/filepath" "strings" "github.com/go-git/go-billy/v5" "github.com/go-git/go-billy/v5/memfs" "github.com/go-git/go-billy/v5/osfs" "github.com/mattn/go-isatty" "github.com/spf13/cobra" "github.com/spf13/pflag" "github.com/spf13/viper" "github.com/xo/dburl" "github.com/xo/usql/env" "github.com/xo/usql/handler" "github.com/xo/usql/metacmd" "github.com/xo/usql/rline" "github.com/xo/usql/text" ) // ContextExecutor is the command context. type ContextExecutor interface { ExecuteContext(context.Context) error } // New builds the command context. func New(cliargs []string) ContextExecutor { args := &Args{} var ( bashCompletion bool zshCompletion bool fishCompletion bool powershellCompletion bool noDescriptions bool badHelp bool ) v := viper.New() c := &cobra.Command{ Use: text.CommandName + " [flags]... 
[DSN]", Short: text.Short(), Version: text.CommandVersion, SilenceErrors: true, SilenceUsage: true, DisableAutoGenTag: true, DisableSuggestions: true, Args: func(_ *cobra.Command, cliargs []string) error { if len(cliargs) > 1 { return text.ErrWrongNumberOfArguments } return nil }, PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { commandUpper := text.CommandUpper() configFile := strings.TrimSpace(os.Getenv(commandUpper + "_CONFIG")) cmd.Flags().VisitAll(func(f *pflag.Flag) { if s := strings.TrimSpace(f.Value.String()); f.Name == "config" && s != "" { configFile = s } }) if configFile != "" { v.SetConfigFile(configFile) } else { v.SetConfigName(text.ConfigName) if configDir, err := os.UserConfigDir(); err == nil { v.AddConfigPath(filepath.Join(configDir, text.CommandName)) } } if err := v.ReadInConfig(); err != nil { if _, ok := err.(viper.ConfigFileNotFoundError); !ok { return err } } v.SetEnvPrefix(commandUpper) v.AutomaticEnv() cmd.Flags().VisitAll(func(f *pflag.Flag) { if f.Name == "config" { return } _ = v.BindEnv(f.Name, commandUpper+"_"+strings.ToUpper(strings.ReplaceAll(f.Name, "-", "_"))) if !f.Changed && v.IsSet(f.Name) { _ = cmd.Flags().Set(f.Name, fmt.Sprintf("%v", v.Get(f.Name))) } }) // unhide params switch { case bashCompletion, zshCompletion, fishCompletion, powershellCompletion, cmd.Name() == "__complete": flags := cmd.Root().Flags() for _, name := range []string{"no-psqlrc", "no-usqlrc", "var", "variable"} { flags.Lookup(name).Hidden = false } } return nil }, RunE: func(cmd *cobra.Command, cliargs []string) error { // completions and short circuits switch { case bashCompletion: return cmd.GenBashCompletionV2(os.Stdout, !noDescriptions) case zshCompletion: if noDescriptions { return cmd.GenZshCompletionNoDesc(os.Stdout) } return cmd.GenZshCompletion(os.Stdout) case fishCompletion: return cmd.GenFishCompletion(os.Stdout, !noDescriptions) case powershellCompletion: if noDescriptions { return cmd.GenPowerShellCompletion(os.Stdout) } return cmd.GenPowerShellCompletionWithDesc(os.Stdout) case badHelp: return errors.New("unknown shorthand flag: 'h' in -h") } // run if len(cliargs) > 0 { args.DSN = cliargs[0] } // create charts chroot var err error if args.Charts, err = chartsFS(v); err != nil { return err } // fmt.Fprintf(os.Stderr, "\n\n%v\n\n", args.Charts) args.Connections = v.GetStringMap("connections") args.Init = v.GetString("init") return Run(cmd.Context(), args) }, } c.SetVersionTemplate("{{ .Name }} {{ .Version }}\n") c.SetArgs(cliargs[1:]) c.SetUsageTemplate(text.UsageTemplate) flags := c.Flags() flags.SortFlags = false // completions / short circuits flags.BoolVar(&bashCompletion, "completion-script-bash", false, "output bash completion script and exit") flags.BoolVar(&zshCompletion, "completion-script-zsh", false, "output zsh completion script and exit") flags.BoolVar(&fishCompletion, "completion-script-fish", false, "output fish completion script and exit") flags.BoolVar(&powershellCompletion, "completion-script-powershell", false, "output powershell completion script and exit") flags.BoolVar(&noDescriptions, "no-descriptions", false, "disable descriptions in completion scripts") flags.BoolVarP(&badHelp, "bad-help", "h", false, "bad help") // command / file flags flags.VarP(commandOrFile{args, true}, "command", "c", "run only single command (SQL or internal) and exit") flags.VarP(commandOrFile{args, false}, "file", "f", "execute commands from file and exit") // general flags flags.BoolVarP(&args.NoPassword, "no-password", "w", false, "never prompt for 
password") flags.BoolVarP(&args.NoInit, "no-init", "X", false, "do not execute initialization scripts (aliases: --no-rc --no-psqlrc --no-usqlrc)") flags.BoolVar(&args.NoInit, "no-rc", false, "do not read startup file") flags.BoolVar(&args.NoInit, "no-psqlrc", false, "do not read startup file") flags.BoolVar(&args.NoInit, "no-usqlrc", false, "do not read startup file") flags.VarP(filevar{&args.Out}, "out", "o", "output file") flags.BoolVarP(&args.ForcePassword, "password", "W", false, "force password prompt (should happen automatically)") flags.BoolVarP(&args.SingleTransaction, "single-transaction", "1", false, "execute as a single transaction (if non-interactive)") ss := func(v *[]string, name, short, usage, placeholder string, vals ...string) { f := flags.VarPF(vs{v, vals, placeholder}, name, short, usage) if placeholder == "" { f.DefValue, f.NoOptDefVal = "true", "true" } } // set ss(&args.Vars, "set", "v", `set variable NAME to VALUE (see \set command, aliases: --var --variable)`, "NAME=VALUE") ss(&args.Vars, "var", "", "set variable NAME to VALUE", "NAME=VALUE") ss(&args.Vars, "variable", "", "set variable NAME to VALUE", "NAME=VALUE") // cset ss(&args.Cvars, "cset", "N", `set named connection NAME to DSN (see \cset command)`, "NAME=DSN") // pset ss(&args.Pvars, "pset", "P", `set printing option VAR to ARG (see \pset command)`, "VAR=ARG") // pset flags ss(&args.Pvars, "field-separator", "F", `field separator for unaligned and CSV output (default "|" and ",")`, "FIELD-SEPARATOR", "fieldsep=%q", "csv_fieldsep=%q") ss(&args.Pvars, "record-separator", "R", `record separator for unaligned and CSV output (default \n)`, "RECORD-SEPARATOR", "recordsep=%q") ss(&args.Pvars, "table-attr", "T", "set HTML table tag attributes (e.g., width, border)", "TABLE-ATTR", "tableattr=%q") // pset bools ss(&args.Pvars, "no-align", "A", "unaligned table output mode", "", "format=unaligned") ss(&args.Pvars, "html", "H", "HTML table output mode", "", "format=html") ss(&args.Pvars, "tuples-only", "t", "print rows only", "", "tuples_only=on") ss(&args.Pvars, "expanded", "x", "turn on expanded table output", "", "expanded=on") ss(&args.Pvars, "field-separator-zero", "z", "set field separator for unaligned and CSV output to zero byte", "", "fieldsep_zero=on") ss(&args.Pvars, "record-separator-zero", "0", "set record separator for unaligned and CSV output to zero byte", "", "recordsep_zero=on") ss(&args.Pvars, "json", "J", "JSON output mode", "", "format=json") ss(&args.Pvars, "csv", "C", "CSV output mode", "", "format=csv") ss(&args.Pvars, "vertical", "G", "vertical output mode", "", "format=vertical") // set bools ss(&args.Vars, "quiet", "q", "run quietly (no messages, only query output)", "", "QUIET=on") // app config _ = flags.StringP("config", "", "", "config file") // manually set --version, see github.com/spf13/cobra/command.go _ = flags.BoolP("version", "V", false, "output version information, then exit") _ = flags.SetAnnotation("version", cobra.FlagSetByCobraAnnotation, []string{"true"}) // manually set --help, see github.com/spf13/cobra/command.go _ = flags.BoolP("help", "?", false, "show this help, then exit") _ = c.Flags().SetAnnotation("help", cobra.FlagSetByCobraAnnotation, []string{"true"}) // mark hidden for _, name := range []string{ "no-rc", "no-psqlrc", "no-usqlrc", "var", "variable", "completion-script-bash", "completion-script-zsh", "completion-script-fish", "completion-script-powershell", "no-descriptions", "bad-help", } { flags.Lookup(name).Hidden = true } // expose to metacmd metacmd.Usage = 
func(w io.Writer, banner bool) { s := c.UsageString() if banner { s = text.Short() + "\n\n" + s } _, _ = w.Write([]byte(s)) } return c } // Run runs the application. func Run(ctx context.Context, args *Args) error { // get user u, err := user.Current() if err != nil { return err } // get working directory wd, err := os.Getwd() if err != nil { return err } // determine if interactive interactive := isatty.IsTerminal(os.Stdout.Fd()) && isatty.IsTerminal(os.Stdin.Fd()) cygwin := isatty.IsCygwinTerminal(os.Stdout.Fd()) && isatty.IsCygwinTerminal(os.Stdin.Fd()) forceNonInteractive := len(args.CommandOrFiles) != 0 // enable term graphics if !forceNonInteractive && interactive && !cygwin { // NOTE: this is done here and not in the env.init() package, because // NOTE: we need to determine if it is interactive first, otherwise it // NOTE: could mess up the non-interactive output with control characters var typ string if s, _ := env.Getenv(text.CommandUpper()+"_TERM_GRAPHICS", "TERM_GRAPHICS"); s != "" { typ = s } if err := env.Set("TERM_GRAPHICS", typ); err != nil { return err } } // configured named connections for name, v := range args.Connections { if err := setConn(name, v); err != nil && !forceNonInteractive && interactive { fmt.Fprintln(os.Stderr, fmt.Sprintf(text.InvalidNamedConnection, name, err)) } } // fmt.Fprintf(os.Stdout, "VARS: %v\nCVARS: %v\nPVARS: %v\n", args.Vars, args.Cvars, args.Pvars) // set vars for _, v := range args.Vars { if i := strings.Index(v, "="); i != -1 { _ = env.Set(v[:i], v[i+1:]) } else { _ = env.Unset(v) } } // set cvars for _, v := range args.Cvars { if i := strings.Index(v, "="); i != -1 { s := v[i+1:] if c := s[0]; c == '\'' || c == '"' { if s, err = env.Dequote(s, c); err != nil { return err } } if err = env.Cset(v[:i], s); err != nil { return err } } else { if err = env.Cset(v, ""); err != nil { return err } } } // set pvars for _, v := range args.Pvars { if i := strings.Index(v, "="); i != -1 { s := v[i+1:] if c := s[0]; c == '\'' || c == '"' { if s, err = env.Dequote(s, c); err != nil { return err } } if _, err = env.Pset(v[:i], s); err != nil { return err } } else { if _, err = env.Ptoggle(v, ""); err != nil { return err } } } // create input/output l, err := rline.New(interactive, cygwin, forceNonInteractive, args.Out, env.HistoryFile(u)) if err != nil { return err } defer l.Close() // create handler h := handler.New(l, u, wd, args.Charts, args.NoPassword) // force password dsn := args.DSN if args.ForcePassword { if dsn, err = h.Password(dsn); err != nil { return err } } // open dsn if err = h.Open(ctx, dsn); err != nil { return err } // start transaction if args.SingleTransaction { if h.IO().Interactive() { return text.ErrSingleTransactionCannotBeUsedWithInteractiveMode } if err = h.BeginTx(ctx, nil); err != nil { return err } } // init script if !args.NoInit { // rc file if rc := env.RCFile(u); rc != "" { if err = h.Include(rc, false); err != nil && err != text.ErrNoSuchFileOrDirectory { return err } } if s := strings.TrimSpace(args.Init); s != "" { h.Reset([]rune(s + "\n")) } } // setup runner f := h.Run if len(args.CommandOrFiles) != 0 { f = runCommandOrFiles(h, args.CommandOrFiles) } // run if err = f(); err != nil { return err } // commit if args.SingleTransaction { return h.Commit() } return nil } // Args are the command line arguments. 
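// Illustrative sketch (standalone, not part of this package): Run applies
// --set/-v style values by splitting on the first '=' and calling env.Set,
// or env.Unset when no '=' is present. The variable names below are made up.
package main

import (
	"fmt"
	"strings"

	"github.com/xo/usql/env"
)

func main() {
	for _, v := range []string{"FOO=bar", "QUIET=on", "FOO"} {
		if i := strings.Index(v, "="); i != -1 {
			fmt.Printf("set %s = %q\n", v[:i], v[i+1:])
			_ = env.Set(v[:i], v[i+1:])
		} else {
			fmt.Printf("unset %s\n", v)
			_ = env.Unset(v)
		}
	}
}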
type Args struct { DSN string CommandOrFiles []CommandOrFile Out string ForcePassword bool NoPassword bool NoInit bool SingleTransaction bool Vars []string Cvars []string Pvars []string Charts billy.Filesystem Connections map[string]interface{} Init string } // CommandOrFile is a special type to deal with interspersed -c, -f, // command-line options, to ensure proper order execution. type CommandOrFile struct { Command bool Value string } // commandOrFile provides a [pflag.Value] to wrap the command or file value in // [Args]. type commandOrFile struct { args *Args command bool } // Set satisfies the [pflag.Value] interface. func (c commandOrFile) Set(value string) error { c.args.CommandOrFiles = append(c.args.CommandOrFiles, CommandOrFile{ Command: c.command, Value: value, }) return nil } // String satisfies the [pflag.Value] interface. func (c commandOrFile) String() string { return "" } // Type satisfies the [pflag.Value] interface. func (c commandOrFile) Type() string { if c.command { return "COMMAND" } return "FILE" } // vs handles setting vars with predefined values. type vs struct { vars *[]string vals []string typ string } // Set satisfies the [pflag.Value] interface. func (p vs) Set(value string) error { if len(p.vals) != 0 { for _, v := range p.vals { if strings.Contains(v, "%") { *p.vars = append(*p.vars, fmt.Sprintf(v, value)) } else { *p.vars = append(*p.vars, v) } } } else { *p.vars = append(*p.vars, value) } return nil } // String satisfies the [pflag.Value] interface. func (vs) String() string { return "" } // Type satisfies the [pflag.Value] interface. func (p vs) Type() string { if p.typ == "" { return "bool" } return p.typ } // filevar is a file var. type filevar struct { v *string } // Set satisfies the [pflag.Value] interface. func (p filevar) Set(value string) error { *p.v = value return nil } // String satisfies the [pflag.Value] interface. func (filevar) String() string { return "" } // Type satisfies the [pflag.Value] interface. func (filevar) Type() string { return "FILE" } // chartsFS creates a filesystem for charts. func chartsFS(v *viper.Viper) (billy.Filesystem, error) { var configDir string if s := v.ConfigFileUsed(); s != "" { configDir = filepath.Dir(s) } else { var err error if configDir, err = os.UserConfigDir(); err != nil { return nil, err } configDir = filepath.Join(configDir, text.CommandName) } chartsPath := "charts" if s := v.GetString("charts_path"); s != "" { chartsPath = s } fs := osfs.New(configDir, osfs.WithBoundOS()) switch fi, err := fs.Stat(chartsPath); { case err != nil && os.IsNotExist(err) && chartsPath == "charts": return memfs.New(), nil case err != nil && os.IsNotExist(err): fmt.Fprintln(os.Stderr, fmt.Sprintf(text.ChartsPathDoesNotExist, chartsPath)) return memfs.New(), nil case err != nil: return nil, err case !fi.IsDir(): fmt.Fprintln(os.Stderr, fmt.Sprintf(text.ChartsPathIsNotADirectory, chartsPath)) return memfs.New(), nil } return fs.Chroot(chartsPath) } // setConn sets a named connection. func setConn(name string, v interface{}) error { switch x := v.(type) { case string: return env.Cset(name, x) case []interface{}: return env.Cset(name, convSlice(x)...) case map[string]interface{}: urlstr, err := dburl.BuildURL(x) if err != nil { return err } return env.Cset(name, urlstr) } return text.ErrInvalidConfig } // runCommandOrFiles processes all the supplied commands or files. 
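// Illustrative sketch (standalone analogue, not part of this package): like
// the commandOrFile pflag.Value above, repeated -c and -f flags are appended
// to a single slice so their command-line order is preserved. All names here
// are hypothetical.
package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

type entry struct {
	command bool
	value   string
}

type collector struct {
	entries *[]entry
	command bool
}

func (c collector) Set(s string) error {
	*c.entries = append(*c.entries, entry{command: c.command, value: s})
	return nil
}

func (collector) String() string { return "" }

func (c collector) Type() string {
	if c.command {
		return "COMMAND"
	}
	return "FILE"
}

func main() {
	var entries []entry
	flags := pflag.NewFlagSet("example", pflag.ContinueOnError)
	flags.VarP(collector{&entries, true}, "command", "c", "run command")
	flags.VarP(collector{&entries, false}, "file", "f", "run file")
	_ = flags.Parse([]string{"-c", "select 1;", "-f", "init.sql", "-c", "select 2;"})
	// Expected order: [{true select 1;} {false init.sql} {true select 2;}]
	fmt.Println(entries)
}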
func runCommandOrFiles(h *handler.Handler, commandsOrFiles []CommandOrFile) func() error { return func() error { for _, c := range commandsOrFiles { h.SetSingleLineMode(c.Command) if c.Command { h.Reset([]rune(c.Value)) if err := h.Run(); err != nil { return err } } else { if err := h.Include(c.Value, false); err != nil { return err } } } return nil } } // convSlice converts a generic slice to a string slice. func convSlice(v []interface{}) []string { s := make([]string, len(v)) for i, x := range v { s[i] = fmt.Sprintf("%s", x) } return s } usql-0.19.19/stmt/000077500000000000000000000000001476173253300136615ustar00rootroot00000000000000usql-0.19.19/stmt/params.go000066400000000000000000000044221476173253300154750ustar00rootroot00000000000000package stmt import ( "unicode" "github.com/xo/usql/text" ) // Params holds information about command parameters. type Params struct { R []rune Len int } // DecodeParams decodes command parameters. func DecodeParams(params string) *Params { r := []rune(params) return &Params{ R: r, Len: len(r), } } // GetRaw reads all remaining runes. No substitution or whitespace removal is // performed. func (p *Params) GetRaw() string { s := string(p.R) p.R, p.Len = p.R[:0], 0 return s } // Get reads the next command parameter using the provided substitution func. // True indicates there are runes remaining in the command parameters to // process. func (p *Params) Get(f func(string, bool) (bool, string, error)) (bool, string, error) { i, _ := findNonSpace(p.R, 0, p.Len) if i >= p.Len { return false, "", nil } var ok bool var quote rune start := i loop: for ; i < p.Len; i++ { c, next := p.R[i], grab(p.R, i+1, p.Len) switch { case quote != 0: start := i - 1 i, ok = readString(p.R, i, p.Len, quote, "") if !ok { break loop } ok, z, err := f(string(p.R[start:i+1]), false) switch { case err != nil: return false, "", err case ok: p.R, p.Len = substitute(p.R, start, p.Len, i-start+1, z) i = start + len(z) - 1 } quote = 0 // start of single, double, or backtick string case c == '\'' || c == '"' || c == '`': quote = c case c == ':' && next != ':': if v := readVar(p.R, i, p.Len); v != nil { n := v.String() ok, z, err := f(n[1:], true) switch { case err != nil: return false, "", err case ok: p.R, p.Len = substitute(p.R, v.I, p.Len, len(n), z) i = v.I + len(z) - 1 default: i += len(n) - 1 } } case unicode.IsSpace(c): break loop } } if quote != 0 { return false, "", text.ErrUnterminatedQuotedString } v := string(p.R[start:i]) p.R = p.R[i:] p.Len = len(p.R) return true, v, nil } // GetAll retrieves all remaining command parameters using the provided // substitution func. Will return on the first encountered error. 
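// Illustrative sketch (standalone, not part of this package): decoding
// backslash-command parameters with variable interpolation, along the lines
// of the params_test.go cases below. The "foo" variable is made up.
package main

import (
	"fmt"
	"os/user"

	"github.com/xo/usql/env"
	"github.com/xo/usql/stmt"
)

func main() {
	u, err := user.Current()
	if err != nil {
		panic(err)
	}
	unquote := env.Unquote(u, false, env.Vars{"foo": "bar"})
	vals, err := stmt.DecodeParams(` :foo 'hello world' `).GetAll(unquote)
	if err != nil {
		panic(err)
	}
	// Expected (per the tests below): ["bar" "hello world"]
	fmt.Printf("%q\n", vals)
}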
func (p *Params) GetAll(f func(string, bool) (bool, string, error)) ([]string, error) { var s []string for { ok, v, err := p.Get(f) if err != nil { return s, err } if !ok { break } s = append(s, v) } return s, nil } usql-0.19.19/stmt/params_test.go000066400000000000000000000070541476173253300165400ustar00rootroot00000000000000package stmt import ( "os/user" "reflect" "testing" "github.com/xo/usql/env" "github.com/xo/usql/text" ) func TestDecodeParamsGetRaw(t *testing.T) { const exp = ` 'a string' "another string" ` p := DecodeParams(exp) s := p.GetRaw() if s != exp { t.Errorf("expected %q, got: %q", exp, s) } u, err := user.Current() if err != nil { t.Fatalf("expected no error, got: %v", err) } unquote := testUnquote(u, t, 0, exp) ok, s, err := p.Get(unquote) if err != nil { t.Fatalf("expected no error, got: %v", err) } if s != "" { t.Errorf("expected empty string, got: %q", s) } if ok { t.Errorf("expected ok=false, got: %t", ok) } v, err := p.GetAll(unquote) if err != nil { t.Fatalf("expected no error, got: %v", err) } if len(v) != 0 { t.Errorf("expected v to have length 0, got: %d", len(v)) } } func TestDecodeParamsGetAll(t *testing.T) { u, err := user.Current() if err != nil { t.Fatalf("expected no error, got: %v", err) } tests := []struct { s string exp []string err error }{ {``, nil, nil}, {` `, nil, nil}, {` :foo`, []string{`bar`}, nil}, {` :'foo`, nil, text.ErrUnterminatedQuotedString}, {`:'foo'`, []string{`'bar'`}, nil}, {`:'foo':foo`, []string{`'bar'bar`}, nil}, {`:'foo':foo:"foo"`, []string{`'bar'bar"bar"`}, nil}, {`:'foo':foo:foo`, []string{`'bar'barbar`}, nil}, {` :'foo':foo:foo`, []string{`'bar'barbar`}, nil}, {` :'foo':yes:foo`, []string{`'bar':yesbar`}, nil}, {` :foo `, []string{`bar`}, nil}, {`:foo:foo`, []string{`barbar`}, nil}, {` :foo:foo `, []string{`barbar`}, nil}, {` :foo:foo `, []string{`barbar`}, nil}, {`'hello'`, []string{`hello`}, nil}, // 14 {` 'hello''yes' `, []string{`hello'yes`}, nil}, {` 'hello\'...\'yes' `, []string{`hello'...'yes`}, nil}, {` "hello\'...\'yes" `, nil, text.ErrInvalidQuotedString}, {` "hello\"...\"yes" `, nil, text.ErrInvalidQuotedString}, {` 'hello':'yes' `, []string{`hello:'yes'`}, nil}, {` :'foo `, nil, text.ErrUnterminatedQuotedString}, {` :'foo bar`, nil, text.ErrUnterminatedQuotedString}, {` :'foo bar`, nil, text.ErrUnterminatedQuotedString}, {` :'foo bar `, nil, text.ErrUnterminatedQuotedString}, {" `foo", nil, text.ErrUnterminatedQuotedString}, {" `foo bar`", []string{"foo bar"}, nil}, {" `foo :foo`", []string{"foo :foo"}, nil}, {` :'foo':"foo"`, []string{`'bar'"bar"`}, nil}, {` :'foo' :"foo" `, []string{`'bar'`, `"bar"`}, nil}, {` :'foo' :"foo"`, []string{`'bar'`, `"bar"`}, nil}, {` :'foo' :"foo"`, []string{`'bar'`, `"bar"`}, nil}, {` :'foo' :"foo" `, []string{`'bar'`, `"bar"`}, nil}, {` :'foo' :"foo" :foo `, []string{`'bar'`, `"bar"`, `bar`}, nil}, {` :'foo':foo:"foo" `, []string{`'bar'bar"bar"`}, nil}, // 30 {` :'foo''yes':'foo' `, []string{`'bar'yes'bar'`}, nil}, {` :'foo' 'yes' :'foo' `, []string{`'bar'`, `yes`, `'bar'`}, nil}, {` 'yes':'foo':"foo"'blah''no' "\ntest" `, []string{`yes'bar'"bar"blah'no`, "\ntest"}, nil}, } for i, test := range tests { vals, err := DecodeParams(test.s).GetAll(testUnquote(u, t, i, test.s)) if err != test.err { t.Fatalf("test %d for %q expected err %v, got: %v", i, test.s, test.err, err) } if !reflect.DeepEqual(vals, test.exp) { t.Errorf("test %d for %q expected %v, got: %v", i, test.s, test.exp, vals) } } } func testUnquote(u *user.User, t *testing.T, i int, teststr string) func(string, bool) (bool, 
string, error) { f := env.Unquote(u, false, env.Vars{ "foo": "bar", }) return func(s string, isvar bool) (bool, string, error) { // t.Logf("test %d %q s: %q, isvar: %t", i, teststr, s, isvar) return f(s, isvar) } } usql-0.19.19/stmt/parse.go000066400000000000000000000206761476173253300153350ustar00rootroot00000000000000package stmt import ( "regexp" "unicode" ) // prefixCount is the number of words to extract from a prefix. const prefixCount = 6 // grab grabs i from r, or returns 0 if i >= end. func grab(r []rune, i, end int) rune { if i < end { return r[i] } return 0 } // findSpace finds first space rune in r, returning end if not found. func findSpace(r []rune, i, end int) (int, bool) { for ; i < end; i++ { if IsSpaceOrControl(r[i]) { return i, true } } return i, false } // findNonSpace finds first non space rune in r, returning end if not found. func findNonSpace(r []rune, i, end int) (int, bool) { for ; i < end; i++ { if !IsSpaceOrControl(r[i]) { return i, true } } return i, false } // findRune finds the next rune c in r, returning end if not found. func findRune(r []rune, i, end int, c rune) (int, bool) { for ; i < end; i++ { if r[i] == c { return i, true } } return i, false } // isEmptyLine returns true when r is empty or composed of only whitespace. func isEmptyLine(r []rune, i, end int) bool { _, ok := findNonSpace(r, i, end) return !ok } // identifierRE is a regexp that matches dollar tag identifiers ($tag$). var identifierRE = regexp.MustCompile(`(?i)^[a-z_][a-z0-9_]{0,127}$`) // readDollarAndTag reads a dollar style $tag$ in r, starting at i, returning // the enclosed "tag" and position, or -1 if the dollar and tag was invalid. func readDollarAndTag(r []rune, i, end int) (string, int, bool) { start, found := i, false i++ for ; i < end; i++ { if r[i] == '$' { found = true break } if i-start > 128 { break } } if !found { return "", i, false } // check valid identifier id := string(r[start+1 : i]) if id != "" && !identifierRE.MatchString(id) { return "", i, false } return id, i, true } // readString seeks to the end of a string returning the position and whether // or not the string's end was found. // // If the string's terminator was not found, then the result will be the passed // end. func readString(r []rune, i, end int, quote rune, tag string) (int, bool) { var prev, c, next rune for ; i < end; i++ { c, next = r[i], grab(r, i+1, end) switch { case quote == '\'' && c == '\\': i++ prev = 0 continue case quote == '\'' && c == '\'' && next == '\'': i++ continue case quote == '\'' && c == '\'' && prev != '\'', quote == '"' && c == '"', quote == '`' && c == '`': return i, true case quote == '$' && c == '$': if id, pos, ok := readDollarAndTag(r, i, end); ok && tag == id { return pos, true } } prev = c } return end, false } // readMultilineComment finds the end of a multiline comment (ie, '*/'). func readMultilineComment(r []rune, i, end int) (int, bool) { i++ for ; i < end; i++ { if r[i-1] == '*' && r[i] == '/' { return i, true } } return end, false } // readStringVar reads a string quoted variable. func readStringVar(r []rune, i, end int) *Var { start, q := i, grab(r, i+1, end) for i += 2; i < end; i++ { c := grab(r, i, end) if c == q { if i-start < 3 { return nil } return &Var{ I: start, End: i + 1, Quote: q, Name: string(r[start+2 : i]), } } /* // this is commented out, because need to determine what should be // the "right" behavior ... should we only allow "identifiers"? 
else if c != '_' && !unicode.IsLetter(c) && !unicode.IsNumber(c) { return nil } */ } return nil } // readVar reads variable from r. func readVar(r []rune, i, end int) *Var { if grab(r, i, end) != ':' || grab(r, i+1, end) == ':' { return nil } if end-i < 2 { return nil } if c := grab(r, i+1, end); c == '"' || c == '\'' { return readStringVar(r, i, end) } start := i i++ for ; i < end; i++ { if c := grab(r, i, end); c != '_' && !unicode.IsLetter(c) && !unicode.IsNumber(c) { break } } if i-start < 2 { return nil } return &Var{ I: start, End: i, Name: string(r[start+1 : i]), } } // readCommand reads the command and any parameters from r, returning the // offset from i for the end of command, and the end of the command parameters. // // A command is defined as the first non-blank text after \, followed by // parameters up to either the next \ or a control character (for example, \n): func readCommand(r []rune, i, end int) (int, int) { command: // find end of command for ; i < end; i++ { next := grab(r, i+1, end) switch { case next == 0: return end, end case next == '\\' || unicode.IsControl(next): i++ return i, i case unicode.IsSpace(next): i++ break command } } cmd, quote := i, rune(0) params: // find end of params for ; i < end; i++ { c, next := r[i], grab(r, i+1, end) switch { case next == 0: return cmd, end case quote == 0 && (c == '\'' || c == '"' || c == '`'): quote = c case quote != 0 && c == quote: quote = 0 // skip escaped case quote != 0 && c == '\\' && (next == quote || next == '\\'): i++ case quote == 0 && (c == '\\' || unicode.IsControl(c)): break params } } // log.Printf(">>> params: %q remaining: %q", string(r[cmd:i]), string(r[i:end])) return cmd, i } // findPrefix finds the prefix in r up to n words. func findPrefix(r []rune, n int, allowCComments, allowHashComments, allowMultilineComments bool) string { var s []rune var words int loop: for i, end := 0, len(r); i < end; i++ { // skip space + control characters if j, _ := findNonSpace(r, i, end); i != j { r, end, i = r[j:], end-j, 0 } // grab current and next character c, next := grab(r, i, end), grab(r, i+1, end) switch { // do nothing case c == 0: // statement terminator case c == ';': break loop // single line comments '--' and '//' case c == '-' && next == '-', c == '/' && next == '/' && allowCComments, c == '#' && allowHashComments: if i != 0 { s, words = appendUpperRunes(s, r[:i], ' '), words+1 } // find line end if i, _ = findRune(r, i, end, '\n'); i >= end { break } r, end, i = r[i+1:], end-i-1, -1 // multiline comments '/*' '*/' case c == '/' && next == '*' && allowMultilineComments: if i != 0 { s, words = appendUpperRunes(s, r[:i]), words+1 } // find comment end '*/' for i += 2; i < end; i++ { if grab(r, i, end) == '*' && grab(r, i+1, end) == '/' { r, end, i = r[i+2:], end-i-2, -1 break } } // add space when remaining runes begin with space, and previous // captured word did not if sl := len(s); end > 0 && sl != 0 && IsSpaceOrControl(r[0]) && !IsSpaceOrControl(s[sl-1]) { s = append(s, ' ') } // end of statement, max words, or punctuation that can be ignored case words == n || !unicode.IsLetter(c): break loop // ignore remaining, as no prefix can come after case next != '/' && !unicode.IsLetter(next): s, words = appendUpperRunes(s, r[:i+1], ' '), words+1 if next == 0 { break } if next == ';' { break loop } r, end, i = r[i+2:], end-i-2, -1 } } // trim right ' ', if any if sl := len(s); sl != 0 && s[sl-1] == ' ' { return string(s[:sl-1]) } return string(s) } // FindPrefix finds the first 6 prefix words in s. 
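// Illustrative sketch (standalone, not part of this package): FindPrefix
// extracts the leading keywords of a statement while skipping comments; the
// inputs mirror cases from parse_test.go below.
package main

import (
	"fmt"

	"github.com/xo/usql/stmt"
)

func main() {
	// Expected: SELECT INTO
	fmt.Println(stmt.FindPrefix("select /* foob */ into ", true, true, true))
	// Expected: BEGIN TRANSACTION
	fmt.Println(stmt.FindPrefix("begin transaction;\ncreate x where;", true, true, true))
}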
func FindPrefix(s string, allowCComments, allowHashComments, allowMultilineComments bool) string { return findPrefix([]rune(s), prefixCount, allowCComments, allowHashComments, allowMultilineComments) } // substitute substitutes n runes in r starting at i with the runes in s. // Dynamically grows r if necessary. func substitute(r []rune, i, end, n int, s string) ([]rune, int) { sr, rcap := []rune(s), cap(r) sn := len(sr) // grow ... tlen := len(r) + sn - n if tlen > rcap { z := make([]rune, tlen) copy(z, r) r = z } else { r = r[:rcap] } // substitute copy(r[i+sn:], r[i+n:]) copy(r[i:], sr) return r[:tlen], tlen } // substituteVar substitutes part of r, based on v, with s. func substituteVar(r []rune, v *Var, s string) ([]rune, int) { sr, rcap := []rune(s), cap(r) v.Len = len(sr) // grow ... tlen := len(r) + v.Len - (v.End - v.I) if tlen > rcap { z := make([]rune, tlen) copy(z, r) r = z } else { r = r[:rcap] } // substitute copy(r[v.I+v.Len:], r[v.End:]) copy(r[v.I:v.I+v.Len], sr) return r[:tlen], tlen } // appendUpperRunes creates a new []rune from s, with the runes in r on the end // converted to upper case. extra runes will be appended to the final []rune. func appendUpperRunes(s []rune, r []rune, extra ...rune) []rune { sl, rl, el := len(s), len(r), len(extra) sre := make([]rune, sl+rl+el) copy(sre[:sl], s) for i := 0; i < rl; i++ { sre[sl+i] = unicode.ToUpper(r[i]) } copy(sre[sl+rl:], extra) return sre } usql-0.19.19/stmt/parse_test.go000066400000000000000000000404731476173253300163710ustar00rootroot00000000000000package stmt import ( "reflect" "strings" "testing" ) func TestGrab(t *testing.T) { tests := []struct { s string i int exp rune }{ {"", 0, 0}, {"a", 0, 'a'}, {" a", 0, ' '}, {"a ", 1, ' '}, {"a", 1, 0}, } for i, test := range tests { z := []rune(test.s) r := grab(z, test.i, len(z)) if r != test.exp { t.Errorf("test %d expected %c, got: %c", i, test.exp, r) } } } func TestFindSpace(t *testing.T) { tests := []struct { s string i int exp int b bool }{ {"", 0, 0, false}, {" ", 0, 0, true}, {"a", 0, 1, false}, {"a ", 0, 1, true}, {" a ", 0, 0, true}, {"aaa", 0, 3, false}, {" a ", 1, 2, true}, {"aaa", 1, 3, false}, {" aaa", 1, 4, false}, } for i, test := range tests { z := []rune(test.s) n, b := findSpace(z, test.i, len(z)) if n != test.exp { t.Errorf("test %d expected %d, got: %d", i, test.exp, n) } if b != test.b { t.Errorf("test %d expected %t, got: %t", i, test.b, b) } } } func TestFindNonSpace(t *testing.T) { tests := []struct { s string i int exp int b bool }{ {"", 0, 0, false}, {" ", 0, 1, false}, {"a", 0, 0, true}, {"a ", 0, 0, true}, {" a ", 0, 1, true}, {" ", 0, 4, false}, {" a ", 1, 1, true}, {"aaa", 1, 1, true}, {" aaa", 1, 1, true}, {" aa", 1, 2, true}, {" ", 1, 4, false}, } for i, test := range tests { z := []rune(test.s) n, b := findNonSpace(z, test.i, len(z)) if n != test.exp { t.Errorf("test %d expected %d, got: %d", i, test.exp, n) } if b != test.b { t.Errorf("test %d expected %t, got: %t", i, test.b, b) } } } func TestIsEmptyLine(t *testing.T) { tests := []struct { s string i int exp bool }{ {"", 0, true}, {"a", 0, false}, {" a", 0, false}, {" a ", 0, false}, {" \na", 0, false}, {" \n\ta", 0, false}, {"a ", 1, true}, {" a", 1, false}, {" a ", 1, false}, {" \na", 1, false}, {" \n\t ", 1, true}, } for i, test := range tests { z := []rune(test.s) b := isEmptyLine(z, test.i, len(z)) if b != test.exp { t.Errorf("test %d expected %t, got: %t", i, test.exp, b) } } } func TestReadString(t *testing.T) { tests := []struct { s string i int exp string ok bool }{ {`'`, 0, ``, 
false}, {` '`, 1, ``, false}, {`''`, 0, `''`, true}, {`'foo' `, 0, `'foo'`, true}, {` 'foo' `, 1, `'foo'`, true}, {`"foo"`, 0, `"foo"`, true}, {"`foo`", 0, "`foo`", true}, {"`'foo'`", 0, "`'foo'`", true}, {`'foo''foo'`, 0, `'foo''foo'`, true}, {` 'foo''foo' `, 1, `'foo''foo'`, true}, {` "foo''foo" `, 1, `"foo''foo"`, true}, // escaped \" aren't allowed in strings, so the second " would be next // double quoted string {`"foo\""`, 0, `"foo\"`, true}, {` "foo\"" `, 1, `"foo\"`, true}, {`''''`, 0, `''''`, true}, {` '''' `, 1, `''''`, true}, {`''''''`, 0, `''''''`, true}, {` '''''' `, 1, `''''''`, true}, {`'''`, 0, ``, false}, {` ''' `, 1, ``, false}, {`'''''`, 0, ``, false}, {` ''''' `, 1, ``, false}, {`"fo'o"`, 0, `"fo'o"`, true}, {` "fo'o" `, 1, `"fo'o"`, true}, {`"fo''o"`, 0, `"fo''o"`, true}, {` "fo''o" `, 1, `"fo''o"`, true}, } for i, test := range tests { r := []rune(test.s) c, end := rune(strings.TrimSpace(test.s)[0]), len(r) if c != '\'' && c != '"' && c != '`' { t.Fatalf("test %d incorrect!", i) } pos, ok := readString(r, test.i+1, end, c, "") if ok != test.ok { t.Fatalf("test %d expected ok %t, got: %t", i, test.ok, ok) } if !test.ok { continue } if r[pos] != c { t.Fatalf("test %d expected last character to be %c, got: %c", i, c, r[pos]) } v := string(r[test.i : pos+1]) if n := len(v); n < 2 { t.Fatalf("test %d expected result of at least length 2, got: %d", i, n) } if v != test.exp { t.Errorf("test %d expected %q, got: %q", i, test.exp, v) } } } func TestReadCommand(t *testing.T) { tests := []struct { s string i int exp string }{ {`\c foo bar z`, 0, `\c| foo bar z|`}, // 0 {`\c foo bar z `, 0, `\c| foo bar z |`}, {`\c foo bar z `, 0, `\c| foo bar z |`}, {`\c foo bar z `, 0, `\c| foo bar z |`}, {`\c pg://blah bar z `, 0, `\c| pg://blah bar z |`}, {`\foo pg://blah bar z `, 0, `\foo| pg://blah bar z |`}, // 5 {`\a\b`, 0, `\a||\b`}, {`\a \b`, 0, `\a| |\b`}, {"\\a \n\\b", 0, "\\a| |\n\\b"}, {` \ab \bc \cd `, 5, `\bc| |\cd `}, {`\p foo \p`, 0, `\p| foo |\p`}, // 10 {`\p foo \p bar`, 0, `\p| foo |\p bar`}, {`\p\p`, 0, `\p||\p`}, {`\p \r foo`, 0, `\p| |\r foo`}, {`\print \reset foo`, 0, `\print| |\reset foo`}, {`\print \reset foo`, 9, `\reset| foo|`}, // 15 {`\print \reset foo `, 9, `\reset| foo |`}, {`\print \reset foo bar `, 9, `\reset| foo bar |`}, {`\c 'foo bar' z`, 0, `\c| 'foo bar' z|`}, {`\c foo "bar " z `, 0, `\c| foo "bar " z |`}, {"\\c `foo bar z ` ", 0, "\\c| `foo bar z ` |"}, // 20 {`\c 'foob':foo:bar'test' `, 0, `\c| 'foob':foo:bar'test' |`}, {"\\a \n\\b\\c\n", 0, "\\a| |\n\\b\\c\n"}, {`\a'foob' \b`, 0, `\a'foob'| |\b`}, {`\foo 'test' "bar"\print`, 0, `\foo| 'test' "bar"|\print`}, // 25 {`\foo 'test' "bar" \print`, 0, `\foo| 'test' "bar" |\print`}, {`\afoob' \b`, 0, `\afoob'| |\b`}, {`\afoob' '\b `, 0, `\afoob'| '\b |`}, {`\afoob' '\b '\print`, 0, `\afoob'| '\b '|\print`}, {`\afoob' '\b ' \print`, 0, `\afoob'| '\b ' |\print`}, // 30 {`\afoob' '\b ' \print `, 0, `\afoob'| '\b ' |\print `}, {"\\foo `foob'foob'\\print", 0, "\\foo| `foob'foob'\\print|"}, {"\\foo `foob'foob' \\print", 0, "\\foo| `foob'foob' \\print|"}, {`\foo "foob'foob'\\print`, 0, `\foo| "foob'foob'\\print|`}, {`\foo "foob'foob' \\print`, 0, `\foo| "foob'foob' \\print|`}, // 35 {`\foo "\""\print`, 0, `\foo| "\""|\print`}, {`\foo "\"'"\print`, 0, `\foo| "\"'"|\print`}, {`\foo "\"''"\print`, 0, `\foo| "\"''"|\print`}, } for i, test := range tests { z := []rune(test.s) if !strings.Contains(test.exp, "|") { t.Fatalf("test %d expected value is invalid (missing |): %q", i, test.exp) } v := strings.Split(test.exp, "|") 
if len(v) != 3 { t.Fatalf("test %d should have 3 expected values, has: %d", i, len(v)) } cmd, params := readCommand(z, test.i, len(z)) if s := string(z[test.i:cmd]); s != v[0] { t.Errorf("test %d expected command to be `%s`, got: `%s` [%d, %d]", i, v[0], s, cmd, params) } if s := string(z[cmd:params]); s != v[1] { t.Errorf("test %d expected params to be `%s`, got: `%s` [%d, %d]", i, v[1], s, cmd, params) } if s := string(z[params:]); s != v[2] { t.Errorf("test %d expected remaining to be `%s`, got: `%s`", i, v[2], s) } } } func TestFindPrefix(t *testing.T) { tests := []struct { s string w int exp string }{ {"", 4, ""}, {" ", 4, ""}, {" ", 4, ""}, {" select ", 4, "SELECT"}, {" select to ", 4, "SELECT TO"}, {" select to ", 4, "SELECT TO"}, // 5 {" select to ", 4, "SELECT TO"}, {"select into from", 2, "SELECT INTO"}, {"select into * from", 4, "SELECT INTO"}, {" select into * from ", 4, "SELECT INTO"}, {" select \t into \n * \t\t\n\n\n from ", 4, "SELECT INTO"}, // 10 {" select\n\n\tb\t\tzfrom j\n\n ", 2, "SELECT B"}, {"select/* foob */into", 4, "SELECTINTO"}, // 12 {"select/* foob */\tinto", 4, "SELECT INTO"}, {"select/* foob */ into", 4, "SELECT INTO"}, {"select/* foob */ into ", 4, "SELECT INTO"}, {"select /* foob */ into ", 4, "SELECT INTO"}, {" select /* foob */ into ", 4, "SELECT INTO"}, {" select * --test\n from where \n\nfff", 4, "SELECT"}, {"/*idreamedital*/foo//bar\n/* nothing */test\n\n\nwe made /*\n\n\n\n*/ \t it ", 5, "FOO TEST WE MADE IT"}, {" --yes\n//no\n\n\t/*whatever*/ ", 4, ""}, // 20 {"/*/*test*/*/ select ", 4, ""}, {"/*/*test*/*/ select ", 4, ""}, {"//", 4, ""}, {"-", 4, ""}, {"* select", 4, ""}, {"/**/", 4, ""}, {"--\n\t\t\thello,\t--", 4, "HELLO"}, {"/* */\n\n\n\tselect/*--\n*/\t\b\bzzz", 4, "SELECT ZZZ"}, // 28 {"n\nn\n\nn\tn", 7, "N N N N"}, {"n\nn\n\nn\tn", 1, "N"}, {"--\n/* */n/* */\nn\n--\nn\tn", 7, "N N N N"}, {"--\n/* */n\n/* */\nn\n--\nn\tn", 7, "N N N N"}, {"\n\n/* */\nn n", 7, "N N"}, {"\n\n/* */\nn/* */n", 7, "NN"}, {"\n\n/* */\nn /* */n", 7, "N N"}, {"\n\n/* */\nn/* */\nn", 7, "N N"}, {"\n\n/* */\nn/* */ n", 7, "N N"}, {"*/foob", 7, ""}, {"*/ \n --\nfoob", 7, ""}, {"--\n\n--\ntest", 7, "TEST"}, // 40 {"\b\btest", 7, "TEST"}, {"select/*\r\n\r\n*/blah", 7, "SELECTBLAH"}, {"\r\n\r\nselect from where", 8, "SELECT FROM WHERE"}, {"\r\n\b\bselect 1;create 2;", 8, "SELECT"}, {"\r\n\bbegin transaction;\ncreate x where;", 8, "BEGIN TRANSACTION"}, // 45 {"begin;test;create;awesome", 3, "BEGIN"}, {" /* */ ; begin; ", 5, ""}, {" /* foo */ test; test", 5, "TEST"}, {";test", 5, ""}, {"\b\b\t;test", 5, ""}, {"\b\t; test", 5, ""}, {"\b\tfoob; test", 5, "FOOB"}, {" TEST /*\n\t\b*/\b\t;foob", 10, "TEST"}, {"begin transaction\n\tinsert into x;\ncommit;", 6, "BEGIN TRANSACTION INSERT INTO X"}, {"--\nbegin /* */transaction/* */\n/* */\tinsert into x;--/* */\ncommit;", 6, "BEGIN TRANSACTION INSERT INTO X"}, {"#\nbegin /* */transaction/* */\n/* */\t#\ninsert into x;#\n--/* */\ncommit;", 6, "BEGIN TRANSACTION INSERT INTO X"}, } for i, test := range tests { if p := findPrefix([]rune(test.s), test.w, true, true, true); p != test.exp { t.Errorf("test %d %q expected %q, got: %q", i, test.s, test.exp, p) } } } func TestReadVar(t *testing.T) { tests := []struct { s string i int exp *Var }{ {``, 0, nil}, {`:`, 0, nil}, {` :`, 0, nil}, {`a:`, 0, nil}, {`a:a`, 0, nil}, {`: `, 0, nil}, {`: a `, 0, nil}, {`:a`, 0, v(0, 2, `a`)}, // 7 {`:ab`, 0, v(0, 3, `ab`)}, {`:a `, 0, v(0, 2, `a`)}, {`:a_ `, 0, v(0, 3, `a_`)}, {":a_\t ", 0, v(0, 3, `a_`)}, {":a_\n ", 0, v(0, 3, `a_`)}, {`:a9`, 0, v(0, 3, 
`a9`)}, // 13 {`:ab9`, 0, v(0, 4, `ab9`)}, {`:a 9`, 0, v(0, 2, `a`)}, {`:a_9 `, 0, v(0, 4, `a_9`)}, {":a_9\t ", 0, v(0, 4, `a_9`)}, {":a_9\n ", 0, v(0, 4, `a_9`)}, {`:a_;`, 0, v(0, 3, `a_`)}, // 19 {`:a_\`, 0, v(0, 3, `a_`)}, {`:a_$`, 0, v(0, 3, `a_`)}, {`:a_'`, 0, v(0, 3, `a_`)}, {`:a_"`, 0, v(0, 3, `a_`)}, {`:ab `, 0, v(0, 3, `ab`)}, // 24 {`:ab123 `, 0, v(0, 6, `ab123`)}, {`:ab123`, 0, v(0, 6, `ab123`)}, {`:'`, 0, nil}, // 27 {`:' `, 0, nil}, {`:' a`, 0, nil}, {`:' a `, 0, nil}, {`:"`, 0, nil}, {`:" `, 0, nil}, {`:" a`, 0, nil}, {`:" a `, 0, nil}, {`:''`, 0, nil}, // 35 {`:'' `, 0, nil}, {`:'' a`, 0, nil}, {`:""`, 0, nil}, {`:"" `, 0, nil}, {`:"" a`, 0, nil}, {`:' `, 0, nil}, // 41 {`:' `, 0, nil}, {`:" `, 0, nil}, {`:" `, 0, nil}, {`:'a'`, 0, v(0, 4, `a`, `'`)}, // 45 {`:'a' `, 0, v(0, 4, `a`, `'`)}, {`:'ab'`, 0, v(0, 5, `ab`, `'`)}, {`:'ab' `, 0, v(0, 5, `ab`, `'`)}, {`:'ab ' `, 0, v(0, 7, `ab `, `'`)}, {`:"a"`, 0, v(0, 4, `a`, `"`)}, // 50 {`:"a" `, 0, v(0, 4, `a`, `"`)}, {`:"ab"`, 0, v(0, 5, `ab`, `"`)}, {`:"ab" `, 0, v(0, 5, `ab`, `"`)}, {`:"ab " `, 0, v(0, 7, `ab `, `"`)}, {`:型`, 0, v(0, 2, "型")}, // 55 {`:'型'`, 0, v(0, 4, "型", `'`)}, {`:"型"`, 0, v(0, 4, "型", `"`)}, {` :型 `, 1, v(1, 3, "型")}, {` :'型' `, 1, v(1, 5, "型", `'`)}, {` :"型" `, 1, v(1, 5, "型", `"`)}, {`:型示師`, 0, v(0, 4, "型示師")}, // 61 {`:'型示師'`, 0, v(0, 6, "型示師", `'`)}, {`:"型示師"`, 0, v(0, 6, "型示師", `"`)}, {` :型示師 `, 1, v(1, 5, "型示師")}, {` :'型示師' `, 1, v(1, 7, "型示師", `'`)}, {` :"型示師" `, 1, v(1, 7, "型示師", `"`)}, } for i, test := range tests { z := []rune(test.s) v := readVar(z, test.i, len(z)) if !reflect.DeepEqual(v, test.exp) { t.Errorf("test %d expected %#v, got: %#v", i, test.exp, v) } if test.exp != nil && v != nil { n := string(z[v.I+1 : v.End]) if v.Quote != 0 { if c := rune(n[0]); c != v.Quote { t.Errorf("test %d expected var to start with quote %c, got: %c", i, c, v.Quote) } if c := rune(n[len(n)-1]); c != v.Quote { t.Errorf("test %d expected var to end with quote %c, got: %c", i, c, v.Quote) } n = n[1 : len(n)-1] } if n != test.exp.Name { t.Errorf("test %d expected var name of `%s`, got: `%s`", i, test.exp.Name, n) } } } } func TestSubstitute(t *testing.T) { a512 := sl(512, 'a') b512 := sl(512, 'a') b512 = b512[:1] + "b" + b512[2:] if len(b512) != 512 { t.Fatalf("b512 should be length 512, is: %d", len(b512)) } tests := []struct { s string i int n int t string exp string }{ {"", 0, 0, "", ""}, {"a", 0, 1, "b", "b"}, {"ab", 1, 1, "cd", "acd"}, {"", 0, 0, "ab", "ab"}, {"abc", 1, 2, "d", "ad"}, {a512, 1, 1, "b", b512}, {"foo", 0, 1, "bar", "baroo"}, } for i, test := range tests { r := []rune(test.s) r, rlen := substitute(r, test.i, len(r), test.n, test.t) if rlen != len(test.exp) { t.Errorf("test %d expected length %d, got: %d", i, len(test.exp), rlen) } if s := string(r); s != test.exp { t.Errorf("test %d expected %q, got %q", i, test.exp, s) } } } func TestSubstituteVar(t *testing.T) { a512 := sl(512, 'a') tests := []struct { s string v *Var sub string exp string }{ {`:a`, v(0, 2, `a`), `x`, `x`}, {` :a`, v(1, 3, `a`), `x`, ` x`}, {`:a `, v(0, 2, `a`), `x`, `x `}, {` :a `, v(1, 3, `a`), `x`, ` x `}, {` :'a' `, v(1, 5, `a`, `'`), `'x'`, ` 'x' `}, {` :"a" `, v(1, 5, "a", `"`), `"x"`, ` "x" `}, {`:a`, v(0, 2, `a`), ``, ``}, // 6 {` :a`, v(1, 3, `a`), ``, ` `}, {`:a `, v(0, 2, `a`), ``, ` `}, {` :a `, v(1, 3, `a`), ``, ` `}, {` :'a' `, v(1, 5, `a`, `'`), ``, ` `}, {` :"a" `, v(1, 5, "a", `"`), "", ` `}, {` :aaa `, v(1, 5, "aaa"), "", " "}, // 12 {` :aaa `, v(1, 5, "aaa"), a512, " " + a512 + " "}, {` :` + a512 + ` `, 
v(1, len(a512)+2, a512), "", " "}, {`:foo`, v(0, 4, "foo"), "这是一个", `这是一个`}, // 15 {`:foo `, v(0, 4, "foo"), "这是一个", `这是一个 `}, {` :foo`, v(1, 5, "foo"), "这是一个", ` 这是一个`}, {` :foo `, v(1, 5, "foo"), "这是一个", ` 这是一个 `}, {`:'foo'`, v(0, 6, `foo`, `'`), `'这是一个'`, `'这是一个'`}, // 19 {`:'foo' `, v(0, 6, `foo`, `'`), `'这是一个'`, `'这是一个' `}, {` :'foo'`, v(1, 7, `foo`, `'`), `'这是一个'`, ` '这是一个'`}, {` :'foo' `, v(1, 7, `foo`, `'`), `'这是一个'`, ` '这是一个' `}, {`:"foo"`, v(0, 6, `foo`, `"`), `"这是一个"`, `"这是一个"`}, // 23 {`:"foo" `, v(0, 6, `foo`, `"`), `"这是一个"`, `"这是一个" `}, {` :"foo"`, v(1, 7, `foo`, `"`), `"这是一个"`, ` "这是一个"`}, {` :"foo" `, v(1, 7, `foo`, `"`), `"这是一个"`, ` "这是一个" `}, {`:型`, v(0, 2, `型`), `x`, `x`}, // 27 {` :型`, v(1, 3, `型`), `x`, ` x`}, {`:型 `, v(0, 2, `型`), `x`, `x `}, {` :型 `, v(1, 3, `型`), `x`, ` x `}, {` :'型' `, v(1, 5, `型`, `'`), `'x'`, ` 'x' `}, {` :"型" `, v(1, 5, "型", `"`), `"x"`, ` "x" `}, {`:型`, v(0, 2, `型`), ``, ``}, // 33 {` :型`, v(1, 3, `型`), ``, ` `}, {`:型 `, v(0, 2, `型`), ``, ` `}, {` :型 `, v(1, 3, `型`), ``, ` `}, {` :'型' `, v(1, 5, `型`, `'`), ``, ` `}, {` :"型" `, v(1, 5, "型", `"`), "", ` `}, {`:型示師`, v(0, 4, `型示師`), `本門台初埼本門台初埼`, `本門台初埼本門台初埼`}, // 39 {` :型示師`, v(1, 5, `型示師`), `本門台初埼本門台初埼`, ` 本門台初埼本門台初埼`}, {`:型示師 `, v(0, 4, `型示師`), `本門台初埼本門台初埼`, `本門台初埼本門台初埼 `}, {` :型示師 `, v(1, 5, `型示師`), `本門台初埼本門台初埼`, ` 本門台初埼本門台初埼 `}, {` :型示師 `, v(1, 5, `型示師`), `本門台初埼本門台初埼`, ` 本門台初埼本門台初埼 `}, {` :'型示師' `, v(1, 7, `型示師`), `'本門台初埼本門台初埼'`, ` '本門台初埼本門台初埼' `}, {` :"型示師" `, v(1, 7, `型示師`), `"本門台初埼本門台初埼"`, ` "本門台初埼本門台初埼" `}, } for i, test := range tests { z := []rune(test.s) y, l := substituteVar(z, test.v, test.sub) if sl := len([]rune(test.sub)); test.v.Len != sl { t.Errorf("test %d, expected v.Len to be %d, got: %d", i, sl, test.v.Len) } if el := len([]rune(test.exp)); l != el { t.Errorf("test %d expected l==%d, got: %d", i, el, l) } if s := string(y); s != test.exp { t.Errorf("test %d expected `%s`, got: `%s`", i, test.exp, s) } } } func v(i, end int, n string, x ...string) *Var { z := &Var{ I: i, End: end, Name: n, } if len(x) != 0 { z.Quote = []rune(x[0])[0] } return z } usql-0.19.19/stmt/stmt.go000066400000000000000000000264261476173253300152110ustar00rootroot00000000000000// Package stmt contains a statement buffer implementation. package stmt import ( "bytes" "unicode" ) // MinCapIncrease is the minimum amount by which to grow a Stmt.Buf. const MinCapIncrease = 512 // Var holds information about a variable. type Var struct { // I is where the variable starts (ie, ':') in Stmt.Buf. I int // End is where the variable ends in Stmt.Buf. End int // Quote is the quote character used if the variable was quoted, 0 // otherwise. Quote rune // Name is the actual variable name excluding ':' and any enclosing quote // characters. Name string // Len is the length of the replaced variable. Len int // Defined indicates whether the variable has been defined. Defined bool } // String satisfies the fmt.Stringer interface. func (v *Var) String() string { var q string switch { case v.Quote == '\\': return "\\" + v.Name case v.Quote != 0: q = string(v.Quote) } return ":" + q + v.Name + q } // Stmt is a reusable statement buffer that handles reading and parsing // SQL-like statements. type Stmt struct { // f is the rune source. f func() ([]rune, error) // allowDollar allows dollar quoted strings (ie, $$ ... $$ or $tag$ ... $tag$). allowDollar bool // allowMultilineComments allows multiline comments (ie, /* ... */) allowMultilineComments bool // allowCComments allows C-style comments (ie, // ... 
) allowCComments bool // allowHashComments allows hash comments (ie, # ... ) allowHashComments bool // Buf is the statement buffer Buf []rune // Len is the current len of any statement in Buf. Len int // Prefix is the detected prefix of the statement. Prefix string // Vars is the list of encountered variables. Vars []*Var // r is the unprocessed runes. r []rune // rlen is the number of unprocessed runes. rlen int // quote indicates currently parsing a quoted string. quote rune // quoteDollarTag is the parsed tag of a dollar quoted string quoteDollarTag string // multilineComment is state of multiline comment processing multilineComment bool // balanceCount is the balanced paren count balanceCount int // ready indicates that a complete statement has been parsed ready bool } // New creates a new Stmt using the supplied rune source f. func New(f func() ([]rune, error), opts ...Option) *Stmt { b := &Stmt{ f: f, } // apply opts for _, o := range opts { o(b) } return b } // String satisfies fmt.Stringer. func (b *Stmt) String() string { return string(b.Buf) } // RawString returns the non-interpolated version of the statement buffer. func (b *Stmt) RawString() string { if b.Len == 0 { return "" } s, z := string(b.Buf), new(bytes.Buffer) var i int // deinterpolate vars for _, v := range b.Vars { if !v.Defined { continue } if len(s) > i { z.WriteString(s[i:v.I]) } if v.Quote != '\\' { z.WriteRune(':') } if v.Quote != 0 { z.WriteRune(v.Quote) } z.WriteString(v.Name) if v.Quote != 0 && v.Quote != '\\' { z.WriteRune(v.Quote) } i = v.I + v.Len } // add remaining if len(s) > i { z.WriteString(s[i:]) } return z.String() } // Ready returns true when the statement buffer contains a non empty, balanced // statement that has been properly terminated (ie, ended with a semicolon). func (b *Stmt) Ready() bool { return b.ready } // Reset resets the statement buffer. func (b *Stmt) Reset(r []rune) { // reset buf b.Buf, b.Len, b.Prefix, b.Vars = nil, 0, "", nil // quote state b.quote, b.quoteDollarTag = 0, "" // multicomment state b.multilineComment = false // balance state b.balanceCount = 0 // ready state b.ready = false if r != nil { b.r, b.rlen = r, len(r) } } // lineend is the slice to use when appending a line. var lineend = []rune{'\n'} // Next reads the next statement from the rune source, returning when either // the statement has been terminated, or a meta command has been read from the // rune source. After a call to Next, the collected statement is available in // Stmt.Buf, or call Stmt.String() to convert it to a string. // // After a call to Next, Reset should be called if the extracted statement was // executed (ie, processed). Note that the rune source supplied to New will be // called again only after any remaining collected runes have been processed. // // Example: // // buf := stmt.New(runeSrc) // for { // cmd, params, err := buf.Next(unquoteFunc) // if err { /* ... */ } // // execute, quit := buf.Ready() || cmd == "g", cmd == "q" // // // process command ... // switch cmd { // /* ... */ // } // // if quit { // break // } // // if execute { // s := buf.String() // res, err := db.Query(s) // /* handle database ... 
*/ // buf.Reset(nil) // } // } func (b *Stmt) Next(unquote func(string, bool) (bool, string, error)) (string, string, error) { var err error var i int // no runes to process, grab more if b.rlen == 0 { b.r, err = b.f() if err != nil { return "", "", err } b.rlen = len(b.r) } var cmd, params string var ok bool parse: for ; i < b.rlen; i++ { // log.Printf(">> (%c) %d", b.r[i], i) // grab c, next c, next := b.r[i], grab(b.r, i+1, b.rlen) switch { // find end of string case b.quote != 0: i, ok = readString(b.r, i, b.rlen, b.quote, b.quoteDollarTag) if ok { b.quote, b.quoteDollarTag = 0, "" } // find end of multiline comment case b.multilineComment: i, ok = readMultilineComment(b.r, i, b.rlen) b.multilineComment = !ok // start of single or double quoted string case c == '\'' || c == '"': b.quote = c // start of dollar quoted string literal (postgres) case b.allowDollar && c == '$' && (next == '$' || next == '_' || unicode.IsLetter(next)): var id string id, i, ok = readDollarAndTag(b.r, i, b.rlen) if ok { b.quote, b.quoteDollarTag = '$', id } // start of sql comment, skip to end of line case c == '-' && next == '-': i = b.rlen // start of c-style comment, skip to end of line case b.allowCComments && c == '/' && next == '/': i = b.rlen // start of hash comment, skip to end of line case b.allowHashComments && c == '#': i = b.rlen // start of multiline comment case b.allowMultilineComments && c == '/' && next == '*': b.multilineComment = true i++ // variable declaration case c == ':' && next != ':': if v := readVar(b.r, i, b.rlen); v != nil { var q string if v.Quote != 0 { q = string(v.Quote) } b.Vars = append(b.Vars, v) if ok, z, _ := unquote(q+v.Name+q, true); ok { v.Defined = true b.r, b.rlen = substituteVar(b.r, v, z) i-- } if b.Len != 0 { v.I += b.Len + 1 } } // unbalance case c == '(': b.balanceCount++ // balance case c == ')': b.balanceCount = max(0, b.balanceCount-1) // continue processing quoted string, multiline comment, or unbalanced statements case b.quote != 0 || b.multilineComment || b.balanceCount != 0: // skip escaped backslash, semicolon, colon case c == '\\' && (next == '\\' || next == ';' || next == ':'): // FIXME: the below works, but it may not make sense to keep this enabled. // FIXME: also, the behavior is slightly different than psql v := &Var{ I: i, End: i + 2, Quote: '\\', Name: string(next), } b.Vars = append(b.Vars, v) b.r, b.rlen = substituteVar(b.r, v, string(next)) if b.Len != 0 { v.I += b.Len + 1 } // start of command case c == '\\': // parse command and params end positions cend, pend := readCommand(b.r, i, b.rlen) cmd, params = string(b.r[i:cend]), string(b.r[cend:pend]) // remove command and params from r b.r = append(b.r[:i], b.r[pend:]...) b.rlen = len(b.r) break parse // terminated case c == ';': b.ready = true i++ break parse } } // fix i -- i will be +1 when passing the length, which is a problem as the // '\n' will get copied from the source. i = min(i, b.rlen) // append line to buf when: // 1. in a quoted string (ie, ', ", or $) // 2. in a multiline comment // 3. line is not empty // // DO NOT append to buf when: // 1. 
line is empty/whitespace and not in a string/multiline comment empty := isEmptyLine(b.r, 0, i) appendLine := b.quote != 0 || b.multilineComment || !empty if !b.multilineComment && cmd != "" && empty { appendLine = false } if appendLine { // skip leading space when empty st := 0 if b.Len == 0 { st, _ = findNonSpace(b.r, 0, i) } // log.Printf(">> appending: `%s`", string(r[st:i])) b.Append(b.r[st:i], lineend) } // set prefix b.Prefix = findPrefix(b.Buf, prefixCount, b.allowCComments, b.allowHashComments, b.allowMultilineComments) // reset r b.r = b.r[i:] b.rlen = len(b.r) // log.Printf("returning from NEXT: `%s`", string(b.Buf)) // log.Printf(">>>>>>>>>>>> REMAIN: `%s`", string(b.r)) // log.Printf(">>>>>>>>>>>> CMD: `%s`", cmd) // log.Printf(">>>>>>>>>>>> PARAMS: %v", params) return cmd, params, nil } // Append appends r to b.Buf separated by sep when b.Buf is not already empty. // // Dynamically grows b.Buf as necessary to accommodate r and the separator. // Specifically, when b.Buf is not empty, b.Buf will grow by increments of // MinCapIncrease. // // After a call to Append, b.Len will be len(b.Buf)+len(sep)+len(r). Call Reset // to reset the Buf. func (b *Stmt) Append(r, sep []rune) { rlen := len(r) // initial if b.Buf == nil { b.Buf, b.Len = r, rlen return } blen, seplen := b.Len, len(sep) tlen := blen + rlen + seplen // grow if bcap := cap(b.Buf); tlen > bcap { n := tlen + 2*rlen n += MinCapIncrease - (n % MinCapIncrease) z := make([]rune, blen, n) copy(z, b.Buf) b.Buf = z } b.Buf = b.Buf[:tlen] copy(b.Buf[blen:], sep) copy(b.Buf[blen+seplen:], r) b.Len = tlen } // AppendString is a util func wrapping Append. func (b *Stmt) AppendString(s, sep string) { b.Append([]rune(s), []rune(sep)) } // State returns a string representing the state of statement parsing. func (b *Stmt) State() string { switch { case b.quote != 0: return string(b.quote) case b.multilineComment: return "*" case b.balanceCount != 0: return "(" case b.Len != 0: return "-" } return "=" } // Option is a statement buffer option. type Option func(*Stmt) // WithAllowDollar is a statement buffer option to set allowing dollar strings (ie, // $$text$$ or $tag$text$tag$). func WithAllowDollar(enable bool) Option { return func(b *Stmt) { b.allowDollar = enable } } // WithAllowMultilineComments is a statement buffer option to set allowing multiline comments // (ie, /* ... */). func WithAllowMultilineComments(enable bool) Option { return func(b *Stmt) { b.allowMultilineComments = enable } } // WithAllowCComments is a statement buffer option to set allowing C-style comments // (ie, // ...). func WithAllowCComments(enable bool) Option { return func(b *Stmt) { b.allowCComments = enable } } // WithAllowHashComments is a statement buffer option to set allowing hash comments // (ie, # ...). func WithAllowHashComments(enable bool) Option { return func(b *Stmt) { b.allowHashComments = enable } } // IsSpaceOrControl is a special test for either a space or a control (ie, \b) // characters. func IsSpaceOrControl(r rune) bool { return unicode.IsSpace(r) || unicode.IsControl(r) } // RunesLastIndex returns the last index in r of needle, or -1 if not found. 
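// Illustrative sketch (standalone, not part of this package): feeding the
// statement buffer line by line until a terminated statement is ready,
// roughly following the example in the Next documentation above. The SQL
// text and the pass-through unquote func are made up for illustration.
package main

import (
	"fmt"
	"io"

	"github.com/xo/usql/stmt"
)

func main() {
	lines := []string{"select 1,", "2;"}
	buf := stmt.New(func() ([]rune, error) {
		if len(lines) == 0 {
			return nil, io.EOF
		}
		next := lines[0]
		lines = lines[1:]
		return []rune(next), nil
	}, stmt.WithAllowDollar(true), stmt.WithAllowMultilineComments(true))
	unquote := func(s string, isvar bool) (bool, string, error) { return false, s, nil }
	for {
		cmd, _, err := buf.Next(unquote)
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}
		if buf.Ready() || cmd == `\g` {
			// Expected: execute: "select 1,\n2;"
			fmt.Printf("execute: %q\n", buf.String())
			buf.Reset(nil)
		}
	}
}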
func RunesLastIndex(r []rune, needle rune) int { i := len(r) - 1 for ; i >= 0; i-- { if r[i] == needle { return i } } return i } usql-0.19.19/stmt/stmt_test.go000066400000000000000000000316431476173253300162450ustar00rootroot00000000000000package stmt import ( "io" "os/user" "reflect" "strings" "testing" "github.com/xo/usql/env" ) func sl(n int, r rune) string { z := make([]rune, n) for i := 0; i < n; i++ { z[i] = r } return string(z) } func TestAppend(t *testing.T) { a512 := sl(512, 'a') // b1024 := sl(1024, 'b') tests := []struct { s []string exp string l int c int }{ {[]string{""}, "", 0, 0}, {[]string{"", ""}, "\n", 1, MinCapIncrease}, {[]string{"", "", ""}, "\n\n", 2, MinCapIncrease}, {[]string{"", "", "", ""}, "\n\n\n", 3, MinCapIncrease}, {[]string{"a", ""}, "a\n", 2, 2}, // 4 {[]string{"a", "b", ""}, "a\nb\n", 4, MinCapIncrease}, {[]string{"a", "b", "c", ""}, "a\nb\nc\n", 6, MinCapIncrease}, {[]string{"", "a", ""}, "\na\n", 3, MinCapIncrease}, // 7 {[]string{"", "a", "b", ""}, "\na\nb\n", 5, MinCapIncrease}, {[]string{"", "a", "b", "c", ""}, "\na\nb\nc\n", 7, MinCapIncrease}, {[]string{"", "foo"}, "\nfoo", 4, MinCapIncrease}, // 10 {[]string{"", "foo", ""}, "\nfoo\n", 5, MinCapIncrease}, {[]string{"foo", "", "bar"}, "foo\n\nbar", 8, MinCapIncrease}, {[]string{"", "foo", "bar"}, "\nfoo\nbar", 8, MinCapIncrease}, {[]string{a512}, a512, 512, 512}, // 14 {[]string{a512, a512}, a512 + "\n" + a512, 1025, 5 * MinCapIncrease}, {[]string{a512, a512, a512}, a512 + "\n" + a512 + "\n" + a512, 1538, 5 * MinCapIncrease}, {[]string{a512, ""}, a512 + "\n", 513, 2 * MinCapIncrease}, // 17 {[]string{a512, "", "foo"}, a512 + "\n\nfoo", 517, 2 * MinCapIncrease}, } for i, test := range tests { b := new(Stmt) for _, s := range test.s { b.AppendString(s, "\n") } if s := b.String(); s != test.exp { t.Errorf("test %d expected result of `%s`, got: `%s`", i, test.exp, s) } if b.Len != test.l { t.Errorf("test %d expected resulting len of %d, got: %d", i, test.l, b.Len) } if c := cap(b.Buf); c != test.c { t.Errorf("test %d expected resulting cap of %d, got: %d", i, test.c, c) } b.Reset(nil) if b.Len != 0 { t.Errorf("test %d expected after reset len of 0, got: %d", i, b.Len) } b.AppendString("", "\n") if s := b.String(); s != "" { t.Errorf("test %d expected after reset appending an empty string would result in empty string, got: `%s`", i, s) } } } func TestVariedSeparator(t *testing.T) { b := new(Stmt) b.AppendString("foo", "\n") b.AppendString("foo", "bar") if b.Len != 9 { t.Errorf("expected len of 9, got: %d", b.Len) } if s := b.String(); s != "foobarfoo" { t.Errorf("expected `%s`, got: `%s`", "foobarfoo", s) } if c := cap(b.Buf); c != MinCapIncrease { t.Errorf("expected cap of %d, got: %d", MinCapIncrease, c) } } func TestNextResetState(t *testing.T) { u, err := user.Current() if err != nil { t.Fatalf("expected no error, got: %v", err) } unquote := env.Unquote(u, false, env.Vars{}) tests := []struct { s string stmts []string cmds []string state string vars []string }{ {``, nil, []string{`|`}, `=`, nil}, // 0 {`;`, []string{`;`}, []string{`|`}, `=`, nil}, {` ; `, []string{`;`}, []string{`|`, `|`}, `=`, nil}, {` \v `, nil, []string{`\v| `}, `=`, nil}, {` \v \p`, nil, []string{`\v| `, `\p|`}, `=`, nil}, {` \v foo \p`, nil, []string{`\v| foo `, `\p|`}, `=`, nil}, // 5 {` \v foo bar \p zz`, nil, []string{`\v| foo bar `, `\p| zz`}, `=`, nil}, {` \very foo bar \print zz`, nil, []string{`\very| foo bar `, `\print| zz`}, `=`, nil}, {`select 1;`, []string{`select 1;`}, []string{`|`}, `=`, nil}, {`select 1\g`, 
[]string{`select 1`}, []string{`\g|`}, `=`, nil}, {`select 1 \g`, []string{`select 1 `}, []string{`\g|`}, `=`, nil}, // 10 {` select 1 \g`, []string{`select 1 `}, []string{`\g|`}, `=`, nil}, {` select 1 \g `, []string{`select 1 `}, []string{`\g| `}, `=`, nil}, {`select 1; select 1\g`, []string{`select 1;`, `select 1`}, []string{`|`, `\g|`}, `=`, nil}, {"select 1\n\\g", []string{`select 1`}, []string{`|`, `\g|`}, `=`, nil}, {"select 1 \\g\n\n\n\n\\v", []string{`select 1 `}, []string{`\g|`, `|`, `|`, `|`, `\v|`}, `=`, nil}, // 15 {"select 1 \\g\n\n\n\n\\v foob \\p zzz \n\n", []string{`select 1 `}, []string{`\g|`, `|`, `|`, `|`, `\v| foob `, `\p| zzz `, `|`, `|`}, `=`, nil}, {" select 1 \\g \\p \n select (15)\\g", []string{`select 1 `, `select (15)`}, []string{`\g| `, `\p| `, `\g|`}, `=`, nil}, {" select 1 ( \\g ) \n ;", []string{"select 1 ( \\g ) \n ;"}, []string{`|`, `|`}, `=`, nil}, { // 19 " select 1\n;select 2\\g select 3; \\p \\z foo bar ", []string{"select 1\n;", "select 2"}, []string{`|`, `|`, `\g| select 3; `, `\p| `, `\z| foo bar `}, "=", nil, }, { // 20 " select 1\\g\n\n\tselect 2\\g\n select 3; \\p \\z foo bar \\p\\p select * from; \n\\p", []string{`select 1`, `select 2`, `select 3;`}, []string{`\g|`, `|`, `\g|`, `|`, `\p| `, `\z| foo bar `, `\p|`, `\p| select * from; `, `\p|`}, "=", nil, }, {"select '';", []string{"select '';"}, []string{"|"}, "=", nil}, // 21 {"select 'a''b\nz';", []string{"select 'a''b\nz';"}, []string{"|", "|"}, "=", nil}, {"select 'a' 'b\nz';", []string{"select 'a' 'b\nz';"}, []string{"|", "|"}, "=", nil}, {"select \"\";", []string{"select \"\";"}, []string{"|"}, "=", nil}, {"select \"\n\";", []string{"select \"\n\";"}, []string{"|", "|"}, "=", nil}, // 25 {"select $$$$;", []string{"select $$$$;"}, []string{"|"}, "=", nil}, {"select $$\nfoob(\n$$;", []string{"select $$\nfoob(\n$$;"}, []string{"|", "|", "|"}, "=", nil}, {"select $tag$$tag$;", []string{"select $tag$$tag$;"}, []string{"|"}, "=", nil}, {"select $tag$\n\n$tag$;", []string{"select $tag$\n\n$tag$;"}, []string{"|", "|", "|"}, "=", nil}, {"select $tag$\n(\n$tag$;", []string{"select $tag$\n(\n$tag$;"}, []string{"|", "|", "|"}, "=", nil}, // 30 {"select $tag$\n\\v(\n$tag$;", []string{"select $tag$\n\\v(\n$tag$;"}, []string{"|", "|", "|"}, "=", nil}, {"select $tag$\n\\v(\n$tag$\\g", []string{"select $tag$\n\\v(\n$tag$"}, []string{"|", "|", `\g|`}, "=", nil}, {"select $$\n\\v(\n$tag$$zz$$\\g$$\\g", []string{"select $$\n\\v(\n$tag$$zz$$\\g$$"}, []string{"|", "|", `\g|`}, "=", nil}, {"select * --\n\\v", nil, []string{"|", `\v|`}, "-", nil}, // 34 {"select--", nil, []string{"|"}, "-", nil}, {"select --", nil, []string{"|"}, "-", nil}, {"select /**/", nil, []string{"|"}, "-", nil}, {"select/* */", nil, []string{"|"}, "-", nil}, {"select/*", nil, []string{"|"}, "*", nil}, {"select /*", nil, []string{"|"}, "*", nil}, {"select * /**/", nil, []string{"|"}, "-", nil}, {"select * /* \n\n\n--*/\n;", []string{"select * /* \n\n\n--*/\n;"}, []string{"|", "|", "|", "|", "|"}, "=", nil}, {"select * /* \n\n\n--*/\n", nil, []string{"|", "|", "|", "|", "|"}, "-", nil}, // 43 {"select * /* \n\n\n--\n", nil, []string{"|", "|", "|", "|", "|"}, "*", nil}, {"\\p \\p\nselect (", nil, []string{`\p| `, `\p|`, "|"}, "(", nil}, // 45 {"\\p \\p\nselect ()", nil, []string{`\p| `, `\p|`, "|"}, "-", nil}, {"\n \t\t \n", nil, []string{"|", "|", "|"}, "=", nil}, {"\n foob \t\t \n", nil, []string{"|", "|", "|"}, "-", nil}, {"$$", nil, []string{"|"}, "$", nil}, {"$$foo", nil, []string{"|"}, "$", nil}, // 50 {"'", nil, []string{"|"}, "'", 
nil}, {"(((()()", nil, []string{"|"}, "(", nil}, {"\"", nil, []string{"|"}, "\"", nil}, {"\"foo", nil, []string{"|"}, "\"", nil}, {":a :b", nil, []string{"|"}, "-", []string{"a", "b"}}, // 55 {`select :'a b' :"foo bar"`, nil, []string{"|"}, "-", []string{"a b", "foo bar"}}, {`select :a:b;`, []string{"select :a:b;"}, []string{"|"}, "=", []string{"a", "b"}}, {"select :'a\n:foo:bar", nil, []string{"|", "|"}, "'", nil}, // 58 {"select :''\n:foo:bar\\g", []string{"select :''\n:foo:bar"}, []string{"|", `\g|`}, "=", []string{"foo", "bar"}}, {"select :''\n:foo :bar\\g", []string{"select :''\n:foo :bar"}, []string{"|", `\g|`}, "=", []string{"foo", "bar"}}, // 60 {"select :''\n :foo :bar \\g", []string{"select :''\n :foo :bar "}, []string{"|", `\g|`}, "=", []string{"foo", "bar"}}, {"select :'a\n:'foo':\"bar\"", nil, []string{"|", "|"}, "'", nil}, // 62 {"select :''\n:'foo':\"bar\"\\g", []string{"select :''\n:'foo':\"bar\""}, []string{"|", `\g|`}, "=", []string{"foo", "bar"}}, {"select :''\n:'foo' :\"bar\"\\g", []string{"select :''\n:'foo' :\"bar\""}, []string{"|", `\g|`}, "=", []string{"foo", "bar"}}, {"select :''\n :'foo' :\"bar\" \\g", []string{"select :''\n :'foo' :\"bar\" "}, []string{"|", `\g|`}, "=", []string{"foo", "bar"}}, {`select 1\echo 'pg://':foo'/':bar`, nil, []string{`\echo| 'pg://':foo'/':bar`}, "-", nil}, // 66 {`select :'foo'\echo 'pg://':bar'/' `, nil, []string{`\echo| 'pg://':bar'/' `}, "-", []string{"foo"}}, {`select 1\g '\g`, []string{`select 1`}, []string{`\g| '\g`}, "=", nil}, {`select 1\g "\g`, []string{`select 1`}, []string{`\g| "\g`}, "=", nil}, {"select 1\\g `\\g", []string{`select 1`}, []string{"\\g| `\\g"}, "=", nil}, // 70 {`select 1\g '\g `, []string{`select 1`}, []string{`\g| '\g `}, "=", nil}, {`select 1\g "\g `, []string{`select 1`}, []string{`\g| "\g `}, "=", nil}, {"select 1\\g `\\g ", []string{`select 1`}, []string{"\\g| `\\g "}, "=", nil}, {"select $$\\g$$\\g", []string{`select $$\g$$`}, []string{`\g|`}, "=", nil}, {"select $1\\bind a b c\\g", []string{`select $1`}, []string{`\bind| a b c`, `\g|`}, "=", nil}, {"select $1 \\bind a b c \\g", []string{`select $1 `}, []string{`\bind| a b c `, `\g|`}, "=", nil}, {"select $2, $a$ foo $a$, $1 \\bind a b \\g", []string{`select $2, $a$ foo $a$, $1 `}, []string{`\bind| a b `, `\g|`}, "=", nil}, } for i, test := range tests { b := New(sp(test.s, "\n"), WithAllowDollar(true), WithAllowMultilineComments(true), WithAllowCComments(true)) var stmts, cmds, aparams []string var vars []*Var loop: for { cmd, params, err := b.Next(unquote) switch { case err == io.EOF: break loop case err != nil: t.Fatalf("test %d did not expect error, got: %v", i, err) } vars = append(vars, b.Vars...) 
if b.Ready() || cmd == `\g` { stmts = append(stmts, b.String()) b.Reset(nil) } cmds = append(cmds, cmd) aparams = append(aparams, params) } if len(stmts) != len(test.stmts) { t.Logf(">> %#v // %#v", test.stmts, stmts) t.Fatalf("test %d expected %d statements, got: %d", i, len(test.stmts), len(stmts)) } if !reflect.DeepEqual(stmts, test.stmts) { t.Logf(">> %#v // %#v", test.stmts, stmts) t.Fatalf("test %d expected statements %s, got: %s", i, jj(test.stmts), jj(stmts)) } if cz := cc(cmds, aparams); !reflect.DeepEqual(cz, test.cmds) { t.Logf(">> cmds: %#v, aparams: %#v, cz: %#v, test.cmds: %#v", cmds, aparams, cz, test.cmds) t.Fatalf("test %d expected commands %v, got: %v", i, jj(test.cmds), jj(cz)) } if st := b.State(); st != test.state { t.Fatalf("test %d expected end parse state `%s`, got: `%s`", i, test.state, st) } if len(vars) != len(test.vars) { t.Fatalf("test %d expected %d vars, got: %d", i, len(test.vars), len(vars)) } for _, n := range test.vars { if !hasVar(vars, n) { t.Fatalf("test %d missing variable `%s`", i, n) } } b.Reset(nil) if len(b.Buf) != 0 { t.Fatalf("test %d after reset b.Buf should have len %d, got: %d", i, 0, len(b.Buf)) } if b.Len != 0 { t.Fatalf("test %d after reset should have len %d, got: %d", i, 0, b.Len) } if len(b.Vars) != 0 { t.Fatalf("test %d after reset should have len(vars) == 0, got: %d", i, len(b.Vars)) } if b.Prefix != "" { t.Fatalf("test %d after reset should have empty prefix, got: %s", i, b.Prefix) } if b.quote != 0 || b.quoteDollarTag != "" || b.multilineComment || b.balanceCount != 0 { t.Fatalf("test %d after reset should have a cleared parse state", i) } if st := b.State(); st != "=" { t.Fatalf("test %d after reset should have state `=`, got: `%s`", i, st) } if b.ready { t.Fatalf("test %d after reset should not be ready", i) } } } func TestEmptyVariablesRawString(t *testing.T) { stmt := new(Stmt) stmt.AppendString("select ", "\n") stmt.Prefix = "SELECT" v := &Var{ I: 7, End: 9, Name: "a", Len: 0, } stmt.Vars = append(stmt.Vars, v) if exp, got := "select ", stmt.RawString(); exp != got { t.Fatalf("Defined=false, expected: %s, got: %s", exp, got) } v.Defined = true if exp, got := "select :a", stmt.RawString(); exp != got { t.Fatalf("Defined=true, expected: %s, got: %s", exp, got) } } // cc combines commands with params. func cc(cmds []string, params []string) []string { if len(cmds) == 0 { return []string{"|"} } z := make([]string, len(cmds)) if len(cmds) != len(params) { panic("length of params should be same as cmds") } for i := 0; i < len(cmds); i++ { z[i] = cmds[i] + "|" + params[i] } return z } func jj(s []string) string { return "[`" + strings.Join(s, "`,`") + "`]" } func sp(a, sep string) func() ([]rune, error) { s := strings.Split(a, sep) return func() ([]rune, error) { if len(s) > 0 { z := s[0] s = s[1:] return []rune(z), nil } return nil, io.EOF } } func hasVar(vars []*Var, n string) bool { for _, v := range vars { if v.Name == n { return true } } return false } usql-0.19.19/styles/000077500000000000000000000000001476173253300142155ustar00rootroot00000000000000usql-0.19.19/styles/styles.go000066400000000000000000000017171476173253300160750ustar00rootroot00000000000000// Package styles provides chroma styles based on the chroma styles but removing // the backgrounds. package styles import ( "sync" "github.com/alecthomas/chroma/v2" cstyles "github.com/alecthomas/chroma/v2/styles" ) // styles is the set of styles with their background colors removed. 
var styles = struct { styles map[string]*chroma.Style sync.Mutex }{ styles: make(map[string]*chroma.Style), } // Get retrieves the equivalent chroma style. func Get(name string) *chroma.Style { styles.Lock() defer styles.Unlock() if _, ok := styles.styles[name]; !ok { // get original style s := cstyles.Get(name) // create new entry map m := make(chroma.StyleEntries) for _, typ := range s.Types() { // skip background if typ == chroma.Background { continue } z := s.Get(typ) // unset background z.Background = chroma.Colour(0) m[typ] = z.String() } styles.styles[name] = chroma.MustNewStyle(s.Name, m) } return styles.styles[name] } usql-0.19.19/testcli.go000066400000000000000000000065631476173253300147020ustar00rootroot00000000000000//go:build ignore // Command testcli runs goexpect tests against a built usql binary. package main import ( "bytes" "context" "flag" "fmt" "io" "log" "os" "regexp" "time" gexpect "github.com/google/goexpect" ) func main() { binpath := flag.String("binpath", "./usql", "bin path") deadline := flag.Duration("deadline", 5*time.Minute, "total execution deadline") timeout := flag.Duration("timeout", 2*time.Minute, "individual test timeout") re := flag.String("run", "", "test name regexp to run") flag.Parse() if err := run(context.Background(), *binpath, *deadline, *timeout, *re); err != nil { fmt.Fprintf(os.Stderr, "error: %v\n", err) os.Exit(1) } } func run(ctx context.Context, binpath string, deadline, timeout time.Duration, re string) error { ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(deadline)) defer cancel() tests, err := cliTests() if err != nil { return err } var nameRE *regexp.Regexp if re != "" { nameRE, err = regexp.Compile(re) if err != nil { return err } } for _, test := range tests { if nameRE != nil && !nameRE.MatchString(test.name) { log.Printf(">>> SKIPPING: %s", test.name) continue } log.Printf(">>> RUNNING: %s", test.name) if err := test.do(ctx, binpath, timeout); err != nil { return fmt.Errorf("test %s: %v", test.name, err) } log.Printf(">>> COMPLETED: %s", test.name) } return nil } type Test struct { name string script string args []string env []string } func cliTests() ([]Test, error) { env := append(os.Environ(), "TERM=xterm-256color") return []Test{ { "complex/postgres", "./contrib/postgres/test.sql", []string{"pgsql://postgres:P4ssw0rd@localhost", "--set=PAGER=''", "--pset=pager=off"}, env, }, { "complex/mysql", "./contrib/mysql/test.sql", []string{"my://root:P4ssw0rd@localhost", "--set=PAGER=''", "--pset=pager=off"}, env, }, { "complex/sqlite3", "./contrib/sqlite3/test.sql", []string{"sqlite:./testdata/sqlite3_test.db", "--set=PAGER=''", "--pset=pager=off"}, env, }, { "complex/moderncsqlite", "./contrib/sqlite3/test.sql", []string{"mq:./testdata/moderncsqlite_test.db", "--set=PAGER=''", "--pset=pager=off"}, env, }, { "complex/sqlserver", "./contrib/sqlserver/test.sql", []string{"sqlserver://sa:Adm1nP@ssw0rd@localhost/", "--set=PAGER=''", "--pset=pager=off"}, env, }, { "complex/cassandra", "./contrib/cassandra/test.sql", []string{"ca://cassandra:cassandra@localhost", "--set=PAGER=''", "--pset=pager=off"}, env, }, { "copy/a_bit_of_everything", "./testdata/copy.sql", []string{"--set=PAGER=''", "--pset=pager=off"}, env, }, }, nil } func (test Test) do(ctx context.Context, binpath string, timeout time.Duration) error { exp, errch, err := gexpect.SpawnWithArgs( append([]string{binpath}, test.args...), timeout, gexpect.SetEnv(test.env), gexpect.Tee(&noopWriteCloser{os.Stdout}), ) if err != nil { return err } buf, err := 
os.ReadFile(test.script) if err != nil { return err } for _, line := range bytes.Split(buf, []byte{'\n'}) { if err := exp.Send(string(line) + "\n"); err != nil { return err } } select { case <-ctx.Done(): defer exp.Close() return ctx.Err() case err := <-errch: defer exp.Close() return err } } type noopWriteCloser struct { io.Writer } func (*noopWriteCloser) Close() error { return nil } usql-0.19.19/testdata/000077500000000000000000000000001476173253300145035ustar00rootroot00000000000000usql-0.19.19/testdata/booktest/000077500000000000000000000000001476173253300163355ustar00rootroot00000000000000usql-0.19.19/testdata/booktest/authors.csv000066400000000000000000000000551476173253300205370ustar00rootroot00000000000000author_id,name 1,Isaac Asimov 2,Stephen King usql-0.19.19/testdata/booktest/books.csv000066400000000000000000000000701476173253300201640ustar00rootroot00000000000000book_id,author_id,title 1,1,I Robot 2,2,Carrie 3,2,Cujo usql-0.19.19/testdata/copy.sql000066400000000000000000000040701476173253300161770ustar00rootroot00000000000000\set PGDB pg://postgres:P4ssw0rd@localhost \set MYDB my://root:P4ssw0rd@localhost \set SQDB sq:./testdata/copy_test.db \set MSDB ms://sa:Adm1nP@ssw0rd@localhost/ \connect :PGDB drop table if exists a_bit_of_everything; create table a_bit_of_everything ( a_id serial primary key, a_blob bytea, a_bool boolean, a_date timestamp with time zone, a_double double precision, a_int integer, a_text text ); insert into a_bit_of_everything (a_blob, a_bool, a_date, a_double, a_int, a_text) values (E'more\ntext'::bytea, true, now(), 32.0, 0, 'some text'), (E'other\ntext'::bytea, false, now()+interval '3 days', 64.0, 128, 'foobar') ; select * from a_bit_of_everything; \connect :MYDB drop database if exists testdb; create database testdb; use testdb; drop table if exists a_bit_of_everything; create table a_bit_of_everything ( a_id integer not null auto_increment primary key, a_blob blob, a_bool boolean, a_date datetime, a_double double, a_int integer, a_text text ); \copy :PGDB :MYDB/testdb 'select * from a_bit_of_everything' 'a_bit_of_everything(a_id, a_blob, a_bool, a_date, a_double, a_int, a_text)' \connect :MYDB/testdb select * from a_bit_of_everything; \! 
rm -f ./testdata/test3.db \connect :SQDB create table a_bit_of_everything ( a_id integer primary key autoincrement, a_blob blob, a_bool boolean, a_date datetime, a_double double precision, a_int integer, a_text text ); \copy :PGDB :SQDB 'select * from a_bit_of_everything' 'a_bit_of_everything(a_id, a_blob, a_bool, a_date, a_double, a_int, a_text)' \connect :SQDB select * from a_bit_of_everything; \connect :MSDB drop table if exists a_bit_of_everything; create table a_bit_of_everything ( -- a_id integer identity(1,1) primary key, -- doesn't work currently a_id integer, a_blob varbinary(max), a_bool bit, a_date datetime2, a_double double precision, a_int integer, a_text text ); \copy :PGDB :MSDB 'select * from a_bit_of_everything' 'a_bit_of_everything(a_id, a_blob, a_bool, a_date, a_double, a_int, a_text)' \connect :MSDB select * from a_bit_of_everything; \quit usql-0.19.19/testdata/duckdb.db000066400000000000000000000300001476173253300162370ustar00rootroot00000000000000g'MLhqDUCK@v0.9.1401c806usql-0.19.19/testdata/duckdb.db.wal000066400000000000000000000000001476173253300170160ustar00rootroot00000000000000usql-0.19.19/testdata/inc_test.sql000066400000000000000000000000661476173253300170360ustar00rootroot00000000000000select 'testdata/inc_test.sql'; \i sub/inc_test2.sql usql-0.19.19/testdata/inc_test_z.sql000066400000000000000000000000421476173253300173610ustar00rootroot00000000000000select 'testdata/inc_test_z.sql'; usql-0.19.19/testdata/numbers.sql000066400000000000000000000002531476173253300166770ustar00rootroot00000000000000select '1251258098.1555901285'::numeric; select '1251258098.1555901285'::float4; select '1251258098.1555901285'::float8; select '1251258098.1555901285'::double precision; usql-0.19.19/testdata/quotes.sql000066400000000000000000000021251476173253300165440ustar00rootroot00000000000000-- echo all \set ECHO all -- conditional variables display FALSE when name is not set \unset foo \echo :{?foo} -- conditional variables display TRUE when name is set \set foo 'bar' \echo :{?foo} -- single quoted strings will decode '' as ' and decode \n, \t, \b, \r, \f, \digits octals, \xdigits (standard escapes) \set foo 'bar''bar\r\n' -- single quoted variables escape ' but does not escape special characters \echo :'foo' -- double quoted variables do not escape ' or special characters \echo :"foo" -- single quoted strings decode any other standard escape (\) as literal \set foo 'bar\'''bar' \echo :foo \echo :'foo' -- single quoted variables escape \ with E'' style strings \set foo 'bar\\\'' \echo :foo \echo :'foo' \echo :"foo" -- backticks interpolate unquoted variables \set foo 'bar' \echo `echo :foo` -- backticks interpolate single quoted variables \echo `echo :'foo'` -- backticks do not interpolate double quoted variables \echo `echo :"foo"` -- backticks have error messages for single quoted variables containing \r or \n when using :'' syntax \set foo 'bar\r\n' \echo `echo :'foo'` usql-0.19.19/testdata/sub/000077500000000000000000000000001476173253300152745ustar00rootroot00000000000000usql-0.19.19/testdata/sub/inc_test2.sql000066400000000000000000000003271476173253300177110ustar00rootroot00000000000000select 'testdata/sub/inc_test2.sql'; select 'from testdata/sub/inc_test2.sql, doing: \i inc_test_z.sql'; \i inc_test_z.sql select 'from testdata/sub/inc_test2.sql, doing: \ir inc_test_z.sql'; \ir inc_test_z.sql usql-0.19.19/testdata/sub/inc_test_z.sql000066400000000000000000000000451476173253300201550ustar00rootroot00000000000000select 'testdata/sub/inc_test_z.sql' 
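// ---------------------------------------------------------------------------
// Illustrative sketch only -- not a file from the usql archive above. It shows
// how the styles package (styles/styles.go, earlier in this archive) might be
// used to obtain a chroma style with its background colors stripped. The style
// name "monokai", the program layout, and the github.com/xo/usql/styles import
// path are assumptions for this example; the styles.Get, Style.Get, and
// StyleEntry.Background calls are the same ones used in styles.go itself.
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/v2"
	"github.com/xo/usql/styles"
)

func main() {
	// Get returns the named style with every entry's background cleared;
	// repeated calls for the same name reuse the cached copy.
	s := styles.Get("monokai")
	// With backgrounds stripped, the computed keyword entry is expected to
	// carry the zero chroma.Colour as its background.
	fmt.Println(s.Name, s.Get(chroma.Keyword).Background)
}
// ---------------------------------------------------------------------------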
usql-0.19.19/text/000077500000000000000000000000001476173253300136565ustar00rootroot00000000000000usql-0.19.19/text/errors.go000066400000000000000000000121561476173253300155260ustar00rootroot00000000000000package text import ( "errors" ) var ( // ErrNotConnected is the not connected error. ErrNotConnected = errors.New("not connected") // ErrNoSuchFileOrDirectory is the no such file or directory error. ErrNoSuchFileOrDirectory = errors.New("no such file or directory") // ErrCannotIncludeDirectories is the cannot include directories error. ErrCannotIncludeDirectories = errors.New("cannot include directories") // ErrMissingDSN is the missing dsn error. ErrMissingDSN = errors.New("missing dsn") // ErrNoPreviousTransactionExists is the no previous transaction exists error. ErrNoPreviousTransactionExists = errors.New("no previous transaction exists") // ErrPreviousTransactionExists is the previous transaction exists error. ErrPreviousTransactionExists = errors.New("previous transaction exists") // ErrPasswordAttemptsExhausted is the exhausted password attempts error. ErrPasswordAttemptsExhausted = errors.New("password attempts exhausted") // ErrSingleTransactionCannotBeUsedWithInteractiveMode is the single transaction cannot be used with interactive mode error. ErrSingleTransactionCannotBeUsedWithInteractiveMode = errors.New("--single-transaction cannot be used with interactive mode") // ErrNoEditorDefined is the no editor defined error. ErrNoEditorDefined = errors.New("no editor defined") // ErrUnknownCommand is the unknown command error. ErrUnknownCommand = errors.New("unknown command") // ErrMissingRequiredArgument is the missing required argument error. ErrMissingRequiredArgument = errors.New("missing required argument") // ErrDriverNotAvailable is the driver not available error. ErrDriverNotAvailable = errors.New("driver not available") // ErrPasswordNotSupportedByDriver is the password not supported by driver error. ErrPasswordNotSupportedByDriver = errors.New(`\password not supported by driver`) // ErrUnterminatedQuotedString is the unterminated quoted string error. ErrUnterminatedQuotedString = errors.New("unterminated quoted string") // ErrNoShellAvailable is the no SHELL available error. ErrNoShellAvailable = errors.New("no SHELL available") // ErrNotInteractive is the not interactive error. ErrNotInteractive = errors.New("not interactive") // ErrInvalidType is the invalid type error. ErrInvalidType = errors.New("invalid -TYPE: TYPE must be password, string, int, uint, float, or bool") // ErrInvalidIdentifier is the invalid identifier error. ErrInvalidIdentifier = errors.New("invalid identifier") // ErrInvalidValue is the invalid value error. ErrInvalidValue = errors.New("invalid value") // ErrTooManyRows is the too many rows error. ErrTooManyRows = errors.New("too many rows") // ErrInvalidFormatType is the invalid format type error. ErrInvalidFormatType = errors.New(`\pset: allowed formats are unaligned, aligned, wrapped, html, asciidoc, latex, latex-longtable, troff-ms, json, csv`) // ErrInvalidFormatPagerType is the invalid format pager error. ErrInvalidFormatPagerType = errors.New(`\pset: allowed pager values are on, off, always`) // ErrInvalidFormatExpandedType is the invalid format expanded error. ErrInvalidFormatExpandedType = errors.New(`\pset: allowed expanded values are on, off, auto`) // ErrInvalidFormatLineStyle is the invalid format line style error. 
ErrInvalidFormatLineStyle = errors.New(`\pset: allowed line styles are ascii, old-ascii, unicode`) // ErrInvalidFormatBorderLineStyle is the invalid format border line style error. ErrInvalidFormatBorderLineStyle = errors.New(`\pset: allowed Unicode border line styles are single, double`) // ErrInvalidTimezoneLocation is the invalid timezone location error. ErrInvalidTimezoneLocation = errors.New(`\pset: invalid timezone location`) // ErrGraphicsNotSupported is the graphics not supported error. ErrGraphicsNotSupported = errors.New(`\chart: graphics not supported in terminal`) // ErrNoNumericColumns is the no numeric columns error. ErrNoNumericColumns = errors.New(`\chart: no numeric columns found`) // ErrInvalidQuotedString is the invalid quoted string error. ErrInvalidQuotedString = errors.New(`invalid quoted string`) // ErrInvalidFormatOption is the invalid format option error. ErrInvalidFormatOption = errors.New("invalid format option") // ErrInvalidWatchDuration is the invalid watch duration error. ErrInvalidWatchDuration = errors.New("invalid watch duration") // ErrUnableToNormalizeURL is the unable to normalize URL error. ErrUnableToNormalizeURL = errors.New("unable to normalize URL") // ErrInvalidIsolationLevel is the invalid isolation level error. ErrInvalidIsolationLevel = errors.New("invalid isolation level") // ErrNotSupported is the not supported error. ErrNotSupported = errors.New("not supported") // ErrWrongNumberOfArguments is the wrong number of arguments error. ErrWrongNumberOfArguments = errors.New("wrong number of arguments") // ErrUnknownFileType is the unknown file type error. ErrUnknownFileType = errors.New("unknown file type") // ErrNamedConnectionIsNotAURL is the named connection is not a url error. ErrNamedConnectionIsNotAURL = errors.New("named connection is not a url") // ErrInvalidConfig is the invalid config error. ErrInvalidConfig = errors.New("invalid config") ) usql-0.19.19/text/license.go000066400000000000000000000022661476173253300156350ustar00rootroot00000000000000package text // Code generated by gen.go. DO NOT EDIT. // License contains the license text for usql. const License = `The MIT License (MIT) Copyright (c) 2016-2024 Kenneth Shaw Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.` usql-0.19.19/text/logo.png000066400000000000000000000241221476173253300153250ustar00rootroot00000000000000[binary PNG data for text/logo.png omitted] usql-0.19.19/text/text.go000066400000000000000000000142721476173253300151770ustar00rootroot00000000000000// Package text contains the text (and eventually translations) for the usql // application.
package text import ( "bytes" _ "embed" "image" "image/png" "regexp" "strings" ) // Various usql text bits. var ( CommandName = `usql` CommandVersion = `0.0.0-dev` PassfileName = CommandName + `pass` ConfigName = "config" Banner = `the universal command-line interface for SQL databases` CommandHelpHint = `hint: try "` + CommandName + ` --help" for more information.` NotConnected = `(not connected)` HelpPrefix = `help` QuitPrefix = `quit` ExitPrefix = `exit` WelcomeDesc = `Type "` + HelpPrefix + `" for help.` QueryBufferEmpty = `Query buffer is empty.` QueryBufferReset = `Query buffer reset (cleared).` InvalidCommand = `Invalid command \%s. Try \? for help.` ExtraArgumentIgnored = `\%s: extra argument %q ignored` MissingRequiredArg = `\%s: missing required argument` Copyright = CommandName + ", " + Banner + ".\n\n" + License RowCount = `(%d rows)` AvailableDrivers = `Available Drivers:` ConnInfo = `Connected with driver %s (%s)` EnterPassword = `Enter password: ` EnterPreviousPassword = `Enter previous password: ` PasswordsDoNotMatch = `Passwords do not match, trying again ...` NewPassword = `Enter new password: ` ConfirmPassword = `Confirm password: ` PasswordChangeFailed = `\password for %q failed: %v` CouldNotSetVariable = `could not set variable %q` ChartParseFailed = `\chart: invalid argument for %q: %v` // PasswordChangeSucceeded = `\password succeeded for %q` HelpDesc string HelpDescShort = `Use \? for help or press control-C to clear the input buffer.` HelpBanner = `You are using ` + CommandName + ", " + Banner + `.` HelpCommandPrefix = `Type: ` HelpCommands = [][]string{ {`copyright`, `for distribution terms`}, //{`h`, `for help with SQL commands`}, {`?`, `for help with ` + CommandName + ` commands`}, {`g`, `or terminate with semicolon to execute query`}, {`q`, `to quit`}, } QuitDesc = `Use \q to quit.` UnknownFormatFieldName = `unknown option: %s` FormatFieldInvalid = `unrecognized value %q for "%s"` FormatFieldInvalidValue = `unrecognized value %q for "%s": %s expected` FormatFieldNameSetMap = map[string]string{ `border`: `Border style is %d.`, `columns`: `Target width is %d.`, `expanded`: `Expanded display is %s.`, `expanded_auto`: `Expanded display is used automatically.`, `fieldsep`: `Field separator is %q.`, `fieldsep_zero`: `Field separator is zero byte.`, `footer`: `Default footer is %s.`, `format`: `Output format is %s.`, `linestyle`: `Line style is %s.`, `locale`: `Locale is %q.`, `null`: `Null display is %q.`, `numericlocale`: `Locale-adjusted numeric output is %s.`, `pager`: `Pager usage is %s.`, `pager_min_lines`: `Pager won't be used for less than %d line(s).`, `recordsep`: `Field separator is %q.`, `recordsep_zero`: `Record separator is zero byte.`, `tableattr`: `Table attributes are %q.`, `time`: `Time display is %s.`, `title`: `Title is %q.`, `tuples_only`: `Tuples only is %s.`, `unicode_border_linestyle`: `Unicode border line style is %q.`, `unicode_column_linestyle`: `Unicode column line style is %q.`, `unicode_header_linestyle`: `Unicode header line style is %q.`, } FormatFieldNameUnsetMap = map[string]string{ `tableattr`: `Table attributes unset.`, `title`: `Title is unset.`, } TimingSet = `Timing is %s.` TimingDesc = `Time: %0.3f ms` InvalidValue = `invalid -%s value %q: %s` NotSupportedByDriver = `%s not supported by %s driver` RelationNotFound = `Did not find any relation named "%s".` InvalidOption = `invalid option %q` NotificationReceived = `Asynchronous notification %q %sreceived from server process with PID %d.` NotificationPayload = `with payload %q ` 
UnknownShortAlias = `(unk)` InvalidNamedConnection = `warning: named connection %q was not defined: %v` ChartsPathDoesNotExist = `warning: charts_path %q does not exist` ChartsPathIsNotADirectory = `warning: charts_path %q is not a directory` UsageTemplate = `Usage: {{.UseLine}} Arguments: DSN database url or connection name Flags: {{.LocalFlags.FlagUsages | trimTrailingWhitespaces}} ` ChartUsage = `\chart: create and display charts from SQL data usage: \chart [opts] available options: help title [title] chart title subtitle [subtitle] chart subtitle size NxN chart size (width x height) bg [color] chart background color type [bar|line] chart type prec [num] data decimal precision file [path] write chart to file (svg)` ) func init() { // setup help description cmds := make([]string, len(HelpCommands)) for i, h := range HelpCommands { cmds[i] = `\` + h[0] + " " + h[1] } HelpDesc = HelpBanner + "\n" + HelpCommandPrefix + strings.Join(cmds, "\n"+strings.Repeat(" ", len(HelpCommandPrefix))) } var spaceRE = regexp.MustCompile(`\s+`) // Command returns the command name without spaces. var Command = func() string { return spaceRE.ReplaceAllString(CommandName, "") } // CommandLower returns the lower case command name without spaces. var CommandLower = func() string { return strings.ToLower(Command()) } // CommandUpper returns the upper case command name without spaces. var CommandUpper = func() string { return strings.ToUpper(Command()) } // Short returns the command name and banner. var Short = func() string { return Command() + ", " + Banner } // Logo is the logo. var Logo image.Image // LogoPng is the embedded logo. // //go:embed logo.png var LogoPng []byte func init() { var err error if Logo, err = png.Decode(bytes.NewReader(LogoPng)); err != nil { panic(err) } } usql-0.19.19/update-deps.sh000077500000000000000000000006431476173253300154470ustar00rootroot00000000000000#!/bin/bash SRC=$(realpath $(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)) set -e pushd $SRC &> /dev/null (set -x; go get -u -v -x $@ $(go list -tags 'all test' -f '{{ join .Imports "\n" }}' ./internal/...) ) PKGS=$(go list -tags 'all test' -f '{{ join .Imports "\n" }}'|grep 'github.com/xo/usql'|grep -v drivers|grep -v internal) (set -x; go get -u -v -x $@ $PKGS ) (set -x; go mod tidy ) popd &> /dev/null
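// ---------------------------------------------------------------------------
// Illustrative sketch only -- not a file from the usql archive above. It drives
// the statement buffer from stmt/stmt.go the same way stmt/stmt_test.go does
// (New, Next, Ready, String, Reset). The feed function, the no-op unquote
// callback, and the github.com/xo/usql/stmt import path are assumptions made
// for this example.
package main

import (
	"fmt"
	"io"

	"github.com/xo/usql/stmt"
)

func main() {
	lines := []string{"select 1;", "select 2\\g"}
	// feed hands the buffer one line of runes per call, then io.EOF,
	// mirroring the sp helper in stmt_test.go.
	feed := func() ([]rune, error) {
		if len(lines) == 0 {
			return nil, io.EOF
		}
		z := lines[0]
		lines = lines[1:]
		return []rune(z), nil
	}
	// unquote is a stand-in for env.Unquote that never resolves variables.
	unquote := func(string, bool) (bool, string, error) { return false, "", nil }
	b := stmt.New(feed, stmt.WithAllowDollar(true), stmt.WithAllowMultilineComments(true))
	for {
		cmd, params, err := b.Next(unquote)
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}
		// A statement is complete when terminated with ';' (Ready) or when a
		// \g command is returned, as in TestNextResetState above.
		if b.Ready() || cmd == `\g` {
			fmt.Printf("exec %q (cmd=%q params=%q)\n", b.String(), cmd, params)
			b.Reset(nil)
		}
	}
}
// ---------------------------------------------------------------------------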