doxx-0.1.2/.cargo_vcs_info.json 0000644 00000000136 00000000001 0012016 0 ustar { "git": { "sha1": "6a7d22c1d1a894da3dac8c0e5be54ceee13508c8" }, "path_in_vcs": "" } doxx-0.1.2/.gitattributes 0000644 0000000 0000000 00000000404 10461020230 0013477 0 ustar 0000000 0000000 # Auto detect text files and perform LF normalization * text=auto # Ensure Rust files always use LF *.rs text eol=lf *.toml text eol=lf *.md text eol=lf *.yml text eol=lf *.yaml text eol=lf # Binary files *.docx binary *.png binary *.jpg binary *.jpeg binary doxx-0.1.2/.github/dependabot.yml.disabled 0000644 0000000 0000000 00000002411 10461020230 0016542 0 ustar 0000000 0000000 # GitHub Dependabot configuration for doxx - DISABLED # See: https://docs.github.com/en/code-security/dependabot # # This is disabled during early development to avoid PR spam. # Rename to dependabot.yml when project is stable and ready for automated dependency updates. version: 2 updates: # Rust dependencies - package-ecosystem: "cargo" directory: "/" schedule: interval: "weekly" day: "monday" time: "09:00" timezone: "UTC" open-pull-requests-limit: 3 reviewers: - "bgreenwell" assignees: - "bgreenwell" commit-message: prefix: "deps" include: "scope" labels: - "dependencies" - "rust" # Group ALL updates to reduce PR noise groups: rust-dependencies: patterns: - "*" update-types: - "minor" - "patch" - "major" # GitHub Actions - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" day: "monday" time: "09:00" timezone: "UTC" open-pull-requests-limit: 2 reviewers: - "bgreenwell" assignees: - "bgreenwell" commit-message: prefix: "ci" include: "scope" labels: - "dependencies" - "github-actions" doxx-0.1.2/.github/workflows/ci.yml 0000644 0000000 0000000 00000002522 10461020230 0015322 0 ustar 0000000 0000000 name: CI on: push: branches: [main] pull_request: branches: [main] env: CARGO_TERM_COLOR: always jobs: test: name: Build and Test runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, windows-latest, macos-latest] steps: - name: Checkout code uses: actions/checkout@v5 - name: Install Rust uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy - name: Install system dependencies (Linux) if: matrix.os == 'ubuntu-latest' run: | sudo apt-get update sudo apt-get install -y libxcb-shape0-dev libxcb-xfixes0-dev - name: Cache dependencies uses: Swatinem/rust-cache@v2 - name: Check formatting (Unix only) if: matrix.os != 'windows-latest' run: cargo fmt --all -- --check - name: Lint with Clippy run: cargo clippy --all-targets -- -D warnings - name: Run tests run: cargo test - name: Check build run: cargo build --release # Nix build - name: Install Nix if: matrix.os != 'windows-latest' uses: cachix/install-nix-action@v31 with: nix_path: nixpkgs=channel:nixos-unstable - name: Build with Nix if: matrix.os != 'windows-latest' run: nix build doxx-0.1.2/.github/workflows/docs.yml 0000644 0000000 0000000 00000003317 10461020230 0015662 0 ustar 0000000 0000000 name: Deploy Documentation on: push: branches: ["main"] workflow_dispatch: permissions: contents: read pages: write id-token: write concurrency: group: "pages" cancel-in-progress: false jobs: build: if: ${{ github.actor == 'bgreenwell' }} runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v5 - name: Install Rust uses: dtolnay/rust-toolchain@stable - name: Cache dependencies uses: Swatinem/rust-cache@v2 - name: Build documentation run: cargo doc --no-deps - name: Create redirect page run: echo '' > target/doc/index.html - name: Setup Pages id: pages uses: actions/configure-pages@v5 continue-on-error: true - name: Upload artifact uses: actions/upload-pages-artifact@v3 with: path: "./target/doc" deploy: environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} runs-on: ubuntu-latest needs: build if: success() steps: - name: Deploy to GitHub Pages id: deployment uses: actions/deploy-pages@v4 continue-on-error: true - name: Pages deployment status run: | if [ "${{ steps.deployment.outcome }}" == "failure" ]; then echo "::warning::GitHub Pages deployment failed. Please enable Pages in repository settings:" echo "::warning::1. Go to Settings > Pages" echo "::warning::2. Set Source to 'GitHub Actions'" echo "::warning::3. Re-run this workflow" else echo "Documentation deployed successfully!" fi doxx-0.1.2/.github/workflows/release.yml 0000644 0000000 0000000 00000011647 10461020230 0016357 0 ustar 0000000 0000000 name: Create Release on: push: tags: - "v*.*.*" # This workflow runs when we push a tag like v0.2.1 workflow_dispatch: permissions: contents: write jobs: create-release: if: ${{ github.actor == 'bgreenwell' }} name: Create Release runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@v5 - name: Create Release id: create_release uses: softprops/action-gh-release@v1 with: tag_name: ${{ github.ref_name }} name: Release ${{ github.ref_name }} body_path: CHANGELOG.md draft: true prerelease: false generate_release_notes: true build-and-upload: name: Build and Upload Binaries needs: create-release strategy: matrix: include: - target: x86_64-pc-windows-msvc os: windows-latest asset_name_suffix: "windows-x86_64.zip" - target: x86_64-unknown-linux-musl os: ubuntu-latest asset_name_suffix: "linux-x86_64.tar.gz" - target: x86_64-apple-darwin os: macos-latest asset_name_suffix: "macos-x86_64.tar.gz" - target: aarch64-apple-darwin os: macos-latest asset_name_suffix: "macos-arm64.tar.gz" runs-on: ${{ matrix.os }} steps: - name: Checkout code uses: actions/checkout@v5 - name: Install Rust uses: dtolnay/rust-toolchain@stable with: targets: ${{ matrix.target }} - name: Install system dependencies (Linux) if: matrix.os == 'ubuntu-latest' run: | sudo apt-get update sudo apt-get install -y libxcb-shape0-dev libxcb-xfixes0-dev musl-tools - name: Cache dependencies uses: Swatinem/rust-cache@v2 with: key: ${{ matrix.target }} - name: Build release binary run: cargo build --release --target ${{ matrix.target }} - name: Create release archive (Windows) if: matrix.os == 'windows-latest' run: | $binary_path = "target/${{ matrix.target }}/release/doxx.exe" $archive_path = "doxx-${{ matrix.asset_name_suffix }}" Compress-Archive -Path $binary_path -DestinationPath $archive_path echo "ASSET_PATH=$archive_path" >> $env:GITHUB_ENV - name: Create release archive (Unix) if: matrix.os != 'windows-latest' run: | binary_path="target/${{ matrix.target }}/release/doxx" archive_path="doxx-${{ matrix.asset_name_suffix }}" tar -czf "$archive_path" -C "$(dirname "$binary_path")" "$(basename "$binary_path")" echo "ASSET_PATH=$archive_path" >> $GITHUB_ENV - name: Upload Release Asset uses: softprops/action-gh-release@v1 with: tag_name: ${{ github.ref_name }} files: ${{ env.ASSET_PATH }} generate-checksums: name: Generate Checksums needs: [create-release, build-and-upload] runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@v5 - name: Download release assets uses: actions/github-script@v7 with: script: | const fs = require('fs'); const { owner, repo } = context.repo; const tag = context.ref.replace('refs/tags/', ''); // Get release by tag const release = await github.rest.repos.getReleaseByTag({ owner, repo, tag }); // Download each asset for (const asset of release.data.assets) { const response = await github.rest.repos.getReleaseAsset({ owner, repo, asset_id: asset.id, headers: { Accept: 'application/octet-stream' } }); fs.writeFileSync(asset.name, Buffer.from(response.data)); console.log(`Downloaded ${asset.name}`); } - name: Generate checksums run: | sha256sum doxx-*.* > doxx-checksums.txt cat doxx-checksums.txt - name: Upload checksums uses: softprops/action-gh-release@v1 with: tag_name: ${{ github.ref_name }} files: doxx-checksums.txt publish-cargo: name: Publish to crates.io runs-on: ubuntu-latest if: github.event.release.prerelease == false needs: [create-release] steps: - name: Checkout code uses: actions/checkout@v5 - name: Install Rust uses: dtolnay/rust-toolchain@stable - name: Install system dependencies (Linux) run: | sudo apt-get update sudo apt-get install -y libxcb-shape0-dev libxcb-xfixes0-dev - name: Cache dependencies uses: Swatinem/rust-cache@v2 - name: Publish to crates.io run: cargo publish --token ${{ secrets.CARGO_REGISTRY_TOKEN }} doxx-0.1.2/.gitignore 0000644 0000000 0000000 00000001476 10461020230 0012606 0 ustar 0000000 0000000 # Rust build artifacts /target *.pdb # IDE and editor files .idea/ .vscode/ *.swp *.swo *~ # Coverage reports lcov.info coverage/ *.profraw # Logs and temporary files *.log *.tmp *.temp # Environment variables .env .env.local # Generated documentation /doc # Project management and development notes CLAUDE.md # macOS .DS_Store .AppleDouble .LSOverride # Icon must end with two \r Icon # Thumbnails ._* # Files that might appear in the root of a volume .DocumentRevisions-V100 .fseventsd .Spotlight-V100 .TemporaryItems .Trashes .VolumeIcon.icns .com.apple.timemachine.donotpresent # Directories potentially created on remote AFP share .AppleDB .AppleDesktop Network Trash Folder Temporary Items .apdisk # Miscellaneous *_files/ *.html # Nix build artifacts result/ # Sensitive test files tests/fixtures/sample.docx doxx-0.1.2/.pre-commit-config.yaml 0000644 0000000 0000000 00000001702 10461020230 0015067 0 ustar 0000000 0000000 # Pre-commit hooks for doxx # Install with: pre-commit install # Run manually: pre-commit run --all-files repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml - id: check-toml - id: check-merge-conflict - id: check-added-large-files args: ['--maxkb=1000'] - repo: local hooks: - id: cargo-fmt name: Cargo format entry: cargo fmt --all -- language: system types: [rust] pass_filenames: false - id: cargo-clippy name: Cargo clippy entry: cargo clippy --all-targets --all-features -- -D warnings language: system types: [rust] pass_filenames: false - id: cargo-test name: Cargo test entry: cargo test --all-features language: system types: [rust] pass_filenames: false doxx-0.1.2/CHANGELOG.md 0000644 0000000 0000000 00000014566 10461020230 0012433 0 ustar 0000000 0000000 # Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [Unreleased] ### Added - **Inline Equation Support**: Complete inline equation rendering within paragraph text - Inline equations now appear at correct positions within text (e.g., "text $A=\pi r^{2}$ more text") - Display equations remain as separate elements for proper mathematical presentation - Automatic detection of inline vs display equations based on OMML structure - LaTeX formatting with `$...$` delimiters for inline equations - Preserves exact ordering of text and equations within paragraphs - **ANSI Export Format**: Rich terminal output with colors and formatting ([#45](https://github.com/bgreenwell/doxx/issues/45)) - `--export ansi` option for ANSI-colored terminal output - `--terminal-width`/`-w` option for setting terminal width (default: $COLUMNS or 80) - `--color-depth {auto,1,4,8,24}` option for controlling color rendering depth - Perfect integration with terminal tools like `less -R`, fzf-tab, yazi, and ranger - Support for all formatting: bold, italic, underline, strikethrough, colors - **Strikethrough Support**: Complete strikethrough text formatting with `~~text~~` syntax in all export formats ([#47](https://github.com/bgreenwell/doxx/issues/47)) - Search state toggle functionality - press `S` to hide/show search results ([#50](https://github.com/bgreenwell/doxx/pull/50)) by [@Jianchi-Chen](https://github.com/Jianchi-Chen) ### Fixed - **Integration Tests for Packaging**: Fixed integration tests to use `CARGO_BIN_EXE` instead of hardcoded `cargo run` ([#60](https://github.com/bgreenwell/doxx/issues/60)) - Tests now work in Debian packaging environments - Tests work with system-wide installed binaries - Faster test execution without recompilation - Follows Rust integration testing best practices - **Text Formatting Preservation**: Fixed critical bug where text formatting (bold, italic, colors) was lost during inline equation processing - **Word Automatic List Formatting**: Fixed formatting being lost in Word automatic numbered lists (affects strikethrough, bold, italic, colors) - Empty search queries no longer match entire document, preventing performance issues ([#50](https://github.com/bgreenwell/doxx/pull/50)) by [@Jianchi-Chen](https://github.com/Jianchi-Chen) ### Changed - **Dependency Upgrade**: Updated `ratatui-image` from v1.0 to v8.0 for improved Debian packaging compatibility ([#59](https://github.com/bgreenwell/doxx/issues/59)) - Addresses Debian package compilation issues - Updated API calls to match v8.0 interface (Picker initialization methods) - All image display functionality remains unchanged - Help text updated to document new search state toggle functionality ## [0.1.2] - 2025-01-20 ### Fixed - **File Type Validation**: Added proper validation to reject non-.docx files with helpful error messages ([#40](https://github.com/bgreenwell/doxx/issues/40), [#56](https://github.com/bgreenwell/doxx/issues/56)) - Checks file extension is `.docx` before attempting to parse - Validates ZIP structure contains `word/document.xml` - Detects Excel files (`.xlsx`) specifically with clear error message: "This appears to be an Excel file" - Prevents hangs and crashes from invalid file types (Excel, ZIP archives, old Word `.doc` format) - Improves user experience with actionable error messages - **Equation Positioning (Partial Fix)**: Improved display equation positioning in document flow ([#58](https://github.com/bgreenwell/doxx/issues/58)) - Display equations now appear inline at their correct paragraph positions instead of all at document end - Added paragraph index tracking to equation extraction pipeline - Implemented `merge_display_equations()` function for intelligent equation placement - Successfully tested with user-provided equation documents - **Known Limitation**: docx-rs library doesn't parse equation-only paragraphs, so positioning may not be pixel-perfect in all cases - Full fix with complete XML parsing planned for v0.2.0 ### Changed - Improved error messages for invalid file formats with specific guidance - Enhanced equation extraction to track paragraph positions for better document structure ### Documentation - Addressed VirusTotal false positive detections with comprehensive explanation ([#46](https://github.com/bgreenwell/doxx/issues/46)) ### Notes - This release focuses on stability and critical bug fixes - Terminal width text wrapping deferred to v0.2.0 ([#45](https://github.com/bgreenwell/doxx/issues/45) - requires full text wrapping feature implementation) - All 47 tests passing across unit, integration, and specialized test suites ## [0.1.1] - 2024-XX-XX ### Added - **Comprehensive release pipeline** with automated package manager support - Cross-platform binary builds (Linux, macOS Intel/ARM, Windows) - Automated crates.io publishing on release - Homebrew formula with automatic updates - SHA256 checksums for security verification - **Enhanced installation options** in README with package manager instructions - **Release automation script** (`scripts/release.sh`) for easy version management - **Comprehensive release documentation** (RELEASE.md) ### Changed - **Updated README** to use sentence case consistently throughout - **Improved TUI image placeholder messages** to be clearer about functionality - **Enhanced Markdown export** to use actual image paths instead of placeholder text - **Modernized GitHub Actions** workflows for better reliability ### Fixed - **Platform-specific image picker initialization** on Windows (clippy compatibility) - **CSV export documentation** now clearly explains table-only extraction purpose ### Documentation - **Added detailed command line options reference** with examples and use cases - **Enhanced installation section** with multiple package manager options - **Clarified CSV export purpose** for structured data extraction workflows ## [0.1.0] - Initial Release ### Added - Basic `.docx` document parsing and viewing - Terminal UI with navigation, search, and outline views - Export functionality (text, markdown, JSON, CSV) - Table parsing and rendering with enhanced formatting - Document metadata extraction - Search functionality with highlighting - Comprehensive test suite with sample documents doxx-0.1.2/Cargo.lock 0000644 00000220756 00000000001 0010005 0 ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "addr2line" version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] name = "adler2" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aes" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if", "cipher", "cpufeatures", ] [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "aligned-vec" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" dependencies = [ "equator", ] [[package]] name = "allocator-api2" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "ansi_colours" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14eec43e0298190790f41679fe69ef7a829d2a2ddd78c8c00339e84710e435fe" dependencies = [ "rgb", ] [[package]] name = "anstream" version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" [[package]] name = "anstyle-parse" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" dependencies = [ "windows-sys 0.60.2", ] [[package]] name = "anstyle-wincon" version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" dependencies = [ "anstyle", "once_cell_polyfill", "windows-sys 0.60.2", ] [[package]] name = "anyhow" version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" [[package]] name = "arbitrary" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" dependencies = [ "derive_arbitrary", ] [[package]] name = "arboard" version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55f533f8e0af236ffe5eb979b99381df3258853f00ba2e44b6e1955292c75227" dependencies = [ "clipboard-win", "image 0.25.6", "log", "objc2", "objc2-app-kit", "objc2-core-foundation", "objc2-core-graphics", "objc2-foundation", "parking_lot", "percent-encoding", "windows-sys 0.59.0", "x11rb", ] [[package]] name = "arg_enum_proc_macro" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "autocfg" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "av1-grain" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f3efb2ca85bc610acfa917b5aaa36f3fcbebed5b3182d7f877b02531c4b80c8" dependencies = [ "anyhow", "arrayvec", "log", "nom", "num-rational", "v_frame", ] [[package]] name = "avif-serialize" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47c8fbc0f831f4519fe8b810b6a7a91410ec83031b8233f730a0480029f6a23f" dependencies = [ "arrayvec", ] [[package]] name = "backtrace" version = "0.3.75" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" dependencies = [ "addr2line", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", "windows-targets 0.52.6", ] [[package]] name = "base64" version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64-simd" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" dependencies = [ "outref", "vsimd", ] [[package]] name = "bit_field" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc827186963e592360843fb5ba4b973e145841266c1357f7180c43526f2e5b61" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" [[package]] name = "bitstream-io" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6099cdc01846bc367c4e7dd630dc5966dccf36b652fae7a74e17b640411a91b2" [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "built" version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56ed6191a7e78c36abdb16ab65341eefd73d64d303fffccdbb00d51e4205967b" [[package]] name = "bumpalo" version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bytemuck" version = "1.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "byteorder-lite" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" [[package]] name = "bzip2" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47" dependencies = [ "bzip2-sys", ] [[package]] name = "bzip2-sys" version = "0.1.13+1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" dependencies = [ "cc", "pkg-config", ] [[package]] name = "cassowary" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" [[package]] name = "castaway" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" dependencies = [ "rustversion", ] [[package]] name = "cc" version = "1.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ee0f8803222ba5a7e2777dd72ca451868909b1ac410621b676adf07280e9b5f" dependencies = [ "jobserver", "libc", "shlex", ] [[package]] name = "cfg-expr" version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" dependencies = [ "smallvec", "target-lexicon", ] [[package]] name = "cfg-if" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "cipher" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" dependencies = [ "crypto-common", "inout", ] [[package]] name = "clap" version = "4.5.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc0e74a703892159f5ae7d3aac52c8e6c392f5ae5f359c70b5881d60aaac318" dependencies = [ "clap_builder", "clap_derive", ] [[package]] name = "clap_builder" version = "4.5.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3e7f4214277f3c7aa526a59dd3fbe306a370daee1f8b7b8c987069cd8e888a8" dependencies = [ "anstream", "anstyle", "clap_lex", "strsim", ] [[package]] name = "clap_derive" version = "4.5.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14cb31bb0a7d536caef2639baa7fad459e15c3144efefa6dbd1c84562c4739f6" dependencies = [ "heck", "proc-macro2", "quote", "syn", ] [[package]] name = "clap_lex" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "clipboard-win" version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bde03770d3df201d4fb868f2c9c59e66a3e4e2bd06692a0fe701e7103c7e84d4" dependencies = [ "error-code", ] [[package]] name = "color_quant" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" [[package]] name = "colorchoice" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "compact_str" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32" dependencies = [ "castaway", "cfg-if", "itoa", "rustversion", "ryu", "static_assertions", ] [[package]] name = "console" version = "0.15.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" dependencies = [ "encode_unicode", "libc", "once_cell", "windows-sys 0.59.0", ] [[package]] name = "constant_time_eq" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crc" version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" dependencies = [ "crc-catalog", ] [[package]] name = "crc-catalog" version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-deque" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crossterm" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ "bitflags 2.9.1", "crossterm_winapi", "libc", "mio 0.8.11", "parking_lot", "signal-hook", "signal-hook-mio", "winapi", ] [[package]] name = "crossterm" version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" dependencies = [ "bitflags 2.9.1", "crossterm_winapi", "mio 1.0.4", "parking_lot", "rustix 0.38.44", "signal-hook", "signal-hook-mio", "winapi", ] [[package]] name = "crossterm_winapi" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" dependencies = [ "winapi", ] [[package]] name = "crunchy" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "darling" version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ "darling_core", "darling_macro", ] [[package]] name = "darling_core" version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", "syn", ] [[package]] name = "darling_macro" version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", "syn", ] [[package]] name = "deflate64" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b" [[package]] name = "deranged" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", ] [[package]] name = "derive_arbitrary" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", "subtle", ] [[package]] name = "dirs" version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" dependencies = [ "dirs-sys", ] [[package]] name = "dirs-sys" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" dependencies = [ "libc", "option-ext", "redox_users", "windows-sys 0.48.0", ] [[package]] name = "dispatch2" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ "bitflags 2.9.1", "objc2", ] [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "docx-rs" version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f21be13b97bd2924f30323d674f5a8db382964972825abd93f30d08f21dad98" dependencies = [ "base64 0.22.1", "image 0.24.9", "serde", "serde_json", "thiserror 1.0.69", "xml-rs", "zip 0.6.6", ] [[package]] name = "doxx" version = "0.1.2" dependencies = [ "anyhow", "arboard", "clap", "crossterm 0.27.0", "dirs", "docx-rs", "image 0.25.6", "once_cell", "quick-xml", "ratatui", "ratatui-image", "regex", "serde", "serde_json", "thiserror 1.0.69", "tokio", "toml", "unicode-segmentation", "viuer", "zip 2.4.2", ] [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "encode_unicode" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "equator" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" dependencies = [ "equator-macro", ] [[package]] name = "equator-macro" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "equivalent" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", "windows-sys 0.60.2", ] [[package]] name = "error-code" version = "3.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59" [[package]] name = "exr" version = "1.73.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f83197f59927b46c04a183a619b7c29df34e63e63c7869320862268c0ef687e0" dependencies = [ "bit_field", "half", "lebe", "miniz_oxide", "rayon-core", "smallvec", "zune-inflate", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fdeflate" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" dependencies = [ "simd-adler32", ] [[package]] name = "flate2" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" dependencies = [ "crc32fast", "miniz_oxide", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "gethostname" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0176e0459c2e4a1fe232f984bca6890e681076abb9934f6cea7c326f3fc47818" dependencies = [ "libc", "windows-targets 0.48.5", ] [[package]] name = "getrandom" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", "wasi 0.11.1+wasi-snapshot-preview1", ] [[package]] name = "getrandom" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "js-sys", "libc", "r-efi", "wasi 0.14.2+wasi-0.2.4", "wasm-bindgen", ] [[package]] name = "gif" version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae047235e33e2829703574b54fdec96bfbad892062d97fed2f76022287de61b" dependencies = [ "color_quant", "weezl", ] [[package]] name = "gimli" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "half" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" dependencies = [ "cfg-if", "crunchy", ] [[package]] name = "hashbrown" version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", "foldhash", ] [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hmac" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ "digest", ] [[package]] name = "icy_sixel" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccc0a9c4770bc47b0a933256a496cfb8b6531f753ea9bccb19c6dff0ff7273fc" [[package]] name = "ident_case" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "image" version = "0.24.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" dependencies = [ "bytemuck", "byteorder", "color_quant", "exr", "gif", "jpeg-decoder", "num-traits", "png", "qoi", "tiff", ] [[package]] name = "image" version = "0.25.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db35664ce6b9810857a38a906215e75a9c879f0696556a39f59c62829710251a" dependencies = [ "bytemuck", "byteorder-lite", "color_quant", "exr", "gif", "image-webp", "num-traits", "png", "qoi", "ravif", "rayon", "rgb", "tiff", "zune-core", "zune-jpeg", ] [[package]] name = "image-webp" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6970fe7a5300b4b42e62c52efa0187540a5bef546c60edaf554ef595d2e6f0b" dependencies = [ "byteorder-lite", "quick-error", ] [[package]] name = "imgref" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0263a3d970d5c054ed9312c0057b4f3bde9c0b33836d3637361d4a9e6e7a408" [[package]] name = "indexmap" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "indoc" version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" [[package]] name = "inout" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ "generic-array", ] [[package]] name = "instability" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "435d80800b936787d62688c927b6490e887c7ef5ff9ce922c6c6050fca75eb9a" dependencies = [ "darling", "indoc", "proc-macro2", "quote", "syn", ] [[package]] name = "interpolate_name" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "io-uring" version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" dependencies = [ "bitflags 2.9.1", "cfg-if", "libc", ] [[package]] name = "is_terminal_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] [[package]] name = "itertools" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jobserver" version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" dependencies = [ "getrandom 0.3.3", "libc", ] [[package]] name = "jpeg-decoder" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07" dependencies = [ "rayon", ] [[package]] name = "js-sys" version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lebe" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" [[package]] name = "libc" version = "0.2.175" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" [[package]] name = "libfuzzer-sys" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5037190e1f70cbeef565bd267599242926f724d3b8a9f510fd7e0b540cfa4404" dependencies = [ "arbitrary", "cc", ] [[package]] name = "libredox" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" dependencies = [ "bitflags 2.9.1", "libc", ] [[package]] name = "linux-raw-sys" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "lock_api" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "log" version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "loop9" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fae87c125b03c1d2c0150c90365d7d6bcc53fb73a9acaef207d2d065860f062" dependencies = [ "imgref", ] [[package]] name = "lru" version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ "hashbrown", ] [[package]] name = "lzma-rs" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e" dependencies = [ "byteorder", "crc", ] [[package]] name = "lzma-sys" version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27" dependencies = [ "cc", "libc", "pkg-config", ] [[package]] name = "maybe-rayon" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" dependencies = [ "cfg-if", "rayon", ] [[package]] name = "memchr" version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "minimal-lexical" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", "simd-adler32", ] [[package]] name = "mio" version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", "wasi 0.11.1+wasi-snapshot-preview1", "windows-sys 0.48.0", ] [[package]] name = "mio" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", "log", "wasi 0.11.1+wasi-snapshot-preview1", "windows-sys 0.59.0", ] [[package]] name = "new_debug_unreachable" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "nom" version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", ] [[package]] name = "noop_proc_macro" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" [[package]] name = "num-bigint" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", ] [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "num-derive" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "num-integer" version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ "num-traits", ] [[package]] name = "num-rational" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" dependencies = [ "num-bigint", "num-integer", "num-traits", ] [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "objc2" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "561f357ba7f3a2a61563a186a163d0a3a5247e1089524a3981d49adb775078bc" dependencies = [ "objc2-encode", ] [[package]] name = "objc2-app-kit" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc" dependencies = [ "bitflags 2.9.1", "objc2", "objc2-core-graphics", "objc2-foundation", ] [[package]] name = "objc2-core-foundation" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" dependencies = [ "bitflags 2.9.1", "dispatch2", "objc2", ] [[package]] name = "objc2-core-graphics" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "989c6c68c13021b5c2d6b71456ebb0f9dc78d752e86a98da7c716f4f9470f5a4" dependencies = [ "bitflags 2.9.1", "dispatch2", "objc2", "objc2-core-foundation", "objc2-io-surface", ] [[package]] name = "objc2-encode" version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" [[package]] name = "objc2-foundation" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" dependencies = [ "bitflags 2.9.1", "objc2", "objc2-core-foundation", ] [[package]] name = "objc2-io-surface" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7282e9ac92529fa3457ce90ebb15f4ecbc383e8338060960760fa2cf75420c3c" dependencies = [ "bitflags 2.9.1", "objc2", "objc2-core-foundation", ] [[package]] name = "object" version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "once_cell_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" [[package]] name = "option-ext" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "outref" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" [[package]] name = "parking_lot" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-targets 0.52.6", ] [[package]] name = "paste" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pbkdf2" version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" dependencies = [ "digest", "hmac", ] [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project-lite" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pkg-config" version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "png" version = "0.17.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" dependencies = [ "bitflags 1.3.2", "crc32fast", "fdeflate", "flate2", "miniz_oxide", ] [[package]] name = "powerfmt" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] [[package]] name = "proc-macro2" version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d61789d7719defeb74ea5fe81f2fdfdbd28a803847077cecce2ff14e1472f6f1" dependencies = [ "unicode-ident", ] [[package]] name = "profiling" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773" dependencies = [ "profiling-procmacros", ] [[package]] name = "profiling-procmacros" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" dependencies = [ "quote", "syn", ] [[package]] name = "qoi" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" dependencies = [ "bytemuck", ] [[package]] name = "quick-error" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quick-xml" version = "0.36.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7649a7b4df05aed9ea7ec6f628c67c9953a43869b8bc50929569b2999d443fe" dependencies = [ "memchr", ] [[package]] name = "quote" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] [[package]] name = "r-efi" version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.16", ] [[package]] name = "ratatui" version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b" dependencies = [ "bitflags 2.9.1", "cassowary", "compact_str", "crossterm 0.28.1", "indoc", "instability", "itertools 0.13.0", "lru", "paste", "strum", "unicode-segmentation", "unicode-truncate", "unicode-width 0.2.0", ] [[package]] name = "ratatui-image" version = "8.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d2d8ad028fcbb171d83cfdeaf44df17bf0eae3585bdd7f89bc87af98fc71b0e" dependencies = [ "base64-simd", "icy_sixel", "image 0.25.6", "rand", "ratatui", "rustix 0.38.44", "thiserror 1.0.69", "windows", ] [[package]] name = "rav1e" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd87ce80a7665b1cce111f8a16c1f3929f6547ce91ade6addf4ec86a8dda5ce9" dependencies = [ "arbitrary", "arg_enum_proc_macro", "arrayvec", "av1-grain", "bitstream-io", "built", "cfg-if", "interpolate_name", "itertools 0.12.1", "libc", "libfuzzer-sys", "log", "maybe-rayon", "new_debug_unreachable", "noop_proc_macro", "num-derive", "num-traits", "once_cell", "paste", "profiling", "rand", "rand_chacha", "simd_helpers", "system-deps", "thiserror 1.0.69", "v_frame", "wasm-bindgen", ] [[package]] name = "ravif" version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5825c26fddd16ab9f515930d49028a630efec172e903483c94796cfe31893e6b" dependencies = [ "avif-serialize", "imgref", "loop9", "quick-error", "rav1e", "rayon", "rgb", ] [[package]] name = "rayon" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", ] [[package]] name = "rayon-core" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", ] [[package]] name = "redox_syscall" version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ "bitflags 2.9.1", ] [[package]] name = "redox_users" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom 0.2.16", "libredox", "thiserror 1.0.69", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rgb" version = "0.8.52" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" dependencies = [ "bytemuck", ] [[package]] name = "rustc-demangle" version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" [[package]] name = "rustix" version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags 2.9.1", "errno", "libc", "linux-raw-sys 0.4.15", "windows-sys 0.59.0", ] [[package]] name = "rustix" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ "bitflags 2.9.1", "errno", "libc", "linux-raw-sys 0.9.4", "windows-sys 0.60.2", ] [[package]] name = "rustversion" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.142" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "serde_spanned" version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" dependencies = [ "serde", ] [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook" version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" dependencies = [ "libc", "signal-hook-registry", ] [[package]] name = "signal-hook-mio" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd" dependencies = [ "libc", "mio 0.8.11", "mio 1.0.4", "signal-hook", ] [[package]] name = "signal-hook-registry" version = "1.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" dependencies = [ "libc", ] [[package]] name = "simd-adler32" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" [[package]] name = "simd_helpers" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6" dependencies = [ "quote", ] [[package]] name = "slab" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ "heck", "proc-macro2", "quote", "rustversion", "syn", ] [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" version = "2.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7bc3fcb250e53458e712715cf74285c1f889686520d79294a9ef3bd7aa1fc619" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "system-deps" version = "6.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" dependencies = [ "cfg-expr", "heck", "pkg-config", "toml", "version-compare", ] [[package]] name = "target-lexicon" version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tempfile" version = "3.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15b61f8f20e3a6f7e0649d825294eaf317edce30f82cf6026e7e4cb9222a7d1e" dependencies = [ "fastrand", "getrandom 0.3.3", "once_cell", "rustix 1.0.8", "windows-sys 0.60.2", ] [[package]] name = "termcolor" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] [[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" dependencies = [ "thiserror-impl 2.0.16", ] [[package]] name = "thiserror-impl" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "thiserror-impl" version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tiff" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba1310fcea54c6a9a4fd1aad794ecc02c31682f6bfbecdf460bf19533eed1e3e" dependencies = [ "flate2", "jpeg-decoder", "weezl", ] [[package]] name = "time" version = "0.3.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" dependencies = [ "deranged", "num-conv", "powerfmt", "serde", "time-core", ] [[package]] name = "time-core" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" [[package]] name = "tokio" version = "1.47.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" dependencies = [ "backtrace", "io-uring", "libc", "mio 1.0.4", "pin-project-lite", "slab", "tokio-macros", ] [[package]] name = "tokio-macros" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "toml" version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit", ] [[package]] name = "toml_datetime" version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" dependencies = [ "serde", ] [[package]] name = "toml_edit" version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", "toml_write", "winnow", ] [[package]] name = "toml_write" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "typenum" version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-segmentation" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-truncate" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf" dependencies = [ "itertools 0.13.0", "unicode-segmentation", "unicode-width 0.1.14", ] [[package]] name = "unicode-width" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "v_frame" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "666b7727c8875d6ab5db9533418d7c764233ac9c0cff1d469aec8fa127597be2" dependencies = [ "aligned-vec", "num-traits", "wasm-bindgen", ] [[package]] name = "version-compare" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "852e951cb7832cb45cb1169900d19760cfa39b82bc0ea9c0e5a14ae88411c98b" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "viuer" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec2ede5c8814363f92f862892dfe71a266f6816b649ca435aed1ff5e2cf3454e" dependencies = [ "ansi_colours", "base64 0.21.7", "console", "crossterm 0.27.0", "image 0.24.9", "lazy_static", "tempfile", "termcolor", ] [[package]] name = "vsimd" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasm-bindgen" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" dependencies = [ "unicode-ident", ] [[package]] name = "weezl" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0978bf7171b3d90bac376700cb56d606feb40f251a475a5d6634613564460b22" dependencies = [ "windows-sys 0.60.2", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" dependencies = [ "windows-core", "windows-targets 0.52.6", ] [[package]] name = "windows-core" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" dependencies = [ "windows-implement", "windows-interface", "windows-result", "windows-strings", "windows-targets 0.52.6", ] [[package]] name = "windows-implement" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "windows-interface" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "windows-link" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-result" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-strings" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" dependencies = [ "windows-result", "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ "windows-targets 0.48.5", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ "windows-targets 0.53.3", ] [[package]] name = "windows-targets" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm 0.48.5", "windows_aarch64_msvc 0.48.5", "windows_i686_gnu 0.48.5", "windows_i686_msvc 0.48.5", "windows_x86_64_gnu 0.48.5", "windows_x86_64_gnullvm 0.48.5", "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows-targets" version = "0.53.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" dependencies = [ "windows-link", "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", "windows_i686_gnu 0.53.0", "windows_i686_gnullvm 0.53.0", "windows_i686_msvc 0.53.0", "windows_x86_64_gnu 0.53.0", "windows_x86_64_gnullvm 0.53.0", "windows_x86_64_msvc 0.53.0", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" [[package]] name = "windows_x86_64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" dependencies = [ "memchr", ] [[package]] name = "wit-bindgen-rt" version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags 2.9.1", ] [[package]] name = "x11rb" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d91ffca73ee7f68ce055750bf9f6eca0780b8c85eff9bc046a3b0da41755e12" dependencies = [ "gethostname", "rustix 0.38.44", "x11rb-protocol", ] [[package]] name = "x11rb-protocol" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec107c4503ea0b4a98ef47356329af139c0a4f7750e621cf2973cd3385ebcb3d" [[package]] name = "xml-rs" version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fd8403733700263c6eb89f192880191f1b83e332f7a20371ddcf421c4a337c7" [[package]] name = "xz2" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2" dependencies = [ "lzma-sys", ] [[package]] name = "zerocopy" version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" dependencies = [ "zeroize_derive", ] [[package]] name = "zeroize_derive" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "zip" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" dependencies = [ "byteorder", "crc32fast", "crossbeam-utils", "flate2", ] [[package]] name = "zip" version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50" dependencies = [ "aes", "arbitrary", "bzip2", "constant_time_eq", "crc32fast", "crossbeam-utils", "deflate64", "displaydoc", "flate2", "getrandom 0.3.3", "hmac", "indexmap", "lzma-rs", "memchr", "pbkdf2", "sha1", "thiserror 2.0.16", "time", "xz2", "zeroize", "zopfli", "zstd", ] [[package]] name = "zopfli" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7" dependencies = [ "bumpalo", "crc32fast", "log", "simd-adler32", ] [[package]] name = "zstd" version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" version = "7.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" version = "2.0.15+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" dependencies = [ "cc", "pkg-config", ] [[package]] name = "zune-core" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" [[package]] name = "zune-inflate" version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02" dependencies = [ "simd-adler32", ] [[package]] name = "zune-jpeg" version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc1f7e205ce79eb2da3cd71c5f55f3589785cb7c79f6a03d1c8d1491bda5d089" dependencies = [ "zune-core", ] doxx-0.1.2/Cargo.toml 0000644 00000005517 00000000001 0010024 0 ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" name = "doxx" version = "0.1.2" authors = ["bgreenwell"] build = false exclude = [ "CLAUDE.md", "assets/", "src/bin/generate_test_docs.rs", "tests/fixtures/README.md", ".DS_Store", "*.tmp", "*.log", "*.bak", "*~", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false default-run = "doxx" description = "Terminal document viewer for .docx files" homepage = "https://github.com/bgreenwell/doxx" documentation = "https://github.com/bgreenwell/doxx#readme" readme = "README.md" keywords = [ "docx", "terminal", "document", "viewer", "tui", ] categories = [ "command-line-utilities", "text-processing", ] license = "MIT" repository = "https://github.com/bgreenwell/doxx" [lib] name = "doxx" crate-type = [ "cdylib", "rlib", ] path = "src/lib.rs" [[bin]] name = "doxx" path = "src/main.rs" [[test]] name = "ansi_export_test" path = "tests/ansi_export_test.rs" [[test]] name = "integration_test" path = "tests/integration_test.rs" [[test]] name = "mixed_formatting_test" path = "tests/mixed_formatting_test.rs" [[test]] name = "search_functionality_test" path = "tests/search_functionality_test.rs" [[test]] name = "strikethrough_test" path = "tests/strikethrough_test.rs" [[test]] name = "test_image_extraction" path = "tests/test_image_extraction.rs" [[test]] name = "unicode_safety" path = "tests/unicode_safety.rs" [dependencies.anyhow] version = "1.0" [dependencies.arboard] version = "3.3" [dependencies.clap] version = "4.4" features = ["derive"] [dependencies.crossterm] version = "0.27" [dependencies.dirs] version = "5.0" [dependencies.docx-rs] version = "0.4" [dependencies.image] version = "0.25" [dependencies.once_cell] version = "1.19" [dependencies.quick-xml] version = "0.36" [dependencies.ratatui] version = "0.29" [dependencies.ratatui-image] version = "8.0" [dependencies.regex] version = "1.10" [dependencies.serde] version = "1.0" features = ["derive"] [dependencies.serde_json] version = "1.0" [dependencies.thiserror] version = "1.0" [dependencies.tokio] version = "1.0" features = [ "rt-multi-thread", "macros", "fs", ] [dependencies.toml] version = "0.8" [dependencies.unicode-segmentation] version = "1.10" [dependencies.viuer] version = "0.7" [dependencies.zip] version = "2.0" [profile.release] lto = true codegen-units = 1 doxx-0.1.2/Cargo.toml.orig 0000644 0000000 0000000 00000002540 10461020230 0013476 0 ustar 0000000 0000000 [package] name = "doxx" version = "0.1.2" edition = "2021" description = "Terminal document viewer for .docx files" license = "MIT" repository = "https://github.com/bgreenwell/doxx" homepage = "https://github.com/bgreenwell/doxx" documentation = "https://github.com/bgreenwell/doxx#readme" readme = "README.md" authors = ["bgreenwell"] keywords = ["docx", "terminal", "document", "viewer", "tui"] categories = ["command-line-utilities", "text-processing"] default-run = "doxx" exclude = [ "CLAUDE.md", "assets/", "src/bin/generate_test_docs.rs", "tests/fixtures/README.md", ".DS_Store", "*.tmp", "*.log", "*.bak", "*~" ] [lib] name = "doxx" crate-type = ["cdylib", "rlib"] [dependencies] # Document parsing docx-rs = "0.4" # Terminal UI ratatui = "0.29" crossterm = "0.27" arboard = "3.3" # Image support viuer = "0.7" image = "0.25" zip = "2.0" ratatui-image = "8.0" tokio = { version = "1.0", features = ["rt-multi-thread", "macros", "fs"] } # Text processing unicode-segmentation = "1.10" regex = "1.10" once_cell = "1.19" quick-xml = "0.36" # CLI and utilities clap = { version = "4.4", features = ["derive"] } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" anyhow = "1.0" thiserror = "1.0" # Configuration dirs = "5.0" toml = "0.8" # Release optimizations [profile.release] codegen-units = 1 lto = true doxx-0.1.2/Formula/doxx.rb 0000644 0000000 0000000 00000000756 10461020230 0013532 0 ustar 0000000 0000000 class Doxx < Formula desc "Terminal document viewer for .docx files" homepage "https://github.com/bgreenwell/doxx" url "https://github.com/bgreenwell/doxx/archive/refs/tags/v0.1.0.tar.gz" sha256 "REPLACE_WITH_ACTUAL_SHA256" license "MIT" head "https://github.com/bgreenwell/doxx.git", branch: "main" depends_on "rust" => :build def install system "cargo", "install", *std_cargo_args end test do assert_match "doxx", shell_output("#{bin}/doxx --version") end end doxx-0.1.2/ISSUE_26_KEYMAP_PLAN.md 0000644 0000000 0000000 00000013575 10461020230 0014222 0 ustar 0000000 0000000 # Implementation Plan for Configurable Keyboard Shortcuts (GitHub Issue #26) ## Issue Summary User request for configurable keyboard shortcuts similar to CLI tools like `less` and `vim`: - **Navigation**: `u`/`d` or `Ctrl+u`/`Ctrl+d` for page up/down - **Search**: `/` to initiate search - **Search navigation**: `n`/`N` for next/previous search results - **Positioning**: `H`/`L` for Home/End navigation Community suggestion to expand into comprehensive keymap module supporting different editor styles. ## Current State Analysis **Existing keyboard handling** (`src/ui.rs:415-510`): - Hardcoded key matching using `match key.code` statements - Different keybindings for each `ViewMode` (Document, Outline, Search, Help) - Basic vim-like navigation exists: `k`/`j` for up/down, `h` for help - Current search navigation uses `n`/`p` (not `n`/`N` as requested) - Missing requested shortcuts: `u`/`d` for page navigation, `/` for search, `H`/`L` for home/end **Architecture compatibility**: - Well-structured UI module with clear separation of concerns - `App` struct has all necessary state for navigation and search - CLI already has comprehensive argument structure ready for extension - Existing codebase follows Rust best practices ## Implementation Plan ### Phase 1: Core Keymap Infrastructure (~1 week) **1.1 Create keymap module** (`src/keymap.rs`) ```rust pub enum Action { ScrollUp, ScrollDown, PageUp, PageDown, GoHome, GoEnd, Search, SearchNext, SearchPrev, ToggleOutline, ToggleHelp, Copy, Quit } pub struct KeyMap { bindings: HashMap, name: String, } impl KeyMap { pub fn lookup(&self, key: KeyCode) -> Option pub fn validate(&self) -> Result<(), KeymapError> } ``` **1.2 Refactor UI key handling** (`src/ui.rs`) - Replace hardcoded key matching with action-based dispatch - Implement `handle_action()` method for uniform action processing - Maintain backward compatibility with existing shortcuts - Update each ViewMode to use keymap lookup ### Phase 2: CLI Integration (~3 days) **2.1 Add CLI keymap option** - Add `--keymap ` flag to `Cli` struct in `main.rs` - Support presets: "default", "vim", "less" - Update help text and documentation - Pass keymap selection to `App::new()` **2.2 Help system updates** - Dynamically generate help overlay from active keymap - Show current keybindings instead of hardcoded text - Add keymap name to status line ### Phase 3: Popular Presets (~1 week) **3.1 Vim/Less keymap preset** (addresses user request) ```rust pub fn vim_keymap() -> KeyMap { // u/Ctrl+u for page up, d/Ctrl+d for page down // H for home, L for end // / for search initiation // n/N for search next/previous (capital N for reverse) } ``` **3.2 Future extensible presets** - Foundation for VSCode-style, Emacs-style keymaps - Preset validation and error handling - Documentation for creating custom presets ### Phase 4: Testing & Validation (~3 days) **4.1 Comprehensive test suite** - Unit tests for keymap module (validation, conflicts, lookups) - Integration tests for each preset - Regression tests ensuring existing functionality works - Manual testing with real document navigation **4.2 Documentation updates** - README examples showing new keymap options - Help text updates - CHANGELOG entry ## Technical Details **Files to create/modify:** - `src/keymap.rs` (new) - Core keymap infrastructure - `src/ui.rs` - Refactor key handling to use keymaps - `src/main.rs` - Add CLI option - `src/lib.rs` - Export keymap types - `tests/keymap_test.rs` (new) - Comprehensive test suite **CLI Usage Examples:** ```bash # Use vim-style keybindings doxx document.docx --keymap vim # Use less-style keybindings doxx document.docx --keymap less # Default behavior (current keybindings) doxx document.docx --keymap default ``` **Expected Vim/Less Keymap:** - `u` / `Ctrl+u` - Page up (alternative to PageUp) - `d` / `Ctrl+d` - Page down (alternative to PageDown) - `H` - Go to document start (alternative to Home) - `L` - Go to document end (alternative to End) - `/` - Enter search mode - `n` - Next search result - `N` - Previous search result (capital N for reverse) - All existing shortcuts remain available **Backward Compatibility:** - All existing shortcuts continue working through default keymap - No breaking changes to current user experience - Purely additive feature **Architecture Benefits:** - Extensible design supports future keymap additions - Clean separation between key input and action handling - Easy to add new shortcuts without modifying core UI logic - Testable components with clear interfaces **Risk Assessment:** - **Risk level:** Low - purely additive feature - **Complexity:** Medium - requires architectural refactor but builds on solid foundation - **Testing coverage:** High - comprehensive unit and integration tests planned - **Performance impact:** Minimal - HashMap lookup vs direct matching **Success Criteria:** - All existing keyboard shortcuts continue to work (backward compatibility) - Users can select keymap presets with `--keymap vim` - Vim/less users get familiar navigation shortcuts - Help system shows current active keybindings - Architecture supports easy addition of new presets - Zero performance impact on key handling ## Timeline Estimate - **Total development time:** ~2 weeks - **Phase 1:** 1 week (core infrastructure) - **Phase 2:** 3 days (CLI integration) - **Phase 3:** 1 week (presets implementation) - **Phase 4:** 3 days (testing and documentation) ## Future Expansion Opportunities - Runtime keymap switching (press `K` to cycle through keymaps) - Custom user-defined shortcuts via config files - Context-aware keymaps (different shortcuts in search mode vs document mode) - Mouse gesture support for touchpad navigation ## Implementation Status - **Current status:** Planning phase complete - **Next step:** Begin Phase 1 implementation - **Dependencies:** None - can proceed immediately - **Blockers:** None identified doxx-0.1.2/LICENSE 0000644 0000000 0000000 00000002055 10461020230 0011615 0 ustar 0000000 0000000 MIT License Copyright (c) 2025 Ben Greenwell Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. doxx-0.1.2/README.md 0000644 0000000 0000000 00000025443 10461020230 0012075 0 ustar 0000000 0000000 # doxx 📄 > `.docx` files in your terminal — no Microsoft Word required [](https://github.com/bgreenwell/doxx/actions/workflows/ci.yml) [](https://opensource.org/licenses/MIT) [](https://www.rust-lang.org/) A fast, terminal-native document viewer for Word files. View, search, and export `.docx` documents without leaving your command line. ## Screenshots Terminal image display Color support Smart tables with alignment Lists and formatting Inline and display equations ## 🎬 Demo Mixed formatting with colors, bold, italic, underline, strikethrough and interactive navigation ## ✨ Features - **Beautiful terminal rendering** with formatting, tables, and lists - **Equation support** — LaTeX rendering for inline and display equations 📐 - **Fast search** with highlighting 🔍 - **Smart tables** with proper alignment and Unicode borders - **Copy to clipboard** — grab content directly from the terminal - **Export formats** — Markdown, CSV, JSON, plain text, ANSI-colored output - **Terminal images** for Kitty, iTerm2, WezTerm 🖼️ - **Color support** — see Word document colors in your terminal ## 🚀 Installation ### Package managers #### Homebrew (macOS/Linux) ```bash brew install doxx ``` #### Cargo (cross-platform) ```bash cargo install doxx ``` #### Arch Linux ```bash pacman -S doxx ``` The AUR package is also available for the development version: ```bash yay -S doxx-git ``` *Thanks to [@mhegreberg](https://github.com/mhegreberg) for creating and maintaining the AUR package!* #### Nix (cross-platform) ```bash nix profile install github:bgreenwell/doxx ``` *Thanks to [@bobberb](https://github.com/bobberb) for creating the Nix flake!* #### Conda-Forge (cross-platform) ```bash conda install doxx ``` or globally using [Pixi](pixi.sh): ```bash pixi global install doxx ``` #### Scoop (Windows) ```bash # Coming soon scoop bucket add doxx https://github.com/bgreenwell/doxx-scoop scoop install doxx ``` ### Pre-built binaries Download from [GitHub releases](https://github.com/bgreenwell/doxx/releases): ```bash # macOS/Linux - automatic platform detection curl -L https://github.com/bgreenwell/doxx/releases/latest/download/doxx-$(uname -s)-$(uname -m).tar.gz | tar xz sudo mv doxx /usr/local/bin/ # Verify installation doxx --version ``` **Available platforms:** - **Linux**: `x86_64-unknown-linux-musl` (statically linked) - **macOS**: `x86_64-apple-darwin` (Intel) and `aarch64-apple-darwin` (Apple Silicon) - **Windows**: `x86_64-pc-windows-msvc` ### Build from source ```bash git clone https://github.com/bgreenwell/doxx.git cd doxx cargo install --path . # Or for development cargo build --release ``` **Requirements:** - Rust 1.70+ - System dependencies: `libxcb` (Linux only) ## 🎯 Usage ```bash # View a document doxx report.docx # Search for content doxx contract.docx --search "payment" # Start with outline view doxx document.docx --outline # Export to different formats doxx data.docx --export csv > data.csv doxx report.docx --export markdown > report.md # View with images (supported terminals) doxx presentation.docx --images --export text # Enable color rendering doxx slides.docx --color ``` ## 📋 Command Line Options ### Basic options ```bash doxx [OPTIONS] ``` | Option | Description | |--------|-------------| | `` | Input document file (.docx) | | `-h, --help` | Show help information | | `-V, --version` | Show version information | ### Viewing options | Option | Description | |--------|-------------| | `-o, --outline` | Start with outline view for quick navigation | | `-p, --page ` | Jump to specific page number on startup | | `-s, --search ` | Search and highlight term immediately | | `--force-ui` | Force interactive UI mode (bypass TTY detection) | | `--color` | Enable color support for text rendering | ### Export options | Option | Values | Description | |--------|--------|-------------| | `--export ` | `markdown`, `text`, `csv`, `json`, `ansi` | Export document instead of viewing | **Export examples:** ```bash doxx report.docx --export markdown # Convert to Markdown doxx data.docx --export csv # Extract tables as CSV (tables only!) doxx document.docx --export text # Plain text output doxx structure.docx --export json # Document metadata as JSON doxx document.docx --export ansi # ANSI-colored terminal output ``` **📊 CSV export note:** The CSV export extracts **only tables** from the document, ignoring all text content. Perfect for pulling structured data from business reports, research papers, or surveys for analysis in Excel, Python, or databases. ### ANSI export options | Option | Values | Description | |--------|--------|-------------| | `-w, --terminal-width ` | Number | Set terminal width for formatting (default: $COLUMNS or 80) | | `--color-depth ` | `auto`, `1`, `4`, `8`, `24` | Control color rendering depth | **ANSI export examples:** ```bash doxx document.docx --export ansi # Full color ANSI output doxx document.docx --export ansi --color-depth 1 # Monochrome (no colors) doxx document.docx --export ansi --color-depth 4 # 16 colors doxx document.docx --export ansi --terminal-width 80 # Set terminal width doxx report.docx --export ansi | less -R # Pipe to less with color support ``` **🌈 Color depth options:** - `auto` - Auto-detect terminal capabilities - `1` - Monochrome (no colors, formatting only) - `4` - 16 colors (standard ANSI colors) - `8` - 256 colors (extended ANSI palette) - `24` - True color (16.7 million colors) ### Image options | Option | Description | |--------|-------------| | `--images` | Display images inline in terminal (auto-detect capabilities) | | `--extract-images ` | Extract images to specified directory | | `--image-width ` | Maximum image width in terminal columns (default: auto-detect) | | `--image-height ` | Maximum image height in terminal rows (default: auto-detect) | | `--image-scale ` | Image scaling factor (0.1 to 2.0, default: 1.0) | **Image examples:** ```bash doxx presentation.docx --images # Show images inline doxx document.docx --images --image-width 80 # Limit image width doxx slides.docx --extract-images ./images/ # Save images to folder ``` **⚠️ Image display notes:** - `--images` currently works with `--export text` mode and shows placeholders in TUI - Supports iTerm2, Kitty, and WezTerm terminals ## ⌨️ Navigation | Key | Action | |-----|--------| | `↑`/`k` | Scroll up | | `↓`/`j` | Scroll down | | `o` | Toggle outline | | `s` | Search | | `c` | Copy to clipboard | | `h` | Help | | `q` | Quit | ## 🔧 Why doxx? Current terminal tools for Word documents: - **docx2txt** → Loses all formatting, mangled tables - **pandoc** → Complex chain, formatting lost - **antiword** → Only handles old `.doc` files **doxx** gives you: - ✅ Rich formatting preserved (bold, italic, headers) - ✅ Professional table rendering with alignment - ✅ Equation support (inline and display LaTeX) - ✅ Interactive navigation and search - ✅ Multiple export formats for workflows - ✅ Terminal image display for modern terminals - ✅ Fast startup (50ms vs Word's 8+ seconds) Perfect for developers, sysadmins, and anyone who prefers the terminal. ## 📊 Examples ### Quick document analysis ```bash # Get overview and search doxx quarterly-report.docx doxx --search "revenue" # Extract tables for analysis doxx financial-data.docx --export csv | python analyze.py ``` ### Copy workflows ```bash # Review and copy sections doxx meeting-notes.docx # Press 'c' to copy current view to clipboard # Copy search results doxx specs.docx --search "requirements" # Press F2 to copy results with context ``` ### Pipeline integration ```bash # Extract text for processing doxx notes.docx --export text | grep "action items" # Get document structure doxx report.docx --export json | jq '.metadata' ``` ## 🏗️ Architecture Built with Rust for performance: - **[docx-rs](https://crates.io/crates/docx-rs)** — Document parsing - **[ratatui](https://crates.io/crates/ratatui)** — Terminal UI - **[viuer](https://crates.io/crates/viuer)** — Image rendering - **[unicode-segmentation](https://crates.io/crates/unicode-segmentation)** — Proper Unicode handling ## 🛠️ Development ```bash # Build and test cargo build --release cargo test # Run with sample document cargo run -- tests/fixtures/minimal.docx ``` ## Known limitations **Equation positioning:** Display equations may not appear at exact positions due to limitations in the underlying docx-rs parsing library. We've filed an [upstream issue](https://github.com/bokuweb/docx-rs/issues) and are planning a complete fix for v0.2.0 using direct XML parsing. ## Roadmap - Perfect equation positioning (v0.2.0) - Image support in TUI via ratatui-image crate - Enhanced table support (merged cells, complex layouts) - Performance improvements for large documents - Hyperlink navigation - Custom themes ## 💡 Inspiration This project was inspired by [Charm](https://github.com/charmbracelet)'s [Glow](https://github.com/charmbracelet/glow) package — the beautiful terminal Markdown renderer that shows how terminal document viewing can be both powerful and elegant. Just as Glow brings rich Markdown rendering to your command line, doxx aims to do the same for Microsoft Word documents. Thanks to the Charm team for the inspiration! ✨ ## 📝 License MIT License — see [LICENSE](LICENSE) file for details. --- **Made for developers who live in the terminal** 🚀 doxx-0.1.2/RELEASE.md 0000644 0000000 0000000 00000011353 10461020230 0012213 0 ustar 0000000 0000000 # Release guide for doxx This document outlines the complete release process for doxx, including automated pipelines and manual steps. ## 🎯 Release overview The release pipeline includes: - ✅ **Cross-platform binaries** (Linux, macOS Intel/ARM, Windows) - ✅ **GitHub releases** with automated changelog - ✅ **crates.io publishing** for `cargo install doxx` - ✅ **Homebrew formula** (automated updates) - ✅ **Checksums** for security verification - ✅ **Modern GitHub Actions** with proper error handling ## 🚀 Quick release process ### 1. Prepare release ```bash # Make sure you're on main branch and working directory is clean git checkout main git pull origin main # Run the automated release script ./scripts/release.sh [major|minor|patch] # Example for patch release (0.1.0 -> 0.1.1) ./scripts/release.sh patch ``` ### 2. The script will: - ✅ Bump version in `Cargo.toml` - ✅ Run tests to ensure everything works - ✅ Update `Cargo.lock` - ✅ Commit version bump - ✅ Create and push git tag (e.g., `v0.1.1`) - ✅ Trigger GitHub Actions automatically ### 3. GitHub Actions will: - ✅ Build cross-platform binaries (Linux musl, macOS Intel/ARM, Windows) - ✅ Create draft GitHub release with CHANGELOG.md - ✅ Generate SHA256 checksums - ✅ Publish to crates.io (when draft is published) - ✅ Update Homebrew formula automatically ### 4. Manual steps 1. **Review draft release** - Go to [GitHub releases](https://github.com/bgreenwell/doxx/releases) - Edit the draft release created by Actions - Add release highlights and breaking changes if any 2. **Publish release** - Click "Publish release" to make it live - This triggers crates.io publishing - Homebrew formula gets updated automatically ## 📦 Package manager status ### ✅ Active package managers - **crates.io**: `cargo install doxx` ✅ Automated - **GitHub releases**: Direct binary downloads ✅ Automated - **Homebrew**: In progress 🚧 (Formula ready, tap needed) ### 🚧 Future package managers - **Scoop** (Windows): Repository structure ready - **Chocolatey** (Windows): Future consideration - **Snap** (Linux): Future consideration - **AUR** (Arch Linux): Community contribution welcome ## 🔍 Testing release pipeline ### Test without publishing ```bash # Test packaging for crates.io (dry run) cargo publish --dry-run # Test binary builds locally cargo build --release --target x86_64-unknown-linux-musl cargo build --release --target x86_64-apple-darwin # Test CLI works correctly ./target/release/doxx --version ./target/release/doxx tests/fixtures/minimal.docx --export text ``` ### Verify release assets After GitHub release is published: ```bash # Download and verify checksums wget https://github.com/bgreenwell/doxx/releases/latest/download/doxx-checksums.txt wget https://github.com/bgreenwell/doxx/releases/latest/download/doxx-linux-x86_64.tar.gz # Verify checksum matches sha256sum doxx-linux-x86_64.tar.gz grep linux-x86_64 doxx-checksums.txt ``` ## 📋 Pre-release checklist - [ ] All CI/CD tests passing on main branch - [ ] `CHANGELOG.md` updated with release notes - [ ] Version number follows semantic versioning - [ ] All new features documented in README - [ ] Breaking changes clearly documented - [ ] Security issues addressed - [ ] Dependencies updated and audited ## 🛠 Troubleshooting ### Release script issues ```bash # If release script fails, check: git status # Working directory clean? cargo test --all-features # All tests pass? cargo clippy -- -D warnings # No linting issues? ``` ### GitHub Actions issues - Check Actions tab for build failures - Common issues: Missing secrets (`CARGO_REGISTRY_TOKEN`) - Platform-specific build failures (usually dependency issues) ### crates.io publishing issues - Ensure `CARGO_REGISTRY_TOKEN` secret is set - Verify all required metadata in `Cargo.toml` - Check for naming conflicts ## 🔐 Required secrets Repository secrets needed for full automation: - `CARGO_REGISTRY_TOKEN`: For publishing to crates.io - Get from https://crates.io/me - Scope: "Publish new crates and update existing crates" ## 📈 Success metrics After release, verify: - [ ] GitHub release created with all binary assets - [ ] crates.io shows new version (may take a few minutes) - [ ] `cargo install doxx` works with new version - [ ] Download links in README work correctly - [ ] Homebrew formula updated (if tap is public) ## 🎉 Post-release 1. **Announce release** - Update README badges if needed - Consider social media announcement - Update any documentation sites 2. **Monitor** - Watch for user issues or bug reports - Monitor download statistics - Track performance metrics --- **Need help?** Check the GitHub Actions logs or open an issue for release pipeline problems. doxx-0.1.2/RELEASE_v0.1.2.md 0000644 0000000 0000000 00000015220 10461020230 0013114 0 ustar 0000000 0000000 # Release v0.1.2 Planning **Target Date**: TBD **Branch**: `prep-release-v0.1.2` ## Release Goals Fix critical bugs and polish existing features for a stable release suitable for wider adoption. ## Issues Analysis ### MUST FIX (Blocking Release): #### ✅ #40 - File type validation (xlsx/zip crashes) - **Problem**: App hangs when given .xlsx or .zip files - **Impact**: HIGH - Causes confusion and system resource consumption - **Effort**: LOW - **Solution**: Validate file extension and ZIP structure before parsing - **Status**: ✅ COMPLETED #### ❌ #45 - Fix `-w` terminal width flag - **Problem**: `-w` flag doesn't change output width in ANSI export - **Impact**: MEDIUM - Feature is incomplete - **Effort**: HIGH - Requires text wrapping implementation - **Findings**: Flag only affects separators, not paragraph text. Needs full text wrapping feature. - **Status**: 🔄 DEFER TO v0.2.0 #### ✅ #46 - VirusTotal false positives - **Problem**: 3/69 vendors flag Windows binary - **Impact**: LOW - Just needs explanation - **Solution**: Post detailed explanation, close issue - **Status**: 📝 TODO (comment only) #### ✅ #56 - Better error for .doc files - **Problem**: Confusing error when opening old .doc format - **Impact**: LOW - **Solution**: Duplicate of #40, will be fixed automatically - **Status**: 🔄 Close as duplicate ### NICE TO HAVE (If Time Permits): #### ✅ #58 - Equations appearing at bottom (PARTIAL FIX) - **Problem**: Display equations render at end of document instead of inline - **Impact**: MEDIUM-HIGH - **Effort**: MEDIUM (architectural limitation discovered) - **Status**: ✅ PARTIAL FIX IMPLEMENTED - **What was fixed**: - Added paragraph index tracking to equation extraction - Created merge function to insert equations at approximate positions - Equations now appear inline rather than all at end - **Known limitation**: - docx-rs library doesn't parse equation-only paragraphs - Positioning not pixel-perfect for all documents - Works well for most cases, some edge cases remain - **Next steps**: Full XML-based parsing for perfect positioning (v0.2.0) #### ⚠️ #26 - Configurable keyboard shortcuts - **Problem**: Users want vim/less-style keybindings - **Impact**: MEDIUM - Quality of life - **Effort**: MEDIUM - Requires keymap refactor - **Status**: 🎯 Planned for this release per prior commitment - **Decision**: Include if time allows ### DEFER TO v0.2.0: ❌ **#45** - Terminal width text wrapping (HIGH effort, requires new wrapping system) ❌ **#58** - Perfect equation positioning (MEDIUM effort, needs full XML parsing) ❌ **#24** - Advanced numbering improvements (HIGH effort, complex) ❌ **#35** - Kitty graphics in TUI (works in export, TUI is complex) ❌ **#13** - Text selection/copy (HIGH effort) ❌ **#3** - Font-size based heading detection (MEDIUM effort, low priority) ❌ **#37** - Yazi plugin (external, community can build) ❌ **#31** - WebAssembly support (unclear use case, not CLI focus) ## Implementation Checklist ### Phase 1: Critical Fixes ✅ COMPLETED - [x] Implement file type validation (#40) - [x] Add `validate_docx_file()` function - [x] Check extension is `.docx` - [x] Check ZIP contains `word/document.xml` - [x] Add helpful error messages - [x] Test with .xlsx, .zip, .doc files - [x] Investigate `-w` terminal width flag (#45) - [x] Debug why width parameter isn't being used - [x] Determined requires full text wrapping implementation - [x] Decision: Defer to v0.2.0 ### Phase 2: Equation Positioning ✅ COMPLETED (PARTIAL) - [x] Investigate #58 equation positioning - [x] Add paragraph index tracking - [x] Implement merge function - [x] Test with user-provided document - [x] Document known limitations - [x] Plan full fix for v0.2.0 ### Phase 3: Issue Management - [ ] Post explanation on #46 (false positive) - [ ] Close #56 as duplicate of #40 ### Phase 4: Optional (Time Permitting) - [ ] Implement #26 keyboard shortcuts (if feasible) ### Phase 5: Release Prep - [ ] Update CHANGELOG.md - [ ] Run full test suite - [ ] Test with problematic documents from issues - [ ] Update version to 0.1.2 in Cargo.toml - [ ] Create release notes - [ ] Merge to main - [ ] Tag release ## Testing Plan ### Regression Testing - [ ] All existing tests pass - [ ] Formatting still works (bold, italic, colors) - [ ] ANSI export works - [ ] Image support works - [ ] Equation support works ### New Feature Testing - [x] File validation rejects .xlsx files with helpful error - [x] File validation rejects .zip files with helpful error - [x] File validation rejects .doc files with helpful error (via extension check) - [x] File validation accepts valid .docx files - [x] Equation positioning improved (inline vs all at end) ### Document Testing - [ ] business-report.docx - [ ] example.docx - [ ] equations document (#58 if investigating) - [ ] User-provided documents from issues ## Release Notes Draft ### v0.1.2 **Release Date**: TBD #### Fixed - **File Type Validation**: Added proper validation to reject non-.docx files (Excel, ZIP, old Word .doc) with helpful error messages (#40, #56) - Checks file extension is `.docx` - Validates ZIP structure contains `word/document.xml` - Detects Excel files specifically with clear error messages - Prevents hangs and crashes from invalid file types - **Equation Positioning (Partial)**: Improved display equation positioning (#58) - Equations now appear inline in document flow instead of all at end - Added paragraph index tracking for better positioning - Known limitation: Some edge cases may not have pixel-perfect positioning - Full fix planned for v0.2.0 with complete XML parsing - **Security**: Addressed VirusTotal false positive detections with documentation (#46) #### Changed - Improved error messages for invalid file formats - Enhanced equation extraction to track paragraph positions #### Notes - This release focuses on stability and bug fixes - All integration tests now work with Debian packaging (#60) - Text formatting preservation from v0.1.1 continues to work - Terminal width text wrapping deferred to v0.2.0 (#45 - requires larger feature implementation) ## Post-Release ### Immediate Actions - [ ] Close fixed issues - [ ] Announce release - [ ] Monitor for new issue reports ### v0.2.0 Planning High priority for next release: - **Perfect equation positioning** (#58) - Full XML-based parsing for accurate placement - **Terminal width text wrapping** (#45) - Implement paragraph wrapping system - **Configurable keyboard shortcuts** (#26) - vim/less-style keybindings Consider for v0.2.0 or later: - Advanced numbering improvements (#24) - Kitty graphics in TUI (#35) - Text selection and copy (#13) - Font-size based heading detection (#3) doxx-0.1.2/flake.lock 0000644 0000000 0000000 00000003766 10461020230 0012556 0 ustar 0000000 0000000 { "nodes": { "flake-utils": { "inputs": { "systems": "systems" }, "locked": { "lastModified": 1731533236, "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { "owner": "numtide", "repo": "flake-utils", "type": "github" } }, "nixpkgs": { "locked": { "lastModified": 1755615617, "narHash": "sha256-HMwfAJBdrr8wXAkbGhtcby1zGFvs+StOp19xNsbqdOg=", "owner": "NixOS", "repo": "nixpkgs", "rev": "20075955deac2583bb12f07151c2df830ef346b4", "type": "github" }, "original": { "owner": "NixOS", "ref": "nixos-unstable", "repo": "nixpkgs", "type": "github" } }, "root": { "inputs": { "flake-utils": "flake-utils", "nixpkgs": "nixpkgs", "rust-overlay": "rust-overlay" } }, "rust-overlay": { "inputs": { "nixpkgs": [ "nixpkgs" ] }, "locked": { "lastModified": 1756003222, "narHash": "sha256-lmEMhIIbjt8Wp1EYbNqCojuU9ygyDFv8Tu0X1k8qIMc=", "owner": "oxalica", "repo": "rust-overlay", "rev": "88ceedecde53e809b4bf8b5fd10d181889d9bac7", "type": "github" }, "original": { "owner": "oxalica", "repo": "rust-overlay", "type": "github" } }, "systems": { "locked": { "lastModified": 1681028828, "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", "owner": "nix-systems", "repo": "default", "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", "type": "github" }, "original": { "owner": "nix-systems", "repo": "default", "type": "github" } } }, "root": "root", "version": 7 } doxx-0.1.2/flake.nix 0000644 0000000 0000000 00000011021 10461020230 0012403 0 ustar 0000000 0000000 { description = "doxx - Expose the contents of .docx files without leaving your terminal"; inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; rust-overlay = { url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; flake-utils.url = "github:numtide/flake-utils"; }; outputs = { self, nixpkgs, rust-overlay, flake-utils }: flake-utils.lib.eachDefaultSystem (system: let overlays = [ (import rust-overlay) ]; pkgs = import nixpkgs { inherit system overlays; }; # Use the latest stable Rust toolchain rustToolchain = pkgs.rust-bin.stable.latest.default.override { extensions = [ "rust-src" "clippy" "rustfmt" ]; }; # Define the package doxx = pkgs.rustPlatform.buildRustPackage rec { pname = "doxx"; version = "0.1.2"; src = ./.; cargoLock = { lockFile = ./Cargo.lock; }; doCheck = true; checkFlags = [ # FIXME: Fails for some reason "--skip=terminal_image::tests::test_renderer_creation" ]; meta = with pkgs.lib; { description = "Expose the contents of .docx files without leaving your terminal. Fast, safe, and smart — no Office required!"; homepage = "https://github.com/bgreenwell/doxx"; license = licenses.mit; maintainers = [ ]; platforms = platforms.all; }; }; in { # Default package packages.default = doxx; packages.doxx = doxx; # Development shell devShells.default = pkgs.mkShell { buildInputs = with pkgs; [ # Rust toolchain rustToolchain # Development tools cargo-watch cargo-edit cargo-audit cargo-deny cargo-outdated cargo-expand # For macro expansion debugging # Additional development tools git # LSP and formatting tools rust-analyzer # For testing .docx files and document creation pandoc # Debugging tools gdb ]; # For better terminal support TERM = "xterm-256color"; # Development shell hook shellHook = '' echo "❄️ Welcome to the doxx Nix development environment!" echo "" echo "📋 Dependencies loaded:" echo " - Rust ${rustToolchain.version} with clippy, rustfmt, rust-src" echo " - ratatui for terminal UI" echo " - crossterm for cross-platform terminal" echo " - arboard for clipboard support" echo " - docx-rs for document parsing" echo "" echo "❄️ Nix commands:" echo " nix build - Build the project" echo " nix run - Run doxx" echo " nix run . -- --help - Run with help flag" echo " nix develop - Enter this dev shell" echo " nix flake check - Run all checks (fmt, clippy, build)" echo "" echo "📄 Usage examples:" echo " nix run . -- document.docx" echo " nix run . -- document.docx --outline" echo " nix run . -- document.docx --search 'keyword'" echo " nix run . -- document.docx --export csv" echo "" echo "🔧 Development commands (if you need them):" echo " cargo build - Direct build (uses Nix env)" echo " cargo watch -x run - Live reload during development" echo " cargo clippy - Run linter" echo " cargo fmt - Format code" echo "" echo "💡 Pro tip: 'nix run github:bgreenwell/doxx -- file.docx' to run from anywhere!" echo "" ''; }; # Apps for easy running apps.default = { type = "app"; program = "${doxx}/bin/doxx"; }; # Checks checks = { build = doxx; # Add format check fmt-check = pkgs.runCommand "fmt-check" { buildInputs = [ rustToolchain ]; } '' cd ${self} cargo fmt --all -- --check touch $out ''; }; }); } doxx-0.1.2/rustfmt.toml 0000644 0000000 0000000 00000000356 10461020230 0013213 0 ustar 0000000 0000000 # Rust formatting configuration for doxx # Using only stable features for compatibility edition = "2021" max_width = 100 hard_tabs = false tab_spaces = 4 newline_style = "Auto" remove_nested_parens = true use_small_heuristics = "Default" doxx-0.1.2/scripts/release.sh 0000755 0000000 0000000 00000005732 10461020230 0014263 0 ustar 0000000 0000000 #!/usr/bin/env bash set -euo pipefail # Release script for doxx # Usage: ./scripts/release.sh [major|minor|patch] SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" PROJECT_DIR="$(dirname "$SCRIPT_DIR")" # Colors for output RED='\033[0;31m' GREEN='\033[0;32m' YELLOW='\033[1;33m' NC='\033[0m' # No Color log() { echo -e "${GREEN}[INFO]${NC} $1" } warn() { echo -e "${YELLOW}[WARN]${NC} $1" } error() { echo -e "${RED}[ERROR]${NC} $1" exit 1 } # Check if we're on main branch current_branch=$(git rev-parse --abbrev-ref HEAD) if [[ "$current_branch" != "main" ]]; then error "Must be on main branch to release. Current branch: $current_branch" fi # Check if working directory is clean if [[ -n $(git status --porcelain) ]]; then error "Working directory must be clean to release" fi # Check if we have the required tools command -v cargo >/dev/null 2>&1 || error "cargo is required" command -v git >/dev/null 2>&1 || error "git is required" # Get current version current_version=$(grep '^version = ' "$PROJECT_DIR/Cargo.toml" | sed 's/version = "\(.*\)"/\1/') log "Current version: $current_version" # Parse version parts IFS='.' read -r -a version_parts <<< "$current_version" major=${version_parts[0]} minor=${version_parts[1]} patch=${version_parts[2]} # Determine bump type bump_type=${1:-patch} case "$bump_type" in major) new_version="$((major + 1)).0.0" ;; minor) new_version="$major.$((minor + 1)).0" ;; patch) new_version="$major.$minor.$((patch + 1))" ;; *) error "Invalid bump type: $bump_type. Use major, minor, or patch" ;; esac log "Bumping $bump_type version: $current_version -> $new_version" # Update version in Cargo.toml sed -i.bak "s/^version = \"$current_version\"/version = \"$new_version\"/" "$PROJECT_DIR/Cargo.toml" rm "$PROJECT_DIR/Cargo.toml.bak" # Run tests to make sure everything still works log "Running tests..." cd "$PROJECT_DIR" cargo test --all-features # Update Cargo.lock log "Updating Cargo.lock..." cargo build --release # Create changelog entry (if CHANGELOG.md exists) if [[ -f "$PROJECT_DIR/CHANGELOG.md" ]]; then log "Please update CHANGELOG.md with release notes for v$new_version" read -p "Press enter when ready to continue..." fi # Commit version bump git add Cargo.toml Cargo.lock git commit -m "chore: bump version to v$new_version" # Create and push tag tag_name="v$new_version" log "Creating tag: $tag_name" git tag -a "$tag_name" -m "Release $tag_name" log "Pushing to origin..." git push origin main git push origin "$tag_name" log "Release $tag_name has been pushed!" log "GitHub Actions will now:" log " 1. Build cross-platform binaries" log " 2. Create GitHub release (draft)" log " 3. Publish to crates.io" log " 4. Update Homebrew formula" log "" log "Next steps:" log " 1. Go to GitHub releases and edit the draft release" log " 2. Add release notes and publish the release" log " 3. Verify package installations work correctly" doxx-0.1.2/src/ansi.rs 0000644 0000000 0000000 00000025761 10461020230 0012710 0 ustar 0000000 0000000 use anyhow::Result; use crossterm::style::{ Attribute, Color as CrosstermColor, ResetColor, SetAttribute, SetForegroundColor, }; use std::fmt::Write; use crate::{document::*, ColorDepth}; pub struct AnsiOptions { pub terminal_width: usize, pub color_depth: ColorDepth, } impl Default for AnsiOptions { fn default() -> Self { Self { terminal_width: std::env::var("COLUMNS") .ok() .and_then(|s| s.parse().ok()) .unwrap_or(80), color_depth: ColorDepth::Auto, } } } pub fn export_to_ansi_with_options(document: &Document, options: &AnsiOptions) -> Result { let mut output = String::new(); // Add document title write_ansi_heading(&mut output, &document.title, 1, options)?; output.push('\n'); // Add metadata writeln!( output, "{}Document Information{}", format_ansi_text("", true, false, false, false, None, options), format_ansi_reset() )?; writeln!(output, "- File: {}", document.metadata.file_path)?; writeln!(output, "- Pages: {}", document.metadata.page_count)?; writeln!(output, "- Words: {}", document.metadata.word_count)?; if let Some(author) = &document.metadata.author { writeln!(output, "- Author: {author}")?; } output.push('\n'); // Separator let separator = "=".repeat(std::cmp::min(50, options.terminal_width)); writeln!(output, "{separator}")?; output.push('\n'); // Convert document content for element in &document.elements { match element { DocumentElement::Heading { level, text, number, } => { let heading_text = if let Some(number) = number { format!("{number} {text}") } else { text.clone() }; write_ansi_heading(&mut output, &heading_text, *level, options)?; output.push('\n'); } DocumentElement::Paragraph { runs } => { if runs.is_empty() || runs.iter().all(|run| run.text.trim().is_empty()) { continue; } write_ansi_paragraph(&mut output, runs, options)?; output.push('\n'); } DocumentElement::List { items, ordered } => { write_ansi_list(&mut output, items, *ordered, options)?; output.push('\n'); } DocumentElement::Table { table } => { write_ansi_table(&mut output, table, options)?; output.push('\n'); } DocumentElement::Image { description, .. } => { writeln!( output, "{}🖼️ [Image: {}]{}", format_ansi_color(Some("#FF00FF"), options), // Magenta description, format_ansi_reset() )?; output.push('\n'); } DocumentElement::Equation { latex, .. } => { writeln!( output, "{}📐 {}{}", format_ansi_color(Some("#00AAFF"), options), // Cyan latex, format_ansi_reset() )?; output.push('\n'); } DocumentElement::PageBreak => { let separator = "─".repeat(std::cmp::min(60, options.terminal_width)); writeln!( output, "{}{}{}", format_ansi_color(Some("#666666"), options), // Dark gray separator, format_ansi_reset() )?; output.push('\n'); } } } Ok(output) } fn write_ansi_heading( output: &mut String, text: &str, level: u8, options: &AnsiOptions, ) -> Result<()> { let color = match level { 1 => Some("#FFFF00"), // Yellow 2 => Some("#00FF00"), // Green _ => Some("#00FFFF"), // Cyan }; let prefix = match level { 1 => "■ ", 2 => " ▶ ", 3 => " ◦ ", _ => " • ", }; let formatted_text = format_ansi_text( &format!("{prefix}{text}"), true, false, false, false, color, options, ); writeln!(output, "{}{}", formatted_text, format_ansi_reset())?; Ok(()) } fn write_ansi_paragraph( output: &mut String, runs: &[FormattedRun], options: &AnsiOptions, ) -> Result<()> { for run in runs { let formatted_text = format_ansi_text( &run.text, run.formatting.bold, run.formatting.italic, run.formatting.underline, run.formatting.strikethrough, run.formatting.color.as_deref(), options, ); write!(output, "{formatted_text}")?; } write!(output, "{}", format_ansi_reset())?; writeln!(output)?; Ok(()) } fn write_ansi_list( output: &mut String, items: &[ListItem], ordered: bool, options: &AnsiOptions, ) -> Result<()> { for (i, item) in items.iter().enumerate() { let bullet = if ordered { format!("{}. ", i + 1) } else { "• ".to_string() }; let indent = " ".repeat(item.level as usize); let bullet_color = format_ansi_color(Some("#0066FF"), options); // Blue write!( output, "{}{}{}{}", bullet_color, indent, bullet, format_ansi_reset() )?; for run in &item.runs { let formatted_text = format_ansi_text( &run.text, run.formatting.bold, run.formatting.italic, run.formatting.underline, run.formatting.strikethrough, run.formatting.color.as_deref(), options, ); write!(output, "{formatted_text}")?; } write!(output, "{}", format_ansi_reset())?; writeln!(output)?; } Ok(()) } fn write_ansi_table(output: &mut String, table: &TableData, options: &AnsiOptions) -> Result<()> { // Add table title if present if let Some(title) = &table.metadata.title { let formatted_title = format_ansi_text( &format!("📊 {title}"), true, false, false, false, Some("#0066FF"), // Blue options, ); writeln!(output, "{}{}", formatted_title, format_ansi_reset())?; output.push('\n'); } // Simple table rendering for ANSI if !table.headers.is_empty() { // Headers write!(output, "│")?; for header in &table.headers { write!( output, " {}{}{} │", format_ansi_text("", true, false, false, false, None, options), header.content, format_ansi_reset() )?; } writeln!(output)?; // Separator write!(output, "├")?; for _ in &table.headers { write!(output, "─────┼")?; } writeln!(output, "┤")?; // Rows for row in &table.rows { write!(output, "│")?; for cell in row { write!(output, " {} │", cell.content)?; } writeln!(output)?; } } Ok(()) } fn format_ansi_text( text: &str, bold: bool, italic: bool, underline: bool, strikethrough: bool, color: Option<&str>, options: &AnsiOptions, ) -> String { let mut result = String::new(); // Apply formatting attributes if bold { result.push_str(&format!("{}", SetAttribute(Attribute::Bold))); } if italic { result.push_str(&format!("{}", SetAttribute(Attribute::Italic))); } if underline { result.push_str(&format!("{}", SetAttribute(Attribute::Underlined))); } if strikethrough { result.push_str(&format!("{}", SetAttribute(Attribute::CrossedOut))); } // Apply color if let Some(color_hex) = color { result.push_str(&format_ansi_color(Some(color_hex), options)); } result.push_str(text); result } fn format_ansi_color(color_hex: Option<&str>, options: &AnsiOptions) -> String { let Some(hex) = color_hex else { return String::new(); }; match convert_hex_to_crossterm_color(hex, &options.color_depth) { Some(color) => format!("{}", SetForegroundColor(color)), None => String::new(), } } fn format_ansi_reset() -> String { format!("{ResetColor}") } fn convert_hex_to_crossterm_color(hex: &str, color_depth: &ColorDepth) -> Option { // Remove # if present and ensure we have 6 characters let hex = hex.trim_start_matches('#'); if hex.len() != 6 { return None; } // Parse RGB components let r = u8::from_str_radix(&hex[0..2], 16).ok()?; let g = u8::from_str_radix(&hex[2..4], 16).ok()?; let b = u8::from_str_radix(&hex[4..6], 16).ok()?; match color_depth { ColorDepth::Monochrome => None, ColorDepth::Standard => { // Convert to 16 colors (approximation) let color_index = rgb_to_ansi_16(r, g, b); Some(CrosstermColor::AnsiValue(color_index)) } ColorDepth::Extended => { // Convert to 256 colors let color_index = rgb_to_ansi_256(r, g, b); Some(CrosstermColor::AnsiValue(color_index)) } ColorDepth::TrueColor | ColorDepth::Auto => { // Use full RGB Some(CrosstermColor::Rgb { r, g, b }) } } } fn rgb_to_ansi_16(r: u8, g: u8, b: u8) -> u8 { // Simple mapping to 16 colors let r_bright = r > 127; let g_bright = g > 127; let b_bright = b > 127; let base = match (r > 64, g > 64, b > 64) { (false, false, false) => 0, // Black (false, false, true) => 4, // Blue (false, true, false) => 2, // Green (false, true, true) => 6, // Cyan (true, false, false) => 1, // Red (true, false, true) => 5, // Magenta (true, true, false) => 3, // Yellow (true, true, true) => 7, // White }; // Add 8 for bright colors if any component is very bright if r_bright || g_bright || b_bright { base + 8 } else { base } } fn rgb_to_ansi_256(r: u8, g: u8, b: u8) -> u8 { // 256-color conversion if r == g && g == b { // Grayscale if r < 8 { 16 } else if r > 247 { 231 } else { 232 + (r - 8) / 10 } } else { // Color cube: 16 + 36*r + 6*g + b let r_index = (r as f32 / 255.0 * 5.0) as u8; let g_index = (g as f32 / 255.0 * 5.0) as u8; let b_index = (b as f32 / 255.0 * 5.0) as u8; 16 + 36 * r_index + 6 * g_index + b_index } } doxx-0.1.2/src/document.rs 0000644 0000000 0000000 00000272202 10461020230 0013566 0 ustar 0000000 0000000 use anyhow::{bail, Result}; use once_cell::sync::Lazy; use regex::Regex; use serde::{Deserialize, Serialize}; use std::fs::File; use std::path::Path; use zip::ZipArchive; type TableRows = Vec>; type NumberingInfo = (i32, u8); type HeadingNumberInfo = (String, String); /// Image rendering options #[derive(Debug, Clone, Default)] pub struct ImageOptions { pub enabled: bool, pub max_width: Option, pub max_height: Option, pub scale: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Document { pub title: String, pub metadata: DocumentMetadata, pub elements: Vec, #[serde(skip)] pub image_options: ImageOptions, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DocumentMetadata { pub file_path: String, pub file_size: u64, pub word_count: usize, pub page_count: usize, pub created: Option, pub modified: Option, pub author: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] pub enum DocumentElement { Heading { level: u8, text: String, number: Option, }, Paragraph { runs: Vec, }, List { items: Vec, ordered: bool, }, Table { table: TableData, }, Image { description: String, width: Option, height: Option, relationship_id: Option, // Link to DOCX relationship for image extraction image_path: Option, // Path to extracted image file }, Equation { latex: String, fallback: String, }, PageBreak, } #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] pub struct TextFormatting { pub bold: bool, pub italic: bool, pub underline: bool, pub strikethrough: bool, pub font_size: Option, pub color: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct FormattedRun { pub text: String, pub formatting: TextFormatting, } impl FormattedRun { /// Consolidate adjacent runs with identical formatting into single runs pub fn consolidate_runs(runs: Vec) -> Vec { if runs.is_empty() { return runs; } let mut consolidated = Vec::new(); let mut current_run = runs[0].clone(); for run in runs.into_iter().skip(1) { if current_run.formatting == run.formatting { // Same formatting - merge the text current_run.text.push_str(&run.text); } else { // Different formatting - push current and start new consolidated.push(current_run); current_run = run; } } // last run consolidated.push(current_run); consolidated } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ListItem { pub runs: Vec, pub level: u8, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TableData { pub headers: Vec, pub rows: Vec>, pub metadata: TableMetadata, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TableCell { pub content: String, pub alignment: TextAlignment, pub formatting: TextFormatting, pub data_type: CellDataType, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TableMetadata { pub column_count: usize, pub row_count: usize, pub has_headers: bool, pub column_widths: Vec, pub column_alignments: Vec, pub title: Option, } #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Default)] pub enum TextAlignment { #[default] Left, Center, Right, Justify, } #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Default)] pub enum CellDataType { #[default] Text, Number, Currency, Percentage, Date, Boolean, Empty, } #[derive(Debug, Clone)] pub struct SearchResult { pub element_index: usize, pub text: String, #[allow(dead_code)] pub start_pos: usize, #[allow(dead_code)] pub end_pos: usize, } /// Validates that the file is a legitimate .docx file fn validate_docx_file(file_path: &Path) -> Result<()> { // Check file extension let extension = file_path .extension() .and_then(|ext| ext.to_str()) .unwrap_or(""); if extension != "docx" { bail!( "Invalid file format. Expected .docx file, got .{}\n\ Note: doxx only supports Word .docx files (not .doc, .xlsx, .zip, etc.)", extension ); } // Check ZIP structure contains word/document.xml let file = File::open(file_path)?; let mut archive = ZipArchive::new(file)?; if archive.by_name("word/document.xml").is_err() { // Check if it might be an Excel file if archive.by_name("xl/workbook.xml").is_ok() { bail!( "This appears to be an Excel file (.xlsx).\n\ doxx only supports Word documents (.docx)." ); } bail!( "Invalid .docx file: missing word/document.xml\n\ This file may be corrupted or is not a valid Word document." ); } Ok(()) } /// Merge display equations into the element list at their correct paragraph positions /// /// This function handles the fact that docx-rs doesn't parse paragraphs containing only equations. /// We need to track paragraph indices from the XML and insert equations at the right positions. fn merge_display_equations( elements: Vec, display_equations_by_para: std::collections::HashMap>, ) -> Vec { if display_equations_by_para.is_empty() { return elements; } // Get all paragraph indices with equations, sorted let mut eq_para_indices: Vec = display_equations_by_para.keys().copied().collect(); eq_para_indices.sort_unstable(); // Build a new element list with equations inserted at correct positions let mut result = Vec::new(); let mut element_para_index = 0; for element in elements { // Increment paragraph counter for elements that correspond to paragraphs match &element { DocumentElement::Paragraph { .. } | DocumentElement::Heading { .. } | DocumentElement::List { .. } => { element_para_index += 1; // Insert any display equations that come before this element while let Some(&eq_idx) = eq_para_indices.first() { if eq_idx < element_para_index { if let Some(eqs) = display_equations_by_para.get(&eq_idx) { result.extend(eqs.clone()); } eq_para_indices.remove(0); } else { break; } } } _ => {} } result.push(element); } // Add any remaining equations at the end for eq_idx in eq_para_indices { if let Some(eqs) = display_equations_by_para.get(&eq_idx) { result.extend(eqs.clone()); } } result } pub async fn load_document(file_path: &Path, image_options: ImageOptions) -> Result { // Validate file type before attempting to parse validate_docx_file(file_path)?; let file_size = std::fs::metadata(file_path)?.len(); // For now, create a simple implementation that reads the docx file // This is a simplified version to get the project compiling let file_data = std::fs::read(file_path)?; let docx = docx_rs::read_docx(&file_data)?; let title = file_path .file_stem() .and_then(|s| s.to_str()) .unwrap_or("Untitled Document") .to_string(); let mut elements = Vec::new(); let mut word_count = 0; let mut numbering_manager = DocumentNumberingManager::new(); let mut heading_tracker = HeadingNumberTracker::new(); // Analyze document structure to determine if auto-numbering should be enabled let should_auto_number = analyze_heading_structure(&docx.document); if should_auto_number { heading_tracker.enable_auto_numbering(); } // Extract images if enabled let image_extractor = if image_options.enabled { let mut extractor = crate::image_extractor::ImageExtractor::new()?; extractor.extract_images_from_docx(file_path)?; Some(extractor) } else { None }; // Enhanced content extraction with style information for child in &docx.document.children { match child { docx_rs::DocumentChild::Paragraph(para) => { // Check for heading with potential numbering first let heading_info = detect_heading_with_numbering(para); // Check for list numbering properties (Word's automatic lists) let list_info = detect_list_from_paragraph_numbering(para); // Check for images in this paragraph first for child in ¶.children { if let docx_rs::ParagraphChild::Run(run) = child { for run_child in &run.children { if let docx_rs::RunChild::Drawing(_drawing) = run_child { // Create an Image element with consistent ordering if let Some(ref extractor) = image_extractor { let images = extractor.get_extracted_images_sorted(); if !images.is_empty() { // Count images processed so far to maintain document order let image_count = elements .iter() .filter(|e| matches!(e, DocumentElement::Image { .. })) .count(); // Only create Image element if we have an actual image file available if image_count < images.len() { let (_, image_path) = &images[image_count]; elements.push(DocumentElement::Image { description: format!("Image {}", image_count + 1), width: None, height: None, relationship_id: None, image_path: Some(image_path.clone()), }); } } } } } } } // Extract runs with individual formatting let mut formatted_runs = Vec::new(); for child in ¶.children { if let docx_rs::ParagraphChild::Run(run) = child { let run_formatting = extract_run_formatting(run); let mut run_text = String::new(); for child in &run.children { if let docx_rs::RunChild::Text(text_elem) = child { run_text.push_str(&text_elem.text); } } if !run_text.is_empty() { formatted_runs.push(FormattedRun { text: run_text, formatting: run_formatting, }); } } } // Calculate total text for word count and processing let total_text: String = formatted_runs.iter().map(|run| run.text.as_str()).collect(); if !total_text.trim().is_empty() { word_count += total_text.split_whitespace().count(); // Priority: list numbering > heading style > text heuristics if let Some(list_info) = list_info { // This is an automatic Word list item - format with proper indentation let indent = " ".repeat(list_info.level as usize); let prefix = if list_info.is_ordered { // Use the numbering manager for proper sequential numbering if let Some(num_id) = list_info.num_id { let format = get_numbering_format(num_id, list_info.level); numbering_manager.generate_number(num_id, list_info.level, format) } else { // Fallback for missing numId format!("{}. ", list_info.level + 1) } } else { "* ".to_string() // Bullets for unordered }; // For list items, preserve individual run formatting by adding prefix to first run // This maintains formatting fidelity while supporting Word automatic numbering if !formatted_runs.is_empty() { // Add the list prefix to the first run let prefix_text = format!("__WORD_LIST__{indent}{prefix}"); let mut updated_runs = formatted_runs; updated_runs[0].text = format!("{prefix_text}{}", updated_runs[0].text.trim()); elements.push(DocumentElement::Paragraph { runs: updated_runs }); } else { // Fallback for empty runs let list_text = format!("__WORD_LIST__{indent}{prefix}"); elements.push(DocumentElement::Paragraph { runs: vec![FormattedRun { text: list_text, formatting: TextFormatting::default(), }], }); } } else { // Check for headings (with or without numbering) if let Some(heading_info) = heading_info { let heading_text = heading_info.clean_text.unwrap_or(total_text.clone()); let number = if heading_info.number.is_some() { heading_info.number } else { // Generate automatic numbering if enabled for this document let auto_number = heading_tracker.get_number(heading_info.level); if auto_number.is_empty() { None } else { Some(auto_number) } }; elements.push(DocumentElement::Heading { level: heading_info.level, text: heading_text, number, }); } else { // Fallback to text-based heading detection using first run's formatting let first_formatting = if !formatted_runs.is_empty() { &formatted_runs[0].formatting } else { &TextFormatting::default() }; let level = detect_heading_from_text(&total_text, first_formatting); if let Some(level) = level { elements.push(DocumentElement::Heading { level, text: total_text, number: None, }); } else { // This is a regular paragraph - consolidate runs and preserve formatting let consolidated_runs = FormattedRun::consolidate_runs(formatted_runs); elements.push(DocumentElement::Paragraph { runs: consolidated_runs, }); } } } } } docx_rs::DocumentChild::Table(table) => { // Extract table data if let Some(table_element) = extract_table_data(table) { elements.push(table_element); } } _ => { // Handle other document elements (images, etc.) in future } } } // Extract inline equations with their positions let inline_paragraphs = extract_inline_equation_positions(file_path).unwrap_or_default(); // Extract all equations (both inline and display) let equation_infos = extract_equations_from_docx(file_path).unwrap_or_default(); // Create a map of paragraph index -> display equations let mut display_equations_by_para: std::collections::HashMap> = std::collections::HashMap::new(); for eq in equation_infos.iter() { if !eq.is_inline { display_equations_by_para .entry(eq.paragraph_index) .or_default() .push(DocumentElement::Equation { latex: eq.latex.clone(), fallback: eq.fallback.clone(), }); } } // Integrate inline equations into paragraphs and insert display equations at correct positions let mut elements_with_equations = Vec::new(); let mut para_index = 0; for element in elements { match element { DocumentElement::Paragraph { runs } => { para_index += 1; // Check if this paragraph has inline equations if let Some(content_items) = inline_paragraphs.get(¶_index) { // Check if there are actually any inline equations in this paragraph let has_actual_equations = content_items .iter() .any(|item| matches!(item, ParagraphContent::InlineEquation { .. })); if has_actual_equations { // Reconstruct paragraph with inline equations in correct positions let mut new_runs = Vec::new(); let mut accumulated_text = String::new(); for content in content_items { match content { ParagraphContent::Text(text) => { accumulated_text.push_str(text); } ParagraphContent::InlineEquation { latex, fallback: _ } => { // Flush accumulated text before equation if !accumulated_text.is_empty() { new_runs.push(FormattedRun { text: accumulated_text.clone(), formatting: TextFormatting::default(), }); accumulated_text.clear(); } // Add inline equation with $ delimiters new_runs.push(FormattedRun { text: format!("${latex}$"), formatting: TextFormatting::default(), }); } } } // Flush any remaining text if !accumulated_text.is_empty() { new_runs.push(FormattedRun { text: accumulated_text, formatting: TextFormatting::default(), }); } elements_with_equations.push(DocumentElement::Paragraph { runs: new_runs }); } else { // No actual equations, preserve original runs with formatting elements_with_equations.push(DocumentElement::Paragraph { runs }); } } else { // Check if this paragraph is actually a display equation if let Some(display_eqs) = display_equations_by_para.get(¶_index) { // This paragraph contains display equation(s) for eq in display_eqs { elements_with_equations.push(eq.clone()); } } else { // Regular paragraph without equations elements_with_equations.push(DocumentElement::Paragraph { runs }); } } } _ => { elements_with_equations.push(element); } } } // Post-process to group consecutive list items (only for text-based lists) // Word numbering-based lists are already properly formatted let elements = group_list_items(elements_with_equations); // Clean up Word list markers let elements = clean_word_list_markers(elements); // Merge display equations into the final element list at correct positions let elements = merge_display_equations(elements, display_equations_by_para); let metadata = DocumentMetadata { file_path: file_path.to_string_lossy().to_string(), file_size, word_count, page_count: estimate_page_count(word_count), created: None, // Simplified for now modified: None, author: None, }; Ok(Document { title, metadata, elements, image_options, }) } fn detect_heading_from_paragraph_style(para: &docx_rs::Paragraph) -> Option { // Try to access paragraph properties and style if let Some(style) = ¶.property.style { // Check for heading styles (Heading1, Heading2, etc.) if style.val.starts_with("Heading") || style.val.starts_with("heading") { if let Some(level_char) = style.val.chars().last() { if let Some(level) = level_char.to_digit(10) { return Some(level.min(6) as u8); } } // Default to level 1 for unspecified heading styles return Some(1); } } None } #[derive(Debug, Clone)] struct ListInfo { level: u8, is_ordered: bool, num_id: Option, // Word's numbering definition ID } /// Type alias for numbering counters to simplify complex HashMap type type NumberingCounters = std::collections::HashMap<(i32, u8), u32>; /// Manages document-wide numbering state for proper sequential numbering #[derive(Debug)] struct DocumentNumberingManager { /// Counters for each (numId, level) combination /// Key: (numId, level), Value: current counter counters: NumberingCounters, } impl DocumentNumberingManager { fn new() -> Self { Self { counters: NumberingCounters::new(), } } /// Generate the next number for a given numId and level fn generate_number(&mut self, num_id: i32, level: u8, format: NumberingFormat) -> String { // Get current counter for this (numId, level) combination let key = (num_id, level); let counter_value = { let counter = self.counters.entry(key).or_insert(0); *counter += 1; *counter }; // Reset deeper levels when we increment a higher level // This handles hierarchical numbering like 1. -> 1.1 -> 2. (reset 1.1 back to 2.1) self.reset_deeper_levels(num_id, level); // For hierarchical numbering, we need to build the full number string self.format_hierarchical_number(num_id, level, counter_value, format) } fn reset_deeper_levels(&mut self, num_id: i32, current_level: u8) { // Reset all levels deeper than current_level for this numId let keys_to_reset: Vec<_> = self .counters .keys() .filter(|(id, level)| *id == num_id && *level > current_level) .cloned() .collect(); for key in keys_to_reset { self.counters.remove(&key); } } fn format_number(&self, counter: u32, format: NumberingFormat) -> String { match format { NumberingFormat::Decimal => format!("{counter}. "), NumberingFormat::LowerLetter => { // Convert 1->a, 2->b, etc. if counter <= 26 { let letter = (b'a' + (counter - 1) as u8) as char; format!("{letter}. ") } else { format!("{counter}. ") // Fallback for > 26 } } NumberingFormat::LowerRoman => format!("{}. ", Self::to_roman(counter).to_lowercase()), NumberingFormat::UpperLetter => { // Convert 1->A, 2->B, etc. if counter <= 26 { let letter = (b'A' + (counter - 1) as u8) as char; format!("{letter}. ") } else { format!("{counter}. ") // Fallback for > 26 } } NumberingFormat::UpperRoman => format!("{}. ", Self::to_roman(counter)), NumberingFormat::ParenLowerLetter => { if counter <= 26 { let letter = (b'a' + (counter - 1) as u8) as char; format!("({letter})") } else { format!("({counter})") } } NumberingFormat::ParenLowerRoman => { format!("({})", Self::to_roman(counter).to_lowercase()) } NumberingFormat::Bullet => "* ".to_string(), } } fn to_roman(num: u32) -> String { let values = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1]; let symbols = [ "M", "CM", "D", "CD", "C", "XC", "L", "XL", "X", "IX", "V", "IV", "I", ]; let mut result = String::new(); let mut n = num; for (i, &value) in values.iter().enumerate() { while n >= value { result.push_str(symbols[i]); n -= value; } } result } /// Format hierarchical number (e.g., "2.1", "3.2.1") fn format_hierarchical_number( &self, num_id: i32, level: u8, counter: u32, format: NumberingFormat, ) -> String { // Check if this numId/level combination should use hierarchical numbering let needs_hierarchy = matches!((num_id, level), (4, 1)); // 2.1, 2.2, etc. if needs_hierarchy { // Build hierarchical number by including parent level counters let mut parts = Vec::new(); // Add parent level counter (level 0 for this numId) if let Some(parent_counter) = self.counters.get(&(num_id, 0)) { parts.push(parent_counter.to_string()); } // Add current level counter parts.push(counter.to_string()); // Join with dots and add final punctuation format!("{}. ", parts.join(".")) } else { // Use regular formatting for non-hierarchical levels self.format_number(counter, format) } } } /// Different numbering formats supported by Word #[derive(Debug, Clone, Copy)] enum NumberingFormat { Decimal, // 1. 2. 3. LowerLetter, // a. b. c. UpperLetter, // A. B. C. LowerRoman, // i. ii. iii. UpperRoman, // I. II. III. ParenLowerLetter, // (a) (b) (c) ParenLowerRoman, // (i) (ii) (iii) #[allow(dead_code)] Bullet, // * * * } #[derive(Debug, Clone)] struct HeadingInfo { level: u8, number: Option, clean_text: Option, // Text with number removed } fn detect_list_from_paragraph_numbering(para: &docx_rs::Paragraph) -> Option { // Check if paragraph has numbering properties if let Some(num_pr) = ¶.property.numbering_property { // Extract numbering level (default to 0 if not specified) let level = num_pr.level.as_ref().map(|l| l.val as u8).unwrap_or(0); // Extract numId for state tracking let num_id = num_pr.id.as_ref().map(|id| id.id as i32); // Enhanced detection for mixed list types (same numId, different levels) let is_ordered = if let Some(num_id_val) = num_id { match (num_id_val, level) { // For Word's default mixed list (numId 1): // Level 0 = decimal numbers (1. 2. 3.) // Level 1 = letters (a) b) c)) // Level 2 = roman numerals (i. ii. iii.) (1, 0) => true, // Top level: decimal numbers (was false, causing bug) (1, 1) => true, // Second level: letters (1, 2) => true, // Third level: roman numerals (1, _) => level % 2 == 1, // Pattern for deeper levels (_, _) => true, // Other numIds are typically ordered } } else { false }; return Some(ListInfo { level, is_ordered, num_id, }); } None } /// Determine the numbering format based on Word's numId and level fn get_numbering_format(num_id: i32, level: u8) -> NumberingFormat { match (num_id, level) { // numId=4: Main multilevel list (from advanced-numbering-2.docx) (4, 0) => NumberingFormat::Decimal, // 1., 2., 3. (4, 1) => NumberingFormat::Decimal, // 2.1., 2.2., 2.3. (hierarchical) (4, 2) => NumberingFormat::LowerRoman, // i., ii., iii. // numId=5: Secondary list (a), (b), (c) from same document (5, 2) => NumberingFormat::ParenLowerLetter, // (a), (b), (c) // numId=2: From other test documents (2, 0) => NumberingFormat::Decimal, // 1., 2., 3. (2, 3) => NumberingFormat::ParenLowerRoman, // (i), (ii), (iii) // numId=1: Default Word numbering scheme (1, 0) => NumberingFormat::Decimal, // 1. 2. 3. (1, 1) => NumberingFormat::LowerLetter, // a. b. c. (1, 2) => NumberingFormat::LowerRoman, // i. ii. iii. (1, 3) => NumberingFormat::ParenLowerLetter, // (a) (b) (c) (1, 4) => NumberingFormat::ParenLowerRoman, // (i) (ii) (iii) // Fallback defaults based on level (_, 0) => NumberingFormat::Decimal, (_, 1) => NumberingFormat::LowerLetter, (_, 2) => NumberingFormat::LowerRoman, (_, 3) => NumberingFormat::UpperLetter, (_, 4) => NumberingFormat::UpperRoman, _ => NumberingFormat::Decimal, } } fn detect_heading_with_numbering(para: &docx_rs::Paragraph) -> Option { // First check if this is a heading style let heading_level = detect_heading_from_paragraph_style(para)?; // Extract text using docx-rs proper text extraction let text = extract_paragraph_text(para); // Priority order for numbering detection: // 1. Manual numbering in text content (highest priority - user explicitly typed) // 2. Word's automatic numbering (w:numPr) - explicit numbering properties // 3. Style-based automatic generation (lowest priority - our inference) // First, check for manual numbering in text content if let Some((number, remaining_text)) = extract_heading_number_from_text(&text) { return Some(HeadingInfo { level: heading_level, number: Some(number), clean_text: Some(remaining_text), }); } // Second, check for Word's automatic numbering if let Some(num_pr) = ¶.property.numbering_property { // This is automatic Word numbering - try to reconstruct if let Some((num_id, level)) = extract_numbering_info(num_pr) { let number = reconstruct_heading_number(num_id, level, heading_level); return Some(HeadingInfo { level: heading_level, number: Some(number), clean_text: Some(text), // Keep original text since number is automatic }); } } // If no numbering found, return heading info without number Some(HeadingInfo { level: heading_level, number: None, clean_text: None, }) } /// Extract text from paragraph using docx-rs properly fn extract_paragraph_text(para: &docx_rs::Paragraph) -> String { let mut text = String::new(); for child in ¶.children { match child { docx_rs::ParagraphChild::Run(run) => { text.push_str(&extract_run_text(run)); } docx_rs::ParagraphChild::Insert(insert) => { // Handle insertions (track changes) - simplified approach // Since InsertChild might be different from Run, we'll extract text differently // This is a placeholder - in practice we'd need to handle the specific types for child in &insert.children { if let docx_rs::InsertChild::Run(run) = child { text.push_str(&extract_run_text(run)); } } } docx_rs::ParagraphChild::Delete(_) => { // Skip deletions (track changes) } _ => { // Handle other paragraph children if needed } } } text.trim().to_string() } /// Extract text from a run using docx-rs features fn extract_run_text(run: &docx_rs::Run) -> String { let mut text = String::new(); for child in &run.children { match child { docx_rs::RunChild::Text(text_elem) => { text.push_str(&text_elem.text); } docx_rs::RunChild::Tab(_) => { text.push('\t'); } docx_rs::RunChild::Break(_) => { // Break types are private, so we'll just add a line break text.push('\n'); } docx_rs::RunChild::Drawing(_) => { text.push_str("[Image]"); } _ => { // Handle other run children } } } text } /// Extract numbering information from docx-rs numbering properties fn extract_numbering_info(num_pr: &docx_rs::NumberingProperty) -> Option { let num_id = num_pr.id.as_ref()?.id as i32; let level = num_pr.level.as_ref().map(|l| l.val as u8).unwrap_or(0); Some((num_id, level)) } /// Reconstruct heading number from Word's numbering system fn reconstruct_heading_number(num_id: i32, level: u8, heading_level: u8) -> String { // This is a simplified reconstruction // In a full implementation, we'd need to access the numbering definitions // and track the current state across the document match (num_id, level, heading_level) { // Standard heading numbering schemes (_, 0, 1) => "1".to_string(), (_, 1, 2) => "1.1".to_string(), (_, 2, 3) => "1.1.1".to_string(), (_, 3, 4) => "1.1.1.1".to_string(), _ => { // Fallback based on heading level match heading_level { 1 => "1".to_string(), 2 => "1.1".to_string(), 3 => "1.1.1".to_string(), _ => "1.1.1.1".to_string(), } } } } #[derive(Debug)] struct HeadingNumberTracker { counters: [u32; 6], // Support up to 6 heading levels auto_numbering_enabled: bool, } impl HeadingNumberTracker { fn new() -> Self { Self { counters: [0; 6], auto_numbering_enabled: false, } } fn enable_auto_numbering(&mut self) { self.auto_numbering_enabled = true; } fn get_number(&mut self, level: u8) -> String { if !self.auto_numbering_enabled { return String::new(); } let level_index = (level.saturating_sub(1) as usize).min(5); // Increment current level self.counters[level_index] += 1; // Reset all deeper levels for i in (level_index + 1)..6 { self.counters[i] = 0; } // Build number string (1.2.3 format) let mut parts = Vec::new(); for i in 0..=level_index { if self.counters[i] > 0 { parts.push(self.counters[i].to_string()); } } parts.join(".") } } /// Analyze document structure to determine if automatic numbering should be enabled fn analyze_heading_structure(document: &docx_rs::Document) -> bool { let mut heading_count = 0; let mut has_explicit_numbering = false; let mut level_counts = [0u32; 6]; // Count headings at each level for child in &document.children { if let docx_rs::DocumentChild::Paragraph(para) = child { if let Some(heading_level) = detect_heading_from_paragraph_style(para) { let text = extract_paragraph_text(para); // Check if this heading has explicit numbering in the text if extract_heading_number_from_text(&text).is_some() { has_explicit_numbering = true; } heading_count += 1; let level_index = (heading_level.saturating_sub(1) as usize).min(5); level_counts[level_index] += 1; } } } // Don't auto-number if: // 1. Any headings have explicit numbering // 2. Very few headings (less than 3) // 3. Only one level of headings (no hierarchy) if has_explicit_numbering || heading_count < 3 { return false; } // Check if we have a real hierarchy (headings at multiple levels) let levels_with_headings = level_counts.iter().filter(|&&count| count > 0).count(); // Auto-number if we have multiple levels or multiple headings at level 1 levels_with_headings > 1 || level_counts[0] > 1 } // Lazy static regex patterns for heading number detection // Focused on common patterns for manual numbering in text static HEADING_NUMBER_PATTERNS: Lazy> = Lazy::new(|| { vec![ // Standard decimal numbering: "1.", "1.1", "1.1.1", "2.1.1" (most common) // For single numbers, require a period to distinguish from "Heading 1" style titles // For hierarchical numbers (1.1, 1.2.3), period is optional Regex::new(r"^(\d+(?:\.\d+)+\.?|\d+\.)\s+(.+)$").unwrap(), // Section numbering: "Section 1.2", "Chapter 3" Regex::new(r"^((?:Section|Chapter|Part)\s+\d+(?:\.\d+)*\.?)\s+(.+)$").unwrap(), // Alternative numbering schemes (less common, but still useful) Regex::new(r"^([A-Z]\.)\s+(.+)$").unwrap(), // "A. Introduction" Regex::new(r"^([IVX]+\.)\s+(.+)$").unwrap(), // "I. Overview" ] }); fn extract_heading_number_from_text(text: &str) -> Option { let text = text.trim(); // Early return for empty text if text.is_empty() { return None; } // Try each pattern until one matches for pattern in HEADING_NUMBER_PATTERNS.iter() { if let Some(captures) = pattern.captures(text) { if let (Some(number_match), Some(text_match)) = (captures.get(1), captures.get(2)) { let number = number_match.as_str().trim_end_matches('.'); let remaining_text = text_match.as_str().trim(); // Only return if we have both number and meaningful text if !number.is_empty() && !remaining_text.is_empty() { return Some((number.to_string(), remaining_text.to_string())); } } } } None } #[cfg(test)] mod tests { use super::*; #[test] fn test_heading_number_extraction() { // Test most common formats (decimal hierarchical) assert_eq!( extract_heading_number_from_text("1. Introduction"), Some(("1".to_string(), "Introduction".to_string())) ); assert_eq!( extract_heading_number_from_text("1.1 Project Overview"), Some(("1.1".to_string(), "Project Overview".to_string())) ); assert_eq!( extract_heading_number_from_text("2.1.1 Something Important"), Some(("2.1.1".to_string(), "Something Important".to_string())) ); // Test alternative numbering schemes assert_eq!( extract_heading_number_from_text("A. First Section"), Some(("A".to_string(), "First Section".to_string())) ); assert_eq!( extract_heading_number_from_text("I. Roman Numeral"), Some(("I".to_string(), "Roman Numeral".to_string())) ); // Test section numbering assert_eq!( extract_heading_number_from_text("Section 1.2 Overview"), Some(("Section 1.2".to_string(), "Overview".to_string())) ); // Test no numbering (should fall back to automatic generation) assert_eq!(extract_heading_number_from_text("Introduction"), None); // Test titles with numbers that should NOT be treated as numbered headings assert_eq!(extract_heading_number_from_text("Heading 1"), None); // Note: "Chapter 5 Summary" will match the section pattern, which is intentional // The section pattern is designed to match "Chapter 5 Something" formats assert_eq!( extract_heading_number_from_text("Chapter 5 Summary"), Some(("Chapter 5".to_string(), "Summary".to_string())) ); assert_eq!(extract_heading_number_from_text("Version 2"), None); } } fn extract_run_formatting(run: &docx_rs::Run) -> TextFormatting { let mut formatting = TextFormatting::default(); // Access run properties directly (they're not optional in current API) let props = &run.run_property; formatting.bold = props.bold.is_some(); formatting.italic = props.italic.is_some(); formatting.underline = props.underline.is_some(); formatting.strikethrough = props.strike.is_some() || props.dstrike.is_some(); // Extract color information if let Some(color) = &props.color { // Extract color value through debug formatting as a workaround for private field access let color_debug = format!("{color:?}"); if let Some(start) = color_debug.find("val: \"") { // Safe: searching for ASCII strings in debug output let search_from = start + 6; // length of "val: \"" if let Some(end) = color_debug[search_from..].find("\"") { let color_val = &color_debug[search_from..search_from + end]; formatting.color = Some(color_val.to_string()); } } } // For now, skip font size extraction due to API complexity // TODO: Add font size extraction when we understand the API better formatting } fn detect_heading_from_text(text: &str, formatting: &TextFormatting) -> Option { let text = text.trim(); // Be much more conservative and selective if text.len() < 100 && !text.contains('\n') { // Exclude common non-heading patterns first if is_likely_list_item(text) || is_likely_sentence(text) { return None; } // Exclude patterns that are clearly not headings if text.starts_with("⏺") || text.starts_with("⎿") || text.starts_with("☐") || text.starts_with("☒") { return None; } // Exclude if it contains typical sentence patterns if text.contains(" the ") || text.contains(" and ") || text.contains(" with ") || text.contains(" for ") { return None; } // Strong indicators of headings if formatting.bold && text.len() < 60 && text.len() > 5 { // Bold text that's reasonably short is likely a heading if !text.ends_with('.') && !text.ends_with(',') && !text.ends_with(';') && !text.ends_with(':') { return Some(determine_heading_level_from_text(text)); } } // Check if it's all caps (but not just a short word) if text.len() > 15 && text.len() < 50 && text.chars().all(|c| { c.is_uppercase() || c.is_whitespace() || c.is_numeric() || c.is_ascii_punctuation() }) { return Some(1); } // Very specific patterns that indicate headings if text.starts_with("Chapter ") || text.starts_with("Section ") || text.starts_with("Part ") { return Some(determine_heading_level_from_text(text)); } // Look for standalone phrases that could be headings (very conservative) if text.len() < 40 && text.len() > 10 && !text.ends_with('.') && !text.contains(',') && !text.contains('(') && !text.contains(':') { // Check if it has heading-like characteristics let words = text.split_whitespace().count(); if (2..=5).contains(&words) { // Must contain at least one meaningful word (longer than 3 chars) let has_meaningful_word = text .split_whitespace() .any(|word| word.len() > 3 && word.chars().all(|c| c.is_alphabetic())); if has_meaningful_word && text.chars().next().is_some_and(|c| c.is_uppercase()) { return Some(determine_heading_level_from_text(text)); } } } } None } fn determine_heading_level_from_text(text: &str) -> u8 { // Simple heuristic: shorter text = higher level (lower number) if text.len() < 20 { 1 } else if text.len() < 40 { 2 } else { 3 } } fn is_likely_list_item(text: &str) -> bool { let text = text.trim(); // Skip Word-formatted list items to avoid reprocessing if text.starts_with("__WORD_LIST__") { return false; } // Check for numbered list patterns that are NOT headings if text.starts_with(char::is_numeric) { // If it starts with a number followed by "." and then has substantial content, // it's likely a list item, not a heading if let Some(dot_pos) = text.find('.') { // Safe: '.' is ASCII, so dot_pos+1 is guaranteed to be a char boundary let after_dot = &text[dot_pos + 1..].trim(); // If there's substantial content after the number and dot, it's likely a list item if after_dot.len() > 20 { return true; } } } // Check for bullet point patterns if text.starts_with("• ") || text.starts_with("- ") || text.starts_with("* ") { return true; } // Check for lettered lists if text.len() > 3 && text.chars().nth(1) == Some('.') { let first_char = text.chars().next().unwrap(); if first_char.is_ascii_lowercase() || first_char.is_ascii_uppercase() { return true; } } false } fn group_list_items(elements: Vec) -> Vec { let mut result = Vec::new(); let mut current_list_items = Vec::new(); let mut current_list_ordered = false; for element in elements { match &element { DocumentElement::Paragraph { runs } => { // Get the combined text from all runs for list detection let text: String = runs.iter().map(|run| run.text.as_str()).collect(); if is_likely_list_item(&text) { // Determine if this is an ordered list item let is_ordered = text.trim().starts_with(char::is_numeric); // If we're starting a new list or switching list types, finish the current list if !current_list_items.is_empty() && is_ordered != current_list_ordered { result.push(DocumentElement::List { items: std::mem::take(&mut current_list_items), ordered: current_list_ordered, }); } current_list_ordered = is_ordered; // Calculate nesting level from indentation let level = calculate_list_level(&text); // Clean the runs (remove bullet/number prefix from first run) let clean_runs = clean_list_item_runs(runs.clone()); current_list_items.push(ListItem { runs: clean_runs, level, }); } else { // Not a list item, so finish any current list if !current_list_items.is_empty() { result.push(DocumentElement::List { items: std::mem::take(&mut current_list_items), ordered: current_list_ordered, }); } result.push(element); } } _ => { // Non-paragraph element, finish any current list if !current_list_items.is_empty() { result.push(DocumentElement::List { items: std::mem::take(&mut current_list_items), ordered: current_list_ordered, }); } result.push(element); } } } // Don't forget the last list if the document ends with one if !current_list_items.is_empty() { result.push(DocumentElement::List { items: current_list_items, ordered: current_list_ordered, }); } result } fn calculate_list_level(text: &str) -> u8 { // Count leading whitespace to determine nesting level let leading_spaces = text.len() - text.trim_start().len(); // Convert spaces to levels (every 2-4 spaces = 1 level) // Use 2 spaces per level as it's common in Word documents (leading_spaces / 2) as u8 } fn clean_list_item_runs(runs: Vec) -> Vec { if runs.is_empty() { return runs; } // Get the combined text to determine what prefix to remove let combined_text: String = runs.iter().map(|run| run.text.as_str()).collect(); let text = combined_text.trim(); // Determine what prefix we need to remove let prefix_to_remove = if text.starts_with("• ") { "• " } else if text.starts_with("- ") { "- " } else if text.starts_with("* ") { "* " } else if let Some(dot_pos) = text.find('.') { let prefix = &text[..dot_pos]; if prefix.chars().all(|c| c.is_ascii_digit()) { // For numbered lists, include the dot and following space &text[..dot_pos + if text.chars().nth(dot_pos + 1) == Some(' ') { 2 } else { 1 }] } else if text.chars().count() > 2 && text.chars().nth(1) == Some('.') { let first_char = text.chars().next().unwrap(); if first_char.is_ascii_lowercase() || first_char.is_ascii_uppercase() { // For lettered lists, include the letter, dot, and following space &text[..if text.chars().nth(2) == Some(' ') { 3 } else { 2 }] } else { "" } } else { "" } } else { "" }; if prefix_to_remove.is_empty() { return runs; } // Remove the prefix from the runs while preserving formatting let mut result_runs = Vec::new(); let mut chars_to_remove = prefix_to_remove.chars().count(); for run in runs { if chars_to_remove == 0 { // No more prefix to remove, keep this run as-is result_runs.push(run); } else { let run_char_count = run.text.chars().count(); if run_char_count <= chars_to_remove { // This entire run is part of the prefix to remove chars_to_remove -= run_char_count; } else { // This run contains part of the text we want to keep let keep_text: String = run.text.chars().skip(chars_to_remove).collect(); if !keep_text.is_empty() { result_runs.push(FormattedRun { text: keep_text.trim_start().to_string(), formatting: run.formatting, }); } chars_to_remove = 0; } } } result_runs } fn is_likely_sentence(text: &str) -> bool { let text = text.trim(); // If it contains multiple sentences, it's probably not a heading if text.matches(". ").count() > 1 { return true; } // If it ends with common sentence endings and is long, it's probably a sentence if text.len() > 80 && (text.ends_with('.') || text.ends_with('!') || text.ends_with('?')) { return true; } // If it contains common sentence connectors, it's likely a sentence if text.contains(" and ") || text.contains(" but ") || text.contains(" however ") || text.contains(" therefore ") { return true; } false } fn estimate_page_count(word_count: usize) -> usize { // Rough estimate: 250 words per page (word_count as f32 / 250.0).ceil() as usize } pub fn search_document(document: &Document, query: &str) -> Vec { let mut results = Vec::new(); // TODO: consider deferring search execution until Enter is pressed if query.is_empty() { return results; } let query_lower = query.to_lowercase(); for (element_index, element) in document.elements.iter().enumerate() { let text = match element { DocumentElement::Heading { text, .. } => text, DocumentElement::Paragraph { runs } => { // Combine text from all runs for searching &runs.iter().map(|run| run.text.as_str()).collect::() } DocumentElement::List { items, .. } => { // Search in list items for item in items { let item_text: String = item.runs.iter().map(|run| run.text.as_str()).collect(); let text_lower = item_text.to_lowercase(); if let Some(start_pos) = text_lower.find(&query_lower) { results.push(SearchResult { element_index, text: item_text, start_pos, end_pos: start_pos + query.len(), }); } } continue; } DocumentElement::Table { table } => { // Search in table content for header in &table.headers { let text_lower = header.content.to_lowercase(); if let Some(start_pos) = text_lower.find(&query_lower) { results.push(SearchResult { element_index, text: header.content.clone(), start_pos, end_pos: start_pos + query.len(), }); } } for row in &table.rows { for cell in row { let text_lower = cell.content.to_lowercase(); if let Some(start_pos) = text_lower.find(&query_lower) { results.push(SearchResult { element_index, text: cell.content.clone(), start_pos, end_pos: start_pos + query.len(), }); } } } continue; } DocumentElement::Image { description, .. } => description, DocumentElement::Equation { latex, .. } => latex, DocumentElement::PageBreak => continue, }; let text_lower = text.to_lowercase(); if let Some(start_pos) = text_lower.find(&query_lower) { results.push(SearchResult { element_index, text: text.clone(), start_pos, end_pos: start_pos + query.len(), }); } } results } pub fn generate_outline(document: &Document) -> Vec { let mut outline = Vec::new(); for (index, element) in document.elements.iter().enumerate() { if let DocumentElement::Heading { level, text, number, } = element { let title = if let Some(number) = number { format!("{number} {text}") } else { text.clone() }; outline.push(OutlineItem { title, level: *level, element_index: index, }); } } outline } fn extract_table_data(table: &docx_rs::Table) -> Option { let mut header_cells = Vec::new(); let mut data_rows = Vec::new(); let mut is_first_row = true; let mut _raw_headers = Vec::new(); let mut raw_rows = Vec::new(); // First pass: extract raw text content for table_child in &table.rows { let docx_rs::TableChild::TableRow(row) = table_child; let mut row_cells = Vec::new(); for row_child in &row.cells { let docx_rs::TableRowChild::TableCell(cell) = row_child; let mut cell_text = String::new(); let mut cell_formatting = TextFormatting::default(); // Extract text and formatting from all content in the cell for content in &cell.children { match content { docx_rs::TableCellContent::Paragraph(para) => { for para_child in ¶.children { if let docx_rs::ParagraphChild::Run(run) = para_child { // Extract formatting from the first run if !cell_formatting.bold && !cell_formatting.italic { cell_formatting = extract_run_formatting(run); } for run_child in &run.children { if let docx_rs::RunChild::Text(text_elem) = run_child { if !cell_text.is_empty() && !cell_text.ends_with(' ') { cell_text.push(' '); } cell_text.push_str(&text_elem.text); } } } } } _ => { // Handle nested tables or other content if needed } } } let table_cell = TableCell::new(cell_text.trim().to_string()).with_formatting(cell_formatting); row_cells.push(table_cell); } if !row_cells.is_empty() { let raw_text: Vec = row_cells.iter().map(|c| c.content.clone()).collect(); if is_first_row && appears_to_be_header(&raw_text) { _raw_headers = raw_text; header_cells = row_cells; is_first_row = false; } else { raw_rows.push(raw_text); data_rows.push(row_cells); is_first_row = false; } } } // If no headers were detected, use the first row as headers if header_cells.is_empty() && !data_rows.is_empty() { header_cells = data_rows.remove(0); raw_rows.remove(0); } // Return table only if it has content if !header_cells.is_empty() || !data_rows.is_empty() { let table_data = TableData::new(header_cells, data_rows); Some(DocumentElement::Table { table: table_data }) } else { None } } fn appears_to_be_header(row: &[String]) -> bool { // Heuristics to detect if a row is likely a header let total_chars: usize = row.iter().map(|cell| cell.len()).sum(); let avg_length = if !row.is_empty() { total_chars / row.len() } else { 0 }; // Headers tend to be shorter and more concise if avg_length > 50 { return false; } // Check if most cells contain typical header words or are short phrases let header_indicators = row .iter() .filter(|cell| { let cell_lower = cell.to_lowercase(); let word_count = cell.split_whitespace().count(); // Short phrases (1-3 words) are often headers if word_count <= 3 && !cell.trim().is_empty() { return true; } // Common header words if cell_lower.contains("name") || cell_lower.contains("date") || cell_lower.contains("amount") || cell_lower.contains("type") || cell_lower.contains("status") || cell_lower.contains("id") || cell_lower.contains("description") || cell_lower.contains("count") { return true; } false }) .count(); // If more than half the cells look like headers, treat the row as a header header_indicators > row.len() / 2 } // Enhanced table processing functions impl TableData { pub fn new(headers: Vec, rows: Vec>) -> Self { let column_count = headers.len(); let row_count = rows.len(); let has_headers = !headers.is_empty(); // Calculate optimal column widths let column_widths = calculate_column_widths(&headers, &rows); // Determine column alignments let column_alignments = determine_column_alignments(&headers, &rows); let metadata = TableMetadata { column_count, row_count, has_headers, column_widths, column_alignments, title: None, }; Self { headers, rows, metadata, } } pub fn _get_column_width(&self, column_index: usize) -> usize { self.metadata .column_widths .get(column_index) .copied() .unwrap_or(10) } pub fn _get_column_alignment(&self, column_index: usize) -> TextAlignment { self.metadata .column_alignments .get(column_index) .copied() .unwrap_or(TextAlignment::Left) } } impl TableCell { pub fn new(content: String) -> Self { let data_type = detect_cell_data_type(&content); let alignment = default_alignment_for_type(data_type); Self { content, alignment, formatting: TextFormatting::default(), data_type, } } pub fn _with_alignment(mut self, alignment: TextAlignment) -> Self { self.alignment = alignment; self } pub fn with_formatting(mut self, formatting: TextFormatting) -> Self { self.formatting = formatting; self } pub fn display_width(&self) -> usize { // Calculate display width considering unicode characters unicode_segmentation::UnicodeSegmentation::graphemes(self.content.as_str(), true).count() } } fn calculate_column_widths(headers: &[TableCell], rows: &TableRows) -> Vec { if headers.is_empty() { return Vec::new(); } let mut widths = headers .iter() .map(|h| h.display_width()) .collect::>(); for row in rows { for (i, cell) in row.iter().enumerate() { if let Some(current_width) = widths.get_mut(i) { *current_width = (*current_width).max(cell.display_width()); } } } // Ensure minimum width of 3 characters per column widths.iter_mut().for_each(|w| *w = (*w).max(3)); widths } fn determine_column_alignments(headers: &[TableCell], rows: &TableRows) -> Vec { let column_count = headers.len(); let mut alignments = vec![TextAlignment::Left; column_count]; for (col_index, alignment) in alignments.iter_mut().enumerate().take(column_count) { let mut numeric_count = 0; let mut total_count = 0; // Check data types in this column for row in rows { if let Some(cell) = row.get(col_index) { total_count += 1; if matches!( cell.data_type, CellDataType::Number | CellDataType::Currency | CellDataType::Percentage ) { numeric_count += 1; } } } // If more than 70% of cells are numeric, right-align the column if total_count > 0 && (numeric_count as f32 / total_count as f32) > 0.7 { *alignment = TextAlignment::Right; } } alignments } fn detect_cell_data_type(content: &str) -> CellDataType { let trimmed = content.trim(); if trimmed.is_empty() { return CellDataType::Empty; } // Check for currency if trimmed.starts_with('$') || trimmed.starts_with('€') || trimmed.starts_with('£') { return CellDataType::Currency; } // Check for percentage if trimmed.ends_with('%') { return CellDataType::Percentage; } // Check for boolean let lower = trimmed.to_lowercase(); if matches!(lower.as_str(), "true" | "false" | "yes" | "no" | "y" | "n") { return CellDataType::Boolean; } // Check for number (including with commas) let number_candidate = trimmed.replace(',', ""); if number_candidate.parse::().is_ok() { return CellDataType::Number; } // Check for date patterns (basic) if trimmed.contains('/') || trimmed.contains('-') { let parts: Vec<&str> = trimmed.split(['/', '-']).collect(); if parts.len() == 3 && parts.iter().all(|p| p.parse::().is_ok()) { return CellDataType::Date; } } CellDataType::Text } fn default_alignment_for_type(data_type: CellDataType) -> TextAlignment { match data_type { CellDataType::Number | CellDataType::Currency | CellDataType::Percentage => { TextAlignment::Right } CellDataType::Boolean => TextAlignment::Center, _ => TextAlignment::Left, } } #[derive(Debug, Clone)] pub struct OutlineItem { pub title: String, pub level: u8, pub element_index: usize, } fn clean_word_list_markers(elements: Vec) -> Vec { elements .into_iter() .map(|element| match element { DocumentElement::Paragraph { runs } => { let cleaned_runs = runs .into_iter() .map(|mut run| { if run.text.starts_with("__WORD_LIST__") { run.text = run .text .strip_prefix("__WORD_LIST__") .unwrap_or(&run.text) .to_string(); } run }) .collect(); DocumentElement::Paragraph { runs: cleaned_runs } } DocumentElement::List { items, ordered } => { let cleaned_items = items .into_iter() .map(|item| { let combined_text: String = item.runs.iter().map(|run| run.text.as_str()).collect(); let cleaned_runs = if combined_text.starts_with("__WORD_LIST__") { // Remove the __WORD_LIST__ prefix from the first run let mut new_runs = item.runs.clone(); if let Some(first_run) = new_runs.first_mut() { first_run.text = first_run .text .strip_prefix("__WORD_LIST__") .unwrap_or(&first_run.text) .to_string(); } new_runs } else { item.runs.clone() }; ListItem { runs: cleaned_runs, level: item.level, } }) .collect(); DocumentElement::List { items: cleaned_items, ordered, } } other => other, }) .collect() } /// Equation type and context information #[derive(Debug, Clone)] struct EquationInfo { latex: String, fallback: String, is_inline: bool, paragraph_index: usize, } /// Represents content within a paragraph (text or inline equation) #[derive(Debug, Clone)] enum ParagraphContent { Text(String), #[allow(dead_code)] // fallback may be used for UI display in future InlineEquation { latex: String, fallback: String, }, } /// Parse paragraphs with inline equations directly from XML /// Returns a map of paragraph index to ordered content (text and inline equations) fn extract_inline_equation_positions( file_path: &Path, ) -> Result>> { use quick_xml::events::Event; use quick_xml::Reader; use std::fs::File; use std::io::Read; use zip::ZipArchive; let file = File::open(file_path)?; let mut archive = ZipArchive::new(file)?; // Read word/document.xml let mut document_xml = String::new(); let mut xml_file = archive.by_name("word/document.xml")?; xml_file.read_to_string(&mut document_xml)?; let mut paragraphs: std::collections::HashMap> = std::collections::HashMap::new(); let mut reader = Reader::from_str(&document_xml); reader.config_mut().trim_text(false); // Don't trim to preserve spacing let mut buf = Vec::new(); let mut in_paragraph = false; let mut in_math = false; let mut in_math_para = false; // Track if we're in a display equation let mut in_text_run = false; let mut current_paragraph_index = 0; let mut current_paragraph_content: Vec = Vec::new(); let mut current_text = String::new(); let mut current_omml = String::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) if e.name().as_ref() == b"w:p" => { in_paragraph = true; current_paragraph_index += 1; current_paragraph_content.clear(); } Ok(Event::End(ref e)) if e.name().as_ref() == b"w:p" => { in_paragraph = false; if !current_paragraph_content.is_empty() { paragraphs.insert(current_paragraph_index, current_paragraph_content.clone()); } } Ok(Event::Start(ref e)) if e.name().as_ref() == b"m:oMathPara" => { in_math_para = true; } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:oMathPara" => { in_math_para = false; } Ok(Event::Start(ref e)) if e.name().as_ref() == b"m:oMath" && in_paragraph && !in_math_para => { // Inline equation (not wrapped in oMathPara) in_math = true; current_omml.clear(); } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:oMath" && in_math => { in_math = false; let (latex, fallback) = parse_simple_omml(¤t_omml); current_paragraph_content .push(ParagraphContent::InlineEquation { latex, fallback }); current_omml.clear(); } Ok(Event::Start(ref e)) if e.name().as_ref() == b"w:t" && in_paragraph && !in_math => { in_text_run = true; current_text.clear(); } Ok(Event::End(ref e)) if e.name().as_ref() == b"w:t" && in_text_run => { in_text_run = false; if !current_text.is_empty() { current_paragraph_content.push(ParagraphContent::Text(current_text.clone())); } } Ok(Event::Text(ref e)) if in_text_run => { current_text.push_str(&e.unescape().unwrap_or_default()); } // Capture OMML content for inline equations Ok(Event::Start(ref e)) if in_math => { let name_ref = e.name(); let tag_name = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); current_omml.push('<'); current_omml.push_str(tag_name); for a in e.attributes().flatten() { let key = std::str::from_utf8(a.key.as_ref()).unwrap_or(""); let value = String::from_utf8_lossy(&a.value); current_omml.push(' '); current_omml.push_str(key); current_omml.push_str("=\""); current_omml.push_str(&value); current_omml.push('"'); } current_omml.push('>'); } Ok(Event::End(ref e)) if in_math => { let name_ref = e.name(); let tag_name = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); current_omml.push_str(""); current_omml.push_str(tag_name); current_omml.push('>'); } Ok(Event::Empty(ref e)) if in_math => { let name_ref = e.name(); let tag_name = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); current_omml.push('<'); current_omml.push_str(tag_name); for a in e.attributes().flatten() { let key = std::str::from_utf8(a.key.as_ref()).unwrap_or(""); let value = String::from_utf8_lossy(&a.value); current_omml.push(' '); current_omml.push_str(key); current_omml.push_str("=\""); current_omml.push_str(&value); current_omml.push('"'); } current_omml.push_str("/>"); } Ok(Event::Text(ref e)) if in_math => { current_omml.push_str(&e.unescape().unwrap_or_default()); } Ok(Event::Eof) => break, Err(e) => { eprintln!("Error reading XML for inline equations: {e}"); break; } _ => {} } buf.clear(); } Ok(paragraphs) } /// Extract equations from .docx file by reading raw XML /// Since docx-rs doesn't expose OMML (Office Math Markup Language), we parse the ZIP directly fn extract_equations_from_docx(file_path: &Path) -> Result> { use quick_xml::events::Event; use quick_xml::Reader; use std::fs::File; use std::io::Read; use zip::ZipArchive; let file = File::open(file_path)?; let mut archive = ZipArchive::new(file)?; // Read word/document.xml let mut document_xml = String::new(); let mut xml_file = archive.by_name("word/document.xml")?; xml_file.read_to_string(&mut document_xml)?; let mut equations = Vec::new(); let mut reader = Reader::from_str(&document_xml); reader.config_mut().trim_text(true); let mut buf = Vec::new(); let mut in_math = false; let mut in_math_para = false; let mut current_omml = String::new(); let mut current_paragraph_index = 0; loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) if e.name().as_ref() == b"w:p" => { current_paragraph_index += 1; } Ok(Event::Start(ref e)) if e.name().as_ref() == b"m:oMathPara" => { in_math_para = true; } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:oMathPara" => { in_math_para = false; } Ok(Event::Start(ref e)) if e.name().as_ref() == b"m:oMath" => { in_math = true; current_omml.clear(); } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:oMath" => { in_math = false; // Parse the collected OMML to LaTeX let (latex, fallback) = parse_simple_omml(¤t_omml); // Inline equations are NOT wrapped in let is_inline = !in_math_para; equations.push(EquationInfo { latex, fallback, is_inline, paragraph_index: current_paragraph_index, }); current_omml.clear(); } Ok(Event::Start(ref e)) if in_math => { let name_ref = e.name(); let tag_name = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); current_omml.push('<'); current_omml.push_str(tag_name); // Capture attributes (e.g., m:chr m:val="∑") for a in e.attributes().flatten() { let key = std::str::from_utf8(a.key.as_ref()).unwrap_or(""); let value = String::from_utf8_lossy(&a.value); current_omml.push(' '); current_omml.push_str(key); current_omml.push_str("=\""); current_omml.push_str(&value); current_omml.push('"'); } current_omml.push('>'); } Ok(Event::End(ref e)) if in_math => { let name_ref = e.name(); let tag_name = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); current_omml.push_str(""); current_omml.push_str(tag_name); current_omml.push('>'); } Ok(Event::Empty(ref e)) if in_math => { // Handle self-closing tags like let name_ref = e.name(); let tag_name = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); current_omml.push('<'); current_omml.push_str(tag_name); // Capture attributes for a in e.attributes().flatten() { let key = std::str::from_utf8(a.key.as_ref()).unwrap_or(""); let value = String::from_utf8_lossy(&a.value); current_omml.push(' '); current_omml.push_str(key); current_omml.push_str("=\""); current_omml.push_str(&value); current_omml.push('"'); } current_omml.push_str("/>"); } Ok(Event::Text(ref e)) if in_math => { current_omml.push_str(&e.unescape().unwrap_or_default()); } Ok(Event::Eof) => break, Err(e) => { eprintln!("Error reading OMML: {e}"); break; } _ => {} } buf.clear(); } Ok(equations) } /// OMML parser that converts to LaTeX format fn parse_simple_omml(omml: &str) -> (String, String) { // Extract plain text for fallback let fallback = omml .split("") .skip(1) .filter_map(|s| s.split("").next()) .collect::>() .join(""); let latex = omml_to_latex(omml); if latex.is_empty() { (fallback.clone(), fallback) } else { (latex, fallback) } } /// Convert OMML XML to LaTeX fn omml_to_latex(omml: &str) -> String { let mut result = String::new(); let mut i = 0; while i < omml.len() { // Look for OMML structures if omml[i..].starts_with("") { // Superscript: ^{...} let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; if let (Some(base), Some(sup)) = ( extract_latex_text(content, "m:e"), extract_latex_text(content, "m:sup"), ) { result.push_str(&base); result.push_str("^{"); result.push_str(&sup); result.push('}'); } i += end + 8; } else if omml[i..].starts_with("") { // Subscript: _{...} let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; if let (Some(base), Some(sub)) = ( extract_latex_text(content, "m:e"), extract_latex_text(content, "m:sub"), ) { result.push_str(&base); result.push_str("_{"); result.push_str(&sub); result.push('}'); } i += end + 8; } else if omml[i..].starts_with("") { // Subscript and superscript: _{}^{} let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; if let (Some(base), Some(sub), Some(sup)) = ( extract_latex_text(content, "m:e"), extract_latex_text(content, "m:sub"), extract_latex_text(content, "m:sup"), ) { result.push_str(&base); result.push_str("_{"); result.push_str(&sub); result.push_str("}^{"); result.push_str(&sup); result.push('}'); } i += end + 12; } else if omml[i..].starts_with("") { // Delimiter: \left(...\right) let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; result.push_str("\\left("); if let Some(inner) = extract_latex_text(content, "m:e") { result.push_str(&inner); } result.push_str("\\right)"); i += end + 5; } else if omml[i..].starts_with("") { // Fraction: \frac{num}{den} or binomial coefficient: \binom{n}{k} let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; // Check if it's a binomial coefficient (noBar type) let is_binom = content.contains("m:val=\"noBar\""); if let (Some(num), Some(den)) = ( extract_latex_text(content, "m:num"), extract_latex_text(content, "m:den"), ) { if is_binom { result.push_str("\\binom{"); result.push_str(&num); result.push_str("}{"); result.push_str(&den); result.push('}'); } else { result.push_str("\\frac{"); result.push_str(&num); result.push_str("}{"); result.push_str(&den); result.push('}'); } } i += end + 5; } else if omml[i..].starts_with("") { // Function: \sin, \cos, \tan, etc. let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; if let Some(func_name) = extract_latex_text(content, "m:fName") { result.push('\\'); result.push_str(&func_name); } if let Some(argument) = extract_latex_text(content, "m:e") { result.push(' '); result.push_str(&argument); } i += end + 8; } else if omml[i..].starts_with("") { // Radical (square root): \sqrt{...} or \sqrt[n]{...} let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; result.push_str("\\sqrt"); // Check for degree (nth root) if let Some(deg) = extract_latex_text(content, "m:deg") { if deg != "2" && !deg.is_empty() { result.push('['); result.push_str(°); result.push(']'); } } result.push('{'); if let Some(base) = extract_latex_text(content, "m:e") { result.push_str(&base); } result.push('}'); i += end + 7; } else if omml[i..].starts_with("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; // Extract operator character and convert to LaTeX command let operator = if let Some(chr_pos) = content.find("m:val=\"") { let start = chr_pos + 7; if let Some(end_quote) = content[start..].find('"') { let chr = &content[start..start + end_quote]; match chr { "∑" => "\\sum", "∫" => "\\int", "∬" => "\\iint", "∭" => "\\iiint", "∮" => "\\oint", "∏" => "\\prod", "⋃" => "\\bigcup", "⋂" => "\\bigcap", _ => "\\sum", } } else { "\\sum" } } else { "\\sum" }; result.push_str(operator); // Extract sub and sup if let Some(sub) = extract_latex_text(content, "m:sub") { result.push_str("_{"); result.push_str(&sub); result.push('}'); } if let Some(sup) = extract_latex_text(content, "m:sup") { result.push_str("^{"); result.push_str(&sup); result.push('}'); } if let Some(base) = extract_latex_text(content, "m:e") { result.push(' '); result.push_str(&base); } i += end + 9; } else if omml[i..].starts_with("") { // Text run - extract text without processing let end = omml[i..].find("").unwrap_or(omml.len() - i); let content = &omml[i..i + end]; if let Some(text) = extract_text(content, "m:t") { // Convert special characters to LaTeX for ch in text.chars() { match ch { 'π' => result.push_str("\\pi "), 'α' => result.push_str("\\alpha "), 'β' => result.push_str("\\beta "), 'γ' => result.push_str("\\gamma "), 'Γ' => result.push_str("\\Gamma "), 'δ' => result.push_str("\\delta "), 'Δ' => result.push_str("\\Delta "), 'θ' => result.push_str("\\theta "), 'λ' => result.push_str("\\lambda "), 'μ' => result.push_str("\\mu "), 'σ' => result.push_str("\\sigma "), 'Σ' => result.push_str("\\Sigma "), 'φ' => result.push_str("\\phi "), 'ω' => result.push_str("\\omega "), 'Ω' => result.push_str("\\Omega "), '∞' => result.push_str("\\infty "), '±' => result.push_str("\\pm "), '×' => result.push_str("\\times "), '÷' => result.push_str("\\div "), '≤' => result.push_str("\\leq "), '≥' => result.push_str("\\geq "), '≠' => result.push_str("\\neq "), '≈' => result.push_str("\\approx "), '∈' => result.push_str("\\in "), '∉' => result.push_str("\\notin "), '⊂' => result.push_str("\\subset "), '⊃' => result.push_str("\\supset "), '∪' => result.push_str("\\cup "), '∩' => result.push_str("\\cap "), '∅' => result.push_str("\\emptyset "), '√' => result.push_str("\\sqrt"), _ => result.push(ch), } } } i += end + 5; } else if omml[i..].starts_with("") { // Text content let end = omml[i + 4..].find("").unwrap_or(omml.len() - i - 4); let text = &omml[i + 4..i + 4 + end]; // Convert special characters for ch in text.chars() { match ch { 'π' => result.push_str("\\pi"), 'α' => result.push_str("\\alpha"), 'β' => result.push_str("\\beta"), _ => result.push(ch), } } i += 4 + end + 5; } else { i += 1; } } result } /// Extract text from an OMML tag and recursively convert nested OMML to LaTeX fn extract_latex_text(omml: &str, tag: &str) -> Option { let start_tag = format!("<{tag}>"); let end_tag = format!("{tag}>"); if let Some(start_pos) = omml.find(&start_tag) { let content = &omml[start_pos + start_tag.len()..]; // Find the matching closing tag, accounting for nesting let mut depth = 1; let mut pos = 0; let mut end_pos = None; while pos < content.len() && depth > 0 { if content[pos..].starts_with(&start_tag) { depth += 1; pos += start_tag.len(); } else if content[pos..].starts_with(&end_tag) { depth -= 1; if depth == 0 { end_pos = Some(pos); break; } pos += end_tag.len(); } else { // Skip to next character boundary (Unicode-safe) let next_char = content[pos..].chars().next(); if let Some(ch) = next_char { pos += ch.len_utf8(); } else { break; } } } if let Some(end_pos) = end_pos { let inner = &content[..end_pos]; // Check if inner content has OMML structures if inner.contains(" tags let text = inner .split("") .skip(1) .filter_map(|s| s.split("").next()) .collect::>() .join(""); if !text.is_empty() { return Some(text); } } } } None } /// Extract text from an OMML tag fn extract_text(omml: &str, tag: &str) -> Option { let start_tag = format!("<{tag}>"); let end_tag = format!("{tag}>"); if let Some(start_pos) = omml.find(&start_tag) { let content = &omml[start_pos + start_tag.len()..]; if let Some(end_pos) = content.find(&end_tag) { let inner = &content[..end_pos]; // Inner is already the text between and , just return it if !inner.is_empty() { return Some(inner.to_string()); } } } None } doxx-0.1.2/src/equation.rs 0000644 0000000 0000000 00000043443 10461020230 0013600 0 ustar 0000000 0000000 use anyhow::Result; use quick_xml::events::Event; use quick_xml::Reader; /// Represents a mathematical equation extracted from a DOCX file #[derive(Debug, Clone)] pub struct Equation { /// Raw OMML (Office Math Markup Language) XML pub omml: String, /// Parsed and rendered Unicode representation pub unicode: String, /// Plain text fallback pub fallback: String, } /// OMML element types we need to parse #[derive(Debug, Clone)] enum OmmlElement { /// Superscript: base^exponent Superscript { base: Box, sup: Box, }, /// Subscript: base_sub Subscript { base: Box, sub: Box, }, /// Fraction: numerator/denominator Fraction { num: Box, den: Box, }, /// N-ary operator (sum, integral, etc.) Nary { operator: String, sub: Option>, sup: Option>, base: Box, }, /// Delimiter (parentheses, brackets, etc.) Delimiter { content: Box }, /// Text run Text(String), /// Sequence of elements Sequence(Vec), } impl Equation { /// Parse OMML XML and convert to Unicode representation pub fn from_omml(omml: String) -> Result { let unicode = parse_omml_to_unicode(&omml)?; let fallback = extract_text_from_omml(&omml); Ok(Equation { omml, unicode, fallback, }) } } /// Extract plain text from OMML (for fallback display) fn extract_text_from_omml(omml: &str) -> String { let mut reader = Reader::from_str(omml); reader.config_mut().trim_text(true); let mut text = String::new(); let mut buf = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) if e.name().as_ref() == b"m:t" => { // Inside tag, capture text if let Ok(Event::Text(e)) = reader.read_event_into(&mut buf) { text.push_str(&e.unescape().unwrap_or_default()); } } Ok(Event::Eof) => break, Err(e) => { eprintln!("Error parsing OMML: {e}"); break; } _ => {} } buf.clear(); } text } /// Convert OMML XML to Unicode mathematical representation fn parse_omml_to_unicode(omml: &str) -> Result { let element = parse_omml_element(omml)?; Ok(render_to_unicode(&element)) } /// Parse OMML XML into structured elements fn parse_omml_element(xml: &str) -> Result { let mut reader = Reader::from_str(xml); reader.config_mut().trim_text(true); let mut buf = Vec::new(); let mut elements = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) => { let name_ref = e.name(); let tag_name = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); match tag_name { "m:sSup" => { // Parse superscript structure elements.push(parse_superscript(&mut reader)?); } "m:sSub" => { elements.push(parse_subscript(&mut reader)?); } "m:f" => { elements.push(parse_fraction(&mut reader)?); } "m:nary" => { elements.push(parse_nary(&mut reader)?); } "m:d" => { elements.push(parse_delimiter(&mut reader)?); } "m:r" => { elements.push(parse_run(&mut reader)?); } _ => {} } } Ok(Event::Eof) => break, Err(e) => anyhow::bail!("XML parsing error: {}", e), _ => {} } buf.clear(); } if elements.len() == 1 { Ok(elements.into_iter().next().unwrap()) } else { Ok(OmmlElement::Sequence(elements)) } } /// Parse superscript element fn parse_superscript(reader: &mut Reader<&[u8]>) -> Result { let mut base = None; let mut sup = None; let mut buf = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) => { let name_ref = e.name(); let tag = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); match tag { "m:e" => { let content = read_element_content(reader, "m:e")?; if base.is_none() { base = Some(parse_omml_element(&content)?); } } "m:sup" => { let content = read_element_content(reader, "m:sup")?; sup = Some(parse_omml_element(&content)?); } _ => {} } } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:sSup" => break, Ok(Event::Eof) => break, Err(e) => anyhow::bail!("Superscript parse error: {}", e), _ => {} } buf.clear(); } Ok(OmmlElement::Superscript { base: Box::new(base.unwrap_or(OmmlElement::Text(String::new()))), sup: Box::new(sup.unwrap_or(OmmlElement::Text(String::new()))), }) } /// Parse subscript element fn parse_subscript(reader: &mut Reader<&[u8]>) -> Result { let mut base = None; let mut sub = None; let mut buf = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) => { let name_ref = e.name(); let tag = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); match tag { "m:e" => { let content = read_element_content(reader, "m:e")?; if base.is_none() { base = Some(parse_omml_element(&content)?); } } "m:sub" => { let content = read_element_content(reader, "m:sub")?; sub = Some(parse_omml_element(&content)?); } _ => {} } } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:sSub" => break, Ok(Event::Eof) => break, Err(e) => anyhow::bail!("Subscript parse error: {}", e), _ => {} } buf.clear(); } Ok(OmmlElement::Subscript { base: Box::new(base.unwrap_or(OmmlElement::Text(String::new()))), sub: Box::new(sub.unwrap_or(OmmlElement::Text(String::new()))), }) } /// Parse fraction element fn parse_fraction(reader: &mut Reader<&[u8]>) -> Result { let mut num = None; let mut den = None; let mut buf = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) => { let name_ref = e.name(); let tag = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); match tag { "m:num" => { let content = read_element_content(reader, "m:num")?; num = Some(parse_omml_element(&content)?); } "m:den" => { let content = read_element_content(reader, "m:den")?; den = Some(parse_omml_element(&content)?); } _ => {} } } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:f" => break, Ok(Event::Eof) => break, Err(e) => anyhow::bail!("Fraction parse error: {}", e), _ => {} } buf.clear(); } Ok(OmmlElement::Fraction { num: Box::new(num.unwrap_or(OmmlElement::Text(String::new()))), den: Box::new(den.unwrap_or(OmmlElement::Text(String::new()))), }) } /// Parse n-ary operator (sum, integral, product, etc.) fn parse_nary(reader: &mut Reader<&[u8]>) -> Result { let mut operator = String::from("∑"); // Default to summation let mut sub = None; let mut sup = None; let mut base = None; let mut buf = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) => { let name_ref = e.name(); let tag = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); match tag { "m:chr" => { // Extract operator character from attribute if let Some(a) = e .attributes() .flatten() .find(|a| a.key.as_ref() == b"m:val") { operator = String::from_utf8_lossy(&a.value).to_string(); } } "m:sub" => { let content = read_element_content(reader, "m:sub")?; sub = Some(parse_omml_element(&content)?); } "m:sup" => { let content = read_element_content(reader, "m:sup")?; sup = Some(parse_omml_element(&content)?); } "m:e" => { let content = read_element_content(reader, "m:e")?; base = Some(parse_omml_element(&content)?); } _ => {} } } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:nary" => break, Ok(Event::Eof) => break, Err(e) => anyhow::bail!("Nary parse error: {}", e), _ => {} } buf.clear(); } Ok(OmmlElement::Nary { operator, sub: sub.map(Box::new), sup: sup.map(Box::new), base: Box::new(base.unwrap_or(OmmlElement::Text(String::new()))), }) } /// Parse delimiter (parentheses, brackets, etc.) fn parse_delimiter(reader: &mut Reader<&[u8]>) -> Result { let mut content = None; let mut buf = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) => { let name_ref = e.name(); let tag = std::str::from_utf8(name_ref.as_ref()).unwrap_or(""); if tag == "m:e" { let xml_content = read_element_content(reader, "m:e")?; content = Some(parse_omml_element(&xml_content)?); } } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:d" => break, Ok(Event::Eof) => break, Err(e) => anyhow::bail!("Delimiter parse error: {}", e), _ => {} } buf.clear(); } Ok(OmmlElement::Delimiter { content: Box::new(content.unwrap_or(OmmlElement::Text(String::new()))), }) } /// Parse text run fn parse_run(reader: &mut Reader<&[u8]>) -> Result { let mut text = String::new(); let mut buf = Vec::new(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) if e.name().as_ref() == b"m:t" => { if let Ok(Event::Text(e)) = reader.read_event_into(&mut buf) { text.push_str(&e.unescape().unwrap_or_default()); } } Ok(Event::End(ref e)) if e.name().as_ref() == b"m:r" => break, Ok(Event::Eof) => break, Err(e) => anyhow::bail!("Run parse error: {}", e), _ => {} } buf.clear(); } Ok(OmmlElement::Text(text)) } /// Read content of an XML element as a string fn read_element_content(reader: &mut Reader<&[u8]>, end_tag: &str) -> Result { let mut content = String::new(); let mut buf = Vec::new(); let mut depth = 1; let end_tag_bytes = end_tag.as_bytes(); loop { match reader.read_event_into(&mut buf) { Ok(Event::Start(ref e)) => { content.push('<'); content.push_str(std::str::from_utf8(e.name().as_ref()).unwrap_or("")); for a in e.attributes().flatten() { content.push(' '); content.push_str(std::str::from_utf8(a.key.as_ref()).unwrap_or("")); content.push_str("=\""); content.push_str(&String::from_utf8_lossy(&a.value)); content.push('"'); } content.push('>'); depth += 1; } Ok(Event::End(ref e)) => { depth -= 1; if depth == 0 && e.name().as_ref() == end_tag_bytes { break; } content.push_str(""); content.push_str(std::str::from_utf8(e.name().as_ref()).unwrap_or("")); content.push('>'); } Ok(Event::Text(ref e)) => { content.push_str(&e.unescape().unwrap_or_default()); } Ok(Event::Eof) => break, Err(e) => anyhow::bail!("Element content read error: {}", e), _ => {} } buf.clear(); } Ok(content) } /// Render parsed OMML element to Unicode string fn render_to_unicode(element: &OmmlElement) -> String { match element { OmmlElement::Text(s) => s.clone(), OmmlElement::Sequence(elements) => elements.iter().map(render_to_unicode).collect(), OmmlElement::Superscript { base, sup } => { let base_str = render_to_unicode(base); let sup_str = render_to_unicode(sup); format!("{}{}", base_str, to_superscript(&sup_str)) } OmmlElement::Subscript { base, sub } => { let base_str = render_to_unicode(base); let sub_str = render_to_unicode(sub); format!("{}{}", base_str, to_subscript(&sub_str)) } OmmlElement::Fraction { num, den } => { let num_str = render_to_unicode(num); let den_str = render_to_unicode(den); // For simple single-char fractions, use Unicode fractions match (num_str.as_str(), den_str.as_str()) { ("1", "2") => "½".to_string(), ("1", "4") => "¼".to_string(), ("3", "4") => "¾".to_string(), ("1", "3") => "⅓".to_string(), ("2", "3") => "⅔".to_string(), ("1", "5") => "⅕".to_string(), ("1", "8") => "⅛".to_string(), _ => format!("({num_str}⁄{den_str})"), } } OmmlElement::Nary { operator, sub, sup, base, } => { let mut result = operator.clone(); if let Some(s) = sub { result.push_str(&to_subscript(&render_to_unicode(s))); } if let Some(s) = sup { result.push_str(&to_superscript(&render_to_unicode(s))); } result.push_str(&render_to_unicode(base)); result } OmmlElement::Delimiter { content } => { format!("({})", render_to_unicode(content)) } } } /// Convert ASCII text to Unicode superscript fn to_superscript(text: &str) -> String { text.chars() .map(|c| match c { '0' => '⁰', '1' => '¹', '2' => '²', '3' => '³', '4' => '⁴', '5' => '⁵', '6' => '⁶', '7' => '⁷', '8' => '⁸', '9' => '⁹', '+' => '⁺', '-' => '⁻', '=' => '⁼', '(' => '⁽', ')' => '⁾', 'n' => 'ⁿ', 'i' => 'ⁱ', _ => c, // Keep other characters as-is }) .collect() } /// Convert ASCII text to Unicode subscript fn to_subscript(text: &str) -> String { text.chars() .map(|c| match c { '0' => '₀', '1' => '₁', '2' => '₂', '3' => '₃', '4' => '₄', '5' => '₅', '6' => '₆', '7' => '₇', '8' => '₈', '9' => '₉', '+' => '₊', '-' => '₋', '=' => '₌', '(' => '₍', ')' => '₎', 'a' => 'ₐ', 'e' => 'ₑ', 'h' => 'ₕ', 'i' => 'ᵢ', 'j' => 'ⱼ', 'k' => 'ₖ', 'l' => 'ₗ', 'm' => 'ₘ', 'n' => 'ₙ', 'o' => 'ₒ', 'p' => 'ₚ', 'r' => 'ᵣ', 's' => 'ₛ', 't' => 'ₜ', 'u' => 'ᵤ', 'v' => 'ᵥ', 'x' => 'ₓ', _ => c, // Keep other characters as-is }) .collect() } #[cfg(test)] mod tests { use super::*; #[test] fn test_superscript_conversion() { assert_eq!(to_superscript("2"), "²"); assert_eq!(to_superscript("n"), "ⁿ"); assert_eq!(to_superscript("10"), "¹⁰"); } #[test] fn test_subscript_conversion() { assert_eq!(to_subscript("0"), "₀"); assert_eq!(to_subscript("k"), "ₖ"); assert_eq!(to_subscript("n-k"), "ₙ₋ₖ"); } #[test] fn test_simple_fraction() { let omml = r#"12"#; let eq = Equation::from_omml(omml.to_string()).unwrap(); assert_eq!(eq.unicode, "½"); } } doxx-0.1.2/src/export.rs 0000644 0000000 0000000 00000055303 10461020230 0013272 0 ustar 0000000 0000000 use anyhow::Result; use crate::{ ansi::{export_to_ansi_with_options, AnsiOptions}, document::*, ColorDepth, ExportFormat, }; pub fn export_document(document: &Document, format: &ExportFormat) -> Result<()> { match format { ExportFormat::Markdown => export_to_markdown(document), ExportFormat::Text => export_to_text(document), ExportFormat::Csv => export_to_csv(document), ExportFormat::Json => export_to_json(document), ExportFormat::Ansi => export_to_ansi(document), } } pub fn export_to_markdown(document: &Document) -> Result<()> { let mut markdown = String::new(); // Add document title markdown.push_str(&format!("# {}\n\n", document.title)); // Add metadata markdown.push_str("## Document Information\n\n"); markdown.push_str(&format!("- **File**: {}\n", document.metadata.file_path)); markdown.push_str(&format!("- **Pages**: {}\n", document.metadata.page_count)); markdown.push_str(&format!("- **Words**: {}\n", document.metadata.word_count)); if let Some(author) = &document.metadata.author { markdown.push_str(&format!("- **Author**: {author}\n")); } markdown.push_str("\n---\n\n"); // Convert document content for element in &document.elements { match element { DocumentElement::Heading { level, text, number, } => { let prefix = "#".repeat(*level as usize + 1); // +1 because title is h1 let heading_text = if let Some(number) = number { format!("{number} {text}") } else { text.clone() }; markdown.push_str(&format!("{prefix} {heading_text}\n\n")); } DocumentElement::Paragraph { runs } => { let mut paragraph_text = String::new(); for run in runs { let mut formatted_text = run.text.clone(); if run.formatting.bold { formatted_text = format!("**{formatted_text}**"); } if run.formatting.italic { formatted_text = format!("*{formatted_text}*"); } if run.formatting.strikethrough { formatted_text = format!("~~{formatted_text}~~"); } paragraph_text.push_str(&formatted_text); } markdown.push_str(&format!("{paragraph_text}\n\n")); } DocumentElement::List { items, ordered } => { for (i, item) in items.iter().enumerate() { let indent = " ".repeat(item.level as usize); let bullet = if *ordered { format!("{}. ", i + 1) } else { "- ".to_string() }; let mut item_text = String::new(); for run in &item.runs { let mut formatted_text = run.text.clone(); if run.formatting.bold { formatted_text = format!("**{formatted_text}**"); } if run.formatting.italic { formatted_text = format!("*{formatted_text}*"); } if run.formatting.strikethrough { formatted_text = format!("~~{formatted_text}~~"); } item_text.push_str(&formatted_text); } markdown.push_str(&format!("{indent}{bullet}{item_text}\n")); } markdown.push('\n'); } DocumentElement::Table { table } => { // Add table title if present if let Some(title) = &table.metadata.title { markdown.push_str(&format!("### {title}\n\n")); } // Markdown table header let header_content: Vec = table.headers.iter().map(|h| h.content.clone()).collect(); markdown.push_str(&format!("| {} |\n", header_content.join(" | "))); // Generate alignment indicators let alignment_row: Vec = table .metadata .column_alignments .iter() .map(|align| match align { TextAlignment::Left => ":---".to_string(), TextAlignment::Right => "---:".to_string(), TextAlignment::Center => ":---:".to_string(), TextAlignment::Justify => ":---".to_string(), }) .collect(); markdown.push_str(&format!("| {} |\n", alignment_row.join(" | "))); // Table rows for row in &table.rows { let row_content: Vec = row.iter().map(|cell| cell.content.clone()).collect(); markdown.push_str(&format!("| {} |\n", row_content.join(" | "))); } markdown.push('\n'); } DocumentElement::Image { description, width, height, image_path, .. } => { let alt = description; let url = image_path .as_ref() .map(|p| p.to_string_lossy().to_string()) .unwrap_or_else(|| description.clone()); let dimensions = match (width, height) { (Some(w), Some(h)) => format!(" "), _ => String::new(), }; markdown.push_str(&format!("{dimensions}\n\n")); } DocumentElement::Equation { latex, .. } => { markdown.push_str(&format!("$${latex}$$\n\n")); } DocumentElement::PageBreak => { markdown.push_str("\n---\n\n"); } } } print!("{markdown}"); Ok(()) } pub fn format_as_text(document: &Document) -> String { let mut text = String::new(); // Add document title text.push_str(&format!("{}\n", document.title)); text.push_str(&"=".repeat(document.title.len())); text.push_str("\n\n"); // Convert document content for element in &document.elements { match element { DocumentElement::Heading { level, text: heading_text, .. } => { let underline = match level { 1 => "=", 2 => "-", _ => "~", }; text.push_str(&format!("{heading_text}\n")); text.push_str(&underline.repeat(heading_text.len())); text.push_str("\n\n"); } DocumentElement::Paragraph { runs } => { let para_text: String = runs.iter().map(|run| run.text.as_str()).collect(); text.push_str(&format!("{para_text}\n\n")); } DocumentElement::List { items, ordered } => { for (i, item) in items.iter().enumerate() { let bullet = if *ordered { format!("{}. ", i + 1) } else { "* ".to_string() }; let indent = " ".repeat(item.level as usize); let item_text: String = item.runs.iter().map(|run| run.text.as_str()).collect(); text.push_str(&format!("{indent}{bullet}{item_text}\n")); } text.push('\n'); } DocumentElement::Table { table } => { // Add table title if present if let Some(title) = &table.metadata.title { text.push_str(&format!("{title}\n")); text.push_str(&"=".repeat(title.len())); text.push_str("\n\n"); } // Use the calculated column widths from metadata let col_widths = &table.metadata.column_widths; // Top border let top_border = generate_text_table_border(col_widths, "┌", "┬", "┐", "─"); text.push_str(&format!("{top_border}\n")); // Header with proper alignment let header_line = render_text_table_row(&table.headers, col_widths, true); text.push_str(&format!("{header_line}\n")); // Header separator let separator = generate_text_table_border(col_widths, "├", "┼", "┤", "─"); text.push_str(&format!("{separator}\n")); // Data rows for row in &table.rows { let row_line = render_text_table_row(row, col_widths, false); text.push_str(&format!("{row_line}\n")); } // Bottom border let bottom_border = generate_text_table_border(col_widths, "└", "┴", "┘", "─"); text.push_str(&format!("{bottom_border}\n")); text.push('\n'); } DocumentElement::PageBreak => { text.push_str("---\n\n"); } DocumentElement::Image { description, image_path, .. } => { // Try to render the image inline if available if let Some(path) = image_path { match crate::terminal_image::TerminalImageRenderer::with_options( document.image_options.max_width, document.image_options.max_height, document.image_options.scale, ) .render_image_from_path(path, description) { Ok(_) => { // Image displayed successfully, add spacing text.push('\n'); } Err(_) => { // Fallback to text description text.push_str(&format!("[Image: {description}]\n\n")); } } } else { text.push_str(&format!("[Image: {description}]\n\n")); } } DocumentElement::Equation { latex, .. } => { text.push_str(&format!("Equation: {latex}\n\n")); } } } text } pub fn export_to_text(document: &Document) -> Result<()> { export_to_text_with_images(document); Ok(()) } fn export_to_text_with_images(document: &Document) { // Print title println!("{}\n", document.title); // Print metadata println!("Document Information:"); println!("- File: {}", document.metadata.file_path); println!("- Pages: {}", document.metadata.page_count); println!("- Words: {}", document.metadata.word_count); if let Some(author) = &document.metadata.author { println!("- Author: {author}"); } println!("\n{}\n", "=".repeat(50)); // Process elements in order, printing immediately for element in &document.elements { match element { DocumentElement::Heading { level, text, number, } => { let prefix = "#".repeat(*level as usize); let heading_text = if let Some(number) = number { format!("{number} {text}") } else { text.clone() }; println!("{prefix} {heading_text}\n"); } DocumentElement::Paragraph { runs } => { let mut paragraph_text = String::new(); for run in runs { let mut formatted_text = run.text.clone(); if run.formatting.bold { formatted_text = format!("**{formatted_text}**"); } if run.formatting.italic { formatted_text = format!("*{formatted_text}*"); } if run.formatting.underline { formatted_text = format!("_{formatted_text}_"); } if run.formatting.strikethrough { formatted_text = format!("~~{formatted_text}~~"); } paragraph_text.push_str(&formatted_text); } println!("{paragraph_text}\n"); } DocumentElement::List { items, .. } => { for item in items { let item_text: String = item.runs.iter().map(|run| run.text.as_str()).collect(); println!("- {item_text}"); } println!(); } DocumentElement::Table { table } => { // Simple table rendering for text export for row in &table.rows { let row_content: Vec = row.iter().map(|cell| cell.content.clone()).collect(); println!("| {} |", row_content.join(" | ")); } println!(); } DocumentElement::Image { description, image_path, .. } => { // Render image immediately in the correct position if let Some(path) = image_path { match crate::terminal_image::TerminalImageRenderer::with_options( document.image_options.max_width, document.image_options.max_height, document.image_options.scale, ) .render_image_from_path(path, description) { Ok(_) => { // Image displayed successfully, add spacing println!(); } Err(_) => { // Fallback to text description println!("[Image: {description}]\n"); } } } else { println!("[Image: {description}]\n"); } } DocumentElement::Equation { latex, .. } => { println!("Equation: {latex}\n"); } DocumentElement::PageBreak => { println!("{}\n", "-".repeat(50)); } } } } pub fn export_to_csv(document: &Document) -> Result<()> { let mut csv_output = Vec::new(); // Find all tables in the document for (table_index, element) in document.elements.iter().enumerate() { if let DocumentElement::Table { table } = element { if table_index > 0 { csv_output.push(String::new()); // Empty line between tables csv_output.push(format!("# Table {}", table_index + 1)); } // Add table title as comment if present if let Some(title) = &table.metadata.title { csv_output.push(format!("# {title}")); } // CSV header let header_line = table .headers .iter() .map(|h| escape_csv_field(&h.content)) .collect::>() .join(","); csv_output.push(header_line); // CSV rows for row in &table.rows { let row_line = row .iter() .map(|cell| escape_csv_field(&cell.content)) .collect::>() .join(","); csv_output.push(row_line); } } } if csv_output.is_empty() { println!("No tables found in document"); } else { for line in csv_output { println!("{line}"); } } Ok(()) } pub fn export_to_json(document: &Document) -> Result<()> { let json_output = serde_json::to_string_pretty(document)?; println!("{json_output}"); Ok(()) } #[allow(dead_code)] pub fn extract_citations(document: &Document) -> Result> { let mut citations = Vec::new(); // Simple citation extraction - look for common citation patterns for (index, element) in document.elements.iter().enumerate() { let text = match element { DocumentElement::Heading { text, .. } => text, DocumentElement::Paragraph { runs } => { &runs.iter().map(|run| run.text.as_str()).collect::() } _ => continue, }; // Look for citation patterns like (Author, Year) or [1] let citation_patterns = [ r"\([A-Z][a-z]+,\s*\d{4}\)", // (Author, 2024) r"\[[0-9]+\]", // [1] r"\([A-Z][a-z]+\s+et\s+al\.,\s*\d{4}\)", // (Author et al., 2024) ]; for pattern in &citation_patterns { if let Ok(regex) = regex::Regex::new(pattern) { for mat in regex.find_iter(text) { citations.push(Citation { text: mat.as_str().to_string(), element_index: index, citation_type: CitationType::InText, }); } } } } Ok(citations) } #[allow(dead_code)] pub fn extract_bibliography(document: &Document) -> Result> { let mut bibliography = Vec::new(); // Look for bibliography or references section for (index, element) in document.elements.iter().enumerate() { if let DocumentElement::Heading { text, .. } = element { if text.to_lowercase().contains("reference") || text.to_lowercase().contains("bibliography") || text.to_lowercase().contains("works cited") { // Process following elements as bibliography entries for (bib_index, bib_element) in document.elements[index + 1..].iter().enumerate() { match bib_element { DocumentElement::Paragraph { runs } => { let text: String = runs.iter().map(|run| run.text.as_str()).collect(); if !text.trim().is_empty() { bibliography.push(Citation { text: text.clone(), element_index: index + bib_index + 1, citation_type: CitationType::Bibliography, }); } } DocumentElement::List { items, .. } => { for item in items { let text: String = item.runs.iter().map(|run| run.text.as_str()).collect(); bibliography.push(Citation { text, element_index: index + bib_index + 1, citation_type: CitationType::Bibliography, }); } } DocumentElement::Heading { .. } => break, // Next section _ => {} } } break; } } } Ok(bibliography) } #[allow(dead_code)] #[derive(Debug, Clone)] pub struct Citation { pub text: String, pub element_index: usize, pub citation_type: CitationType, } #[allow(dead_code)] #[derive(Debug, Clone)] pub enum CitationType { InText, Bibliography, } fn escape_csv_field(field: &str) -> String { if field.contains(',') || field.contains('"') || field.contains('\n') { format!("\"{}\"", field.replace('"', "\"\"")) } else { field.to_string() } } // Helper functions for text table rendering fn generate_text_table_border( column_widths: &[usize], left: &str, middle: &str, right: &str, fill: &str, ) -> String { let mut border = String::new(); border.push_str(left); for (i, &width) in column_widths.iter().enumerate() { border.push_str(&fill.repeat(width + 2)); // +2 for padding if i < column_widths.len() - 1 { border.push_str(middle); } } border.push_str(right); border } fn render_text_table_row(cells: &[TableCell], column_widths: &[usize], _is_header: bool) -> String { let mut row = String::new(); row.push('│'); for (i, cell) in cells.iter().enumerate() { let width = column_widths.get(i).copied().unwrap_or(10); let aligned_content = align_text_cell_content(&cell.content, cell.alignment, width); row.push(' '); row.push_str(&aligned_content); row.push(' '); row.push('│'); } row } fn align_text_cell_content(content: &str, alignment: TextAlignment, width: usize) -> String { let trimmed = content.trim(); match alignment { TextAlignment::Left => format!("{trimmed: format!("{trimmed:>width$}"), TextAlignment::Center => { let padding = width.saturating_sub(trimmed.len()); let left_pad = padding / 2; let right_pad = padding - left_pad; format!( "{}{}{}", " ".repeat(left_pad), trimmed, " ".repeat(right_pad) ) } TextAlignment::Justify => { // For export, treat justify as left-aligned format!("{trimmed: Result<()> { let options = AnsiOptions::default(); let ansi_output = export_to_ansi_with_options(document, &options)?; print!("{ansi_output}"); Ok(()) } pub fn export_to_ansi_with_cli_options( document: &Document, terminal_width: Option, color_depth: &ColorDepth, ) -> Result<()> { let options = AnsiOptions { terminal_width: terminal_width.unwrap_or_else(|| { std::env::var("COLUMNS") .ok() .and_then(|s| s.parse().ok()) .unwrap_or(80) }), color_depth: color_depth.clone(), }; let ansi_output = export_to_ansi_with_options(document, &options)?; print!("{ansi_output}"); Ok(()) } doxx-0.1.2/src/image_extractor.rs 0000644 0000000 0000000 00000014437 10461020230 0015131 0 ustar 0000000 0000000 use anyhow::Result; use std::collections::HashMap; use std::fs::{self, File}; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use zip::ZipArchive; // Type aliases to simplify complex return types type ImageList<'a> = Vec<(&'a String, &'a PathBuf)>; type ExtractedImages = Vec<(String, PathBuf)>; /// Manages extraction of images from DOCX files #[derive(Debug)] pub struct ImageExtractor { temp_dir: PathBuf, extracted_images: HashMap, // relationship_id -> temp_file_path } #[derive(Debug, Clone)] pub enum ImageFormat { Png, Jpeg, Gif, Bmp, Tiff, } impl ImageFormat { pub fn from_filename(filename: &str) -> Option { let extension = Path::new(filename).extension()?.to_str()?.to_lowercase(); match extension.as_str() { "png" => Some(Self::Png), "jpg" | "jpeg" => Some(Self::Jpeg), "gif" => Some(Self::Gif), "bmp" => Some(Self::Bmp), "tiff" | "tif" => Some(Self::Tiff), _ => None, } } pub fn to_extension(&self) -> &'static str { match self { Self::Png => "png", Self::Jpeg => "jpg", Self::Gif => "gif", Self::Bmp => "bmp", Self::Tiff => "tiff", } } } impl ImageExtractor { /// Create a new image extractor with a temporary directory pub fn new() -> Result { let temp_dir = std::env::temp_dir().join("doxx_images"); fs::create_dir_all(&temp_dir)?; Ok(Self { temp_dir, extracted_images: HashMap::new(), }) } /// Extract all images from a DOCX file pub fn extract_images_from_docx(&mut self, docx_path: &Path) -> Result<()> { let file = File::open(docx_path)?; let mut archive = ZipArchive::new(file)?; // Look for images in the word/media/ folder for i in 0..archive.len() { let mut file = archive.by_index(i)?; let outpath = file.name().to_string(); // Clone the name to avoid borrow issues // Check if this is an image file in the media folder if outpath.starts_with("word/media/") && self.is_image_file(&outpath) { let filename = Path::new(&outpath) .file_name() .and_then(|n| n.to_str()) .unwrap_or("unknown"); // Create a unique temp file path let temp_file_path = self.temp_dir.join(filename); // Read the image data let mut buffer = Vec::new(); file.read_to_end(&mut buffer)?; // Write to temp file let mut temp_file = File::create(&temp_file_path)?; temp_file.write_all(&buffer)?; // Store the mapping (we'll enhance this with proper relationship parsing later) let rel_id = filename.to_string(); // Simplified for now self.extracted_images.insert(rel_id, temp_file_path); } } println!( "Extracted {} images to {}", self.extracted_images.len(), self.temp_dir.display() ); Ok(()) } /// Get image data by relationship ID pub fn get_image_data(&self, rel_id: &str) -> Result> { if let Some(path) = self.extracted_images.get(rel_id) { Ok(fs::read(path)?) } else { anyhow::bail!("Image not found: {}", rel_id) } } /// Get the path to an extracted image pub fn get_image_path(&self, rel_id: &str) -> Option<&PathBuf> { self.extracted_images.get(rel_id) } /// List all extracted images pub fn list_images(&self) -> ImageList<'_> { self.extracted_images.iter().collect() } /// Get all extracted images as a vector of (rel_id, path) pairs pub fn get_extracted_images(&self) -> ExtractedImages { self.extracted_images .iter() .map(|(rel_id, path)| (rel_id.clone(), path.clone())) .collect() } /// Get all extracted images sorted by filename for consistent ordering pub fn get_extracted_images_sorted(&self) -> ExtractedImages { let mut images: ExtractedImages = self .extracted_images .iter() .map(|(rel_id, path)| (rel_id.clone(), path.clone())) .collect(); // Sort by filename to ensure consistent ordering images.sort_by(|a, b| a.0.cmp(&b.0)); images } /// Clean up temporary files pub fn cleanup(&self) -> Result<()> { if self.temp_dir.exists() { fs::remove_dir_all(&self.temp_dir)?; } Ok(()) } /// Check if a file is an image based on its extension fn is_image_file(&self, filename: &str) -> bool { ImageFormat::from_filename(filename).is_some() } } impl Drop for ImageExtractor { fn drop(&mut self) { // Don't automatically clean up temp files - let them persist // for the lifetime of the application. The OS will clean them up // when the temp directory is cleared, or users can manually clean up. // let _ = self.cleanup(); } } #[cfg(test)] mod tests { use super::*; #[test] fn test_image_format_detection() { assert!(matches!( ImageFormat::from_filename("image.png"), Some(ImageFormat::Png) )); assert!(matches!( ImageFormat::from_filename("photo.jpg"), Some(ImageFormat::Jpeg) )); assert!(matches!( ImageFormat::from_filename("photo.jpeg"), Some(ImageFormat::Jpeg) )); assert!(matches!( ImageFormat::from_filename("animation.gif"), Some(ImageFormat::Gif) )); assert!(matches!( ImageFormat::from_filename("bitmap.bmp"), Some(ImageFormat::Bmp) )); assert!(matches!( ImageFormat::from_filename("scan.tiff"), Some(ImageFormat::Tiff) )); assert!(ImageFormat::from_filename("document.txt").is_none()); } #[test] fn test_image_extractor_creation() { let extractor = ImageExtractor::new().unwrap(); assert!(extractor.temp_dir.exists()); assert!(extractor.extracted_images.is_empty()); } } doxx-0.1.2/src/lib.rs 0000644 0000000 0000000 00000002122 10461020230 0012506 0 ustar 0000000 0000000 //! doxx: Terminal document viewer for .docx files //! //! This library provides functionality for parsing Microsoft Word documents //! and displaying them in terminal environments with rich formatting support. pub mod ansi; pub mod document; pub mod equation; pub mod export; pub mod image_extractor; pub mod terminal_image; /// Export format options #[derive(clap::ValueEnum, Clone)] pub enum ExportFormat { Markdown, Text, Csv, Json, Ansi, } /// Color depth options for ANSI export #[derive(clap::ValueEnum, Clone, Debug)] pub enum ColorDepth { /// Auto-detect terminal color capabilities Auto, /// Monochrome (no colors) #[value(name = "1")] Monochrome, /// 16 colors #[value(name = "4")] Standard, /// 256 colors #[value(name = "8")] Extended, /// 24-bit true color #[value(name = "24")] TrueColor, } // Re-export commonly used types pub use document::{Document, DocumentElement}; pub use equation::Equation; pub use image_extractor::ImageExtractor; pub use terminal_image::{TerminalImageRenderer, TerminalImageSupport}; doxx-0.1.2/src/main.rs 0000644 0000000 0000000 00000011753 10461020230 0012676 0 ustar 0000000 0000000 use anyhow::Result; use clap::{Parser, Subcommand}; use std::path::PathBuf; use doxx::{ColorDepth, ExportFormat}; mod ansi; mod document; mod export; pub mod image_extractor; pub mod terminal_image; mod ui; #[derive(Parser)] #[command( name = "doxx", version, about = "Terminal document viewer for .docx files", long_about = "Beautiful .docx viewing in your terminal" )] struct Cli { /// Input document file (.docx) #[arg(value_name = "FILE")] file: Option, /// Start with outline view #[arg(short, long)] outline: bool, /// Jump to specific page #[arg(short, long)] page: Option, /// Search and highlight term #[arg(short, long)] search: Option, /// Export format #[arg(long, value_enum)] export: Option, /// Terminal width for ANSI export (default: $COLUMNS or 80) #[arg(short = 'w', long, value_name = "COLS")] terminal_width: Option, /// Color depth for ANSI export #[arg(long, value_enum, default_value = "auto")] color_depth: ColorDepth, /// Force interactive UI mode (bypass TTY detection) #[arg(long)] force_ui: bool, /// Enable color support for text rendering #[arg(long)] color: bool, /// Display images inline in terminal (auto-detect capabilities) #[arg(long)] images: bool, /// Force text-only mode for images (no inline display) #[arg(long)] no_images: bool, /// Extract images to a directory #[arg(long)] extract_images: Option, /// Maximum image width in terminal columns (default: auto-detect) #[arg(long, value_name = "COLS")] image_width: Option, /// Maximum image height in terminal rows (default: auto-detect) #[arg(long, value_name = "ROWS")] image_height: Option, /// Image scaling factor (0.1 to 2.0, default: 1.0) #[arg(long, value_name = "SCALE")] image_scale: Option, /// Test terminal image capabilities #[arg(long)] debug_terminal: bool, /// Configuration commands #[command(subcommand)] config: Option, } #[derive(Subcommand)] enum ConfigCommands { /// Set configuration value Set { key: String, value: String }, /// Get configuration value Get { key: String }, /// Initialize configuration Init, } #[tokio::main] async fn main() -> Result<()> { let cli = Cli::parse(); // Handle debug terminal command if cli.debug_terminal { use terminal_image::TerminalImageRenderer; let renderer = TerminalImageRenderer::new(); renderer.print_capabilities(); return Ok(()); } match &cli.config { Some(ConfigCommands::Init) => { println!("Initializing doxx configuration..."); // TODO: Initialize config file return Ok(()); } Some(ConfigCommands::Set { key, value }) => { println!("Setting {key} = {value}"); // TODO: Set config value return Ok(()); } Some(ConfigCommands::Get { key }) => { println!("Getting {key}"); // TODO: Get config value return Ok(()); } None => {} } let file_path = cli .file .clone() .ok_or_else(|| anyhow::anyhow!("Please provide a document file to view"))?; if !file_path.exists() { anyhow::bail!("File not found: {}", file_path.display()); } let image_options = document::ImageOptions { enabled: cli.images, max_width: cli.image_width, max_height: cli.image_height, scale: cli.image_scale, }; let document = document::load_document(&file_path, image_options).await?; // Handle image extraction flag if let Some(extract_dir) = &cli.extract_images { use image_extractor::ImageExtractor; let mut extractor = ImageExtractor::new()?; extractor.extract_images_from_docx(&file_path)?; // Copy extracted images to the specified directory std::fs::create_dir_all(extract_dir)?; for (rel_id, temp_path) in extractor.list_images() { let target_path = extract_dir.join(rel_id); std::fs::copy(temp_path, &target_path)?; println!("Extracted: {}", target_path.display()); } println!( "Successfully extracted {} images to {}", extractor.list_images().len(), extract_dir.display() ); return Ok(()); } if let Some(export_format) = &cli.export { match export_format { ExportFormat::Ansi => { export::export_to_ansi_with_cli_options( &document, cli.terminal_width, &cli.color_depth, )?; } _ => { export::export_document(&document, export_format)?; } } return Ok(()); } // Start terminal UI ui::run_viewer(document, &cli).await?; Ok(()) } doxx-0.1.2/src/terminal_image.rs 0000644 0000000 0000000 00000024241 10461020230 0014723 0 ustar 0000000 0000000 use anyhow::Result; use std::path::Path; /// Terminal image display capabilities #[derive(Debug, Clone, Copy, PartialEq)] pub enum TerminalImageSupport { Kitty, // Kitty graphics protocol ITerm2, // iTerm2 graphics protocol Sixel, // Sixel graphics HalfBlocks, // Unicode half-block fallback None, // Text description only } /// Handles display of images in the terminal using various protocols #[derive(Debug)] pub struct TerminalImageRenderer { support: TerminalImageSupport, max_width: u32, max_height: u32, } impl TerminalImageRenderer { /// Create a new terminal image renderer with auto-detected capabilities pub fn new() -> Self { let support = Self::detect_capabilities(); let (max_width, max_height) = Self::get_terminal_size(); Self { support, max_width, max_height, } } /// Create a new terminal image renderer with custom size limits pub fn with_size_limits(max_width: Option, max_height: Option) -> Self { let support = Self::detect_capabilities(); let (default_width, default_height) = Self::get_terminal_size(); Self { support, max_width: max_width.unwrap_or(default_width), max_height: max_height.unwrap_or(default_height), } } /// Create a new terminal image renderer with custom size limits and scaling pub fn with_options( max_width: Option, max_height: Option, scale: Option, ) -> Self { let support = Self::detect_capabilities(); let (default_width, default_height) = Self::get_terminal_size(); let scale_factor = scale.unwrap_or(1.0).clamp(0.1, 2.0); // Clamp between 0.1 and 2.0 let scaled_width = max_width.unwrap_or(default_width); let scaled_height = max_height.unwrap_or(default_height); Self { support, max_width: ((scaled_width as f32) * scale_factor) as u32, max_height: ((scaled_height as f32) * scale_factor) as u32, } } /// Create a renderer with specific capabilities (for testing) pub fn with_support(support: TerminalImageSupport) -> Self { let (max_width, max_height) = Self::get_terminal_size(); Self { support, max_width, max_height, } } /// Detect terminal image display capabilities pub fn detect_capabilities() -> TerminalImageSupport { // Check for WezTerm FIRST - it supports Kitty protocol if let Ok(term_program) = std::env::var("TERM_PROGRAM") { if term_program == "WezTerm" { return TerminalImageSupport::Kitty; } } // Check for iTerm2 (this function exists) if viuer::is_iterm_supported() { return TerminalImageSupport::ITerm2; } // Sixel support disabled for now to avoid linking issues // Will re-enable after fixing dependencies // Check terminal type for Kitty support if let Ok(term) = std::env::var("TERM") { match term.as_str() { "xterm-kitty" => TerminalImageSupport::Kitty, "wezterm" => TerminalImageSupport::Kitty, "screen" | "screen-256color" => { // Screen/tmux might support passthrough TerminalImageSupport::HalfBlocks } _ => TerminalImageSupport::HalfBlocks, } } else { TerminalImageSupport::HalfBlocks } } /// Get the current support level pub fn support(&self) -> TerminalImageSupport { self.support } /// Check if we can display images inline pub fn can_display_images(&self) -> bool { !matches!(self.support, TerminalImageSupport::None) } /// Render an image from a file path pub fn render_image_from_path(&self, image_path: &Path, description: &str) -> Result<()> { match self.support { TerminalImageSupport::None => { println!("📷 Image: {description}"); Ok(()) } _ => { let display_path = image_path.to_path_buf(); // Use viuer to display the image with appropriate protocol let mut conf = viuer::Config { transparent: true, absolute_offset: false, width: Some(self.max_width.min(80)), // Limit width to 80 columns height: Some(self.max_height.min(24)), // Limit height to 24 rows ..Default::default() }; // Set protocol based on terminal capability match self.support { TerminalImageSupport::Kitty => { conf.use_kitty = true; conf.use_iterm = false; } TerminalImageSupport::ITerm2 => { conf.use_kitty = false; conf.use_iterm = true; } _ => {} } match viuer::print_from_file(&display_path, &conf) { Ok(_) => { // Print description after the image if !description.is_empty() { println!("📷 {description}"); } Ok(()) } Err(e) => { // Fallback to text description if image display fails println!("📷 Image: {description} (display failed: {e})"); Ok(()) } } } } } /// Render an image from raw bytes pub fn render_image_from_bytes(&self, image_data: &[u8], description: &str) -> Result<()> { match self.support { TerminalImageSupport::None => { println!("📷 Image: {description}"); Ok(()) } _ => { let mut conf = viuer::Config { transparent: true, absolute_offset: false, width: Some(self.max_width.min(80)), height: Some(self.max_height.min(24)), ..Default::default() }; // Set protocol based on terminal capability match self.support { TerminalImageSupport::Kitty => { conf.use_kitty = true; conf.use_iterm = false; } TerminalImageSupport::ITerm2 => { conf.use_kitty = false; conf.use_iterm = true; } _ => {} } // Create a temporary file for viuer (it needs a file path) let temp_path = std::env::temp_dir().join("doxx_temp_image.png"); std::fs::write(&temp_path, image_data)?; match viuer::print_from_file(&temp_path, &conf) { Ok(_) => { // Clean up temp file let _ = std::fs::remove_file(&temp_path); if !description.is_empty() { println!("📷 {description}"); } Ok(()) } Err(e) => { println!("📷 Image: {description} (display failed: {e})"); Ok(()) } } } } } /// Get terminal size for image scaling fn get_terminal_size() -> (u32, u32) { // Try to get terminal size from crossterm if let Ok((width, height)) = crossterm::terminal::size() { (width as u32, height as u32) } else { // Fallback to reasonable defaults (80, 24) } } /// Print capabilities information for debugging pub fn print_capabilities(&self) { println!("=== Terminal Image Debug Information ==="); println!("Detected support: {:?}", self.support); println!("Max dimensions: {}x{}", self.max_width, self.max_height); println!("Can display images: {}", self.can_display_images()); // Environment variables if let Ok(term) = std::env::var("TERM") { println!("TERM: {term}"); } else { println!("TERM: not set"); } if let Ok(term_program) = std::env::var("TERM_PROGRAM") { println!("TERM_PROGRAM: {term_program}"); } else { println!("TERM_PROGRAM: not set"); } // Viuer capabilities println!( "viuer::is_iterm_supported(): {}", viuer::is_iterm_supported() ); // Additional debug info if let Ok(colorterm) = std::env::var("COLORTERM") { println!("COLORTERM: {colorterm}"); } println!("========================================"); } /// Debug method to test image rendering pub fn debug_render(&self) { println!( "DEBUG: Attempting to render test image with support: {:?}", self.support ); } } impl Default for TerminalImageRenderer { fn default() -> Self { Self::new() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_capability_detection() { // This will vary by environment, but should not panic let support = TerminalImageRenderer::detect_capabilities(); println!("Detected support: {support:?}"); } #[test] fn test_renderer_creation() { let renderer = TerminalImageRenderer::new(); assert!(renderer.max_width > 0); assert!(renderer.max_height > 0); } #[test] fn test_can_display_images() { let renderer = TerminalImageRenderer::with_support(TerminalImageSupport::Kitty); assert!(renderer.can_display_images()); let renderer = TerminalImageRenderer::with_support(TerminalImageSupport::None); assert!(!renderer.can_display_images()); } } doxx-0.1.2/src/ui.rs 0000644 0000000 0000000 00000131427 10461020230 0012370 0 ustar 0000000 0000000 use anyhow::Result; use arboard::Clipboard; use crossterm::{ event::{ self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind, MouseEventKind, }, execute, terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, tty::IsTty, }; use ratatui::{ backend::{Backend, CrosstermBackend}, layout::{Constraint, Direction, Layout, Margin, Rect}, style::{Color, Modifier, Style}, text::{Line, Span, Text}, widgets::{ Block, Borders, Clear, List, ListItem, ListState, Paragraph, Scrollbar, ScrollbarOrientation, ScrollbarState, Wrap, }, Frame, Terminal, }; use std::io; use crate::{document::*, Cli}; use ratatui_image::{picker::Picker, protocol::StatefulProtocol}; type ImageProtocols = Vec; pub struct App { pub document: Document, pub current_view: ViewMode, pub scroll_offset: usize, pub search_query: String, pub search_results: Vec, pub backup_search_results: Vec, pub current_search_index: usize, pub outline_state: ListState, pub show_help: bool, pub clipboard: Option, pub status_message: Option, pub color_enabled: bool, pub image_picker: Option, pub image_protocols: ImageProtocols, } #[derive(Debug, Clone)] pub enum ViewMode { Document, Outline, Search, #[allow(dead_code)] Help, } impl App { pub fn new(document: Document, cli: &Cli) -> Self { let mut app = Self { document, current_view: ViewMode::Document, scroll_offset: 0, search_query: String::new(), search_results: Vec::new(), backup_search_results: Vec::new(), current_search_index: 0, outline_state: ListState::default(), show_help: false, clipboard: Clipboard::new().ok(), status_message: None, color_enabled: cli.color, image_picker: None, image_protocols: Vec::new(), }; // Apply CLI options if cli.outline { app.current_view = ViewMode::Outline; } if let Some(search) = &cli.search { app.search_query = search.clone(); app.search_results = crate::document::search_document(&app.document, search); app.current_view = ViewMode::Search; } if let Some(page) = cli.page { // Rough estimate of elements per page let elements_per_page = 10; app.scroll_offset = (page.saturating_sub(1)) * elements_per_page; } // Initialize image support if images are enabled if cli.images { app.init_image_support(); } app } fn init_image_support(&mut self) { // Try to initialize picker from terminal query on Unix, use font size on Windows #[cfg(unix)] let picker = if let Ok(p) = Picker::from_query_stdio() { p } else { // Fallback to manual font size Picker::from_fontsize((8, 16)) }; #[cfg(not(unix))] let picker = Picker::from_fontsize((8, 16)); // Process all images in the document for element in &self.document.elements { if let DocumentElement::Image { image_path: Some(path), .. } = element { // Try to load and create protocol for each image if let Ok(img) = image::ImageReader::open(path) { if let Ok(dyn_img) = img.decode() { let protocol = picker.new_resize_protocol(dyn_img); self.image_protocols.push(protocol); } } } } self.image_picker = Some(picker); } pub fn next_search_result(&mut self) { if !self.search_results.is_empty() { self.current_search_index = (self.current_search_index + 1) % self.search_results.len(); if let Some(result) = self.search_results.get(self.current_search_index) { self.scroll_offset = result.element_index; } } } pub fn prev_search_result(&mut self) { if !self.search_results.is_empty() { self.current_search_index = if self.current_search_index == 0 { self.search_results.len() - 1 } else { self.current_search_index - 1 }; if let Some(result) = self.search_results.get(self.current_search_index) { self.scroll_offset = result.element_index; } } } pub fn scroll_up(&mut self) { self.scroll_offset = self.scroll_offset.saturating_sub(1); } pub fn scroll_down(&mut self) { if self.scroll_offset + 1 < self.document.elements.len() { self.scroll_offset += 1; } } pub fn page_up(&mut self, page_size: usize) { self.scroll_offset = self.scroll_offset.saturating_sub(page_size); } pub fn page_down(&mut self, page_size: usize) { let max_offset = self.document.elements.len().saturating_sub(1); self.scroll_offset = std::cmp::min(self.scroll_offset + page_size, max_offset); } pub fn copy_content(&mut self) { if let Some(clipboard) = &mut self.clipboard { let content = match self.current_view { ViewMode::Document => { // Copy the full document as text crate::export::format_as_text(&self.document) } ViewMode::Search => { // Copy search results if self.search_results.is_empty() { "No search results to copy.".to_string() } else { let mut content = format!("Search results for '{}':\n\n", self.search_query); for (i, result) in self.search_results.iter().enumerate() { content.push_str(&format!("{}. {}\n", i + 1, result.text.trim())); } content } } ViewMode::Outline => { // Copy document outline let outline = crate::document::generate_outline(&self.document); let mut content = String::from("Document Outline:\n\n"); for item in outline { let indent = " ".repeat((item.level as usize).saturating_sub(1)); content.push_str(&format!("{}{}\n", indent, item.title)); } content } _ => "Content not available for copying in this view.".to_string(), }; match clipboard.set_text(content) { Ok(_) => { self.status_message = Some("Copied to clipboard!".to_string()); } Err(_) => { self.status_message = Some("Failed to copy to clipboard.".to_string()); } } } else { self.status_message = Some("Clipboard not available.".to_string()); } } pub fn clear_status_message(&mut self) { self.status_message = None; } pub fn toggle_search_state(&mut self) { if self.search_query.is_empty() { return; } // Toggles search state: clears results if active, restores backup if inactive. if !self.search_results.is_empty() { self.backup_search_results = self.search_results.clone(); self.search_results.clear(); } else if !self.backup_search_results.is_empty() { self.search_results = self.backup_search_results.clone(); self.backup_search_results.clear(); } } } async fn run_non_interactive(document: Document, cli: &Cli) -> Result<()> { let app = App::new(document, cli); match app.current_view { ViewMode::Outline => { // Show outline let outline = crate::document::generate_outline(&app.document); println!("Document Outline:"); println!("================"); for item in outline { let indent = " ".repeat((item.level.saturating_sub(1)) as usize); println!("{}{}", indent, item.title); } } ViewMode::Search => { // Show search results println!("Search Results for '{}':", app.search_query); println!("=============================="); for (i, result) in app.search_results.iter().enumerate() { println!("{}. {}", i + 1, result.text.trim()); println!(); } if app.search_results.is_empty() { println!("No results found."); } } _ => { // Default: show basic document info and content preview println!("Document: {}", app.document.title); println!("Pages: {}", app.document.metadata.page_count); println!("Words: {}", app.document.metadata.word_count); println!(); println!("Content Preview:"); println!("================"); // Show first few elements with proper formatting let preview_count = std::cmp::min(app.document.elements.len(), 20); for element in &app.document.elements[0..preview_count] { match element { DocumentElement::Heading { level, text, number, } => { let prefix = match level { 1 => "# ", 2 => "## ", _ => "### ", }; let heading_text = if let Some(number) = number { format!("{number} {text}") } else { text.clone() }; println!("{prefix}{heading_text}"); println!(); } DocumentElement::Paragraph { runs } => { let text: String = runs.iter().map(|run| run.text.as_str()).collect(); println!("{text}"); println!(); } DocumentElement::List { items, ordered } => { for (i, item) in items.iter().enumerate() { let bullet = if *ordered { format!("{}. ", i + 1) } else { "• ".to_string() }; let indent = " ".repeat(item.level as usize); let item_text: String = item.runs.iter().map(|run| run.text.as_str()).collect(); println!("{indent}{bullet}{item_text}"); } println!(); } DocumentElement::Table { .. } => { println!("[Table content - use --export csv to view]"); println!(); } DocumentElement::Image { description, image_path, .. } => { if let Some(path) = image_path { // Try to display the image inline using terminal protocols match crate::terminal_image::TerminalImageRenderer::with_options( app.document.image_options.max_width, app.document.image_options.max_height, app.document.image_options.scale, ) .render_image_from_path(path, description) { Ok(_) => { // Image displayed successfully println!(); } Err(_) => { // Fallback to text description println!("📷 [Image: {description}]"); println!(); } } } else { println!("📷 [Image: {description}]"); println!(); } } DocumentElement::Equation { latex, .. } => { println!("📐 Equation: {latex}"); println!(); } DocumentElement::PageBreak => { println!("---"); println!(); } } } if app.document.elements.len() > preview_count { println!( "... and {} more elements", app.document.elements.len() - preview_count ); println!(); } println!( "Use --export to save full content, or run in an interactive terminal for full UI." ); } } Ok(()) } pub async fn run_viewer(document: Document, cli: &Cli) -> Result<()> { // Check if we're in an interactive terminal or forced to use UI if !cli.force_ui && !IsTty::is_tty(&io::stdout()) { // Fallback for non-interactive environments return run_non_interactive(document, cli).await; } // Setup terminal enable_raw_mode()?; let mut stdout = io::stdout(); execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?; let backend = CrosstermBackend::new(stdout); let mut terminal = Terminal::new(backend)?; // Create app let mut app = App::new(document, cli); // Run the app let res = run_app(&mut terminal, &mut app).await; // Restore terminal disable_raw_mode()?; execute!( terminal.backend_mut(), LeaveAlternateScreen, DisableMouseCapture )?; terminal.show_cursor()?; if let Err(err) = res { println!("{err:?}"); } Ok(()) } async fn run_app(terminal: &mut Terminal, app: &mut App) -> Result<()> { loop { terminal.draw(|f| ui(f, app))?; match event::read()? { Event::Key(key) => { if key.kind == KeyEventKind::Press { // Clear status message on any key press (except the copy key) if app.status_message.is_some() && key.code != KeyCode::Char('c') && key.code != KeyCode::F(2) { app.clear_status_message(); } match app.current_view { ViewMode::Document => match key.code { KeyCode::Char('q') => break, KeyCode::Char('o') => app.current_view = ViewMode::Outline, KeyCode::Char('s') => app.current_view = ViewMode::Search, KeyCode::Char('S') => app.toggle_search_state(), KeyCode::Char('c') => app.copy_content(), KeyCode::Char('h') | KeyCode::F(1) => app.show_help = !app.show_help, KeyCode::Up | KeyCode::Char('k') => app.scroll_up(), KeyCode::Down | KeyCode::Char('j') => app.scroll_down(), KeyCode::PageUp => app.page_up(10), KeyCode::PageDown => app.page_down(10), KeyCode::Home => app.scroll_offset = 0, KeyCode::End => { app.scroll_offset = app.document.elements.len().saturating_sub(1) } KeyCode::Char('n') if !app.search_results.is_empty() => { app.next_search_result() } KeyCode::Char('p') if !app.search_results.is_empty() => { app.prev_search_result() } _ => {} }, ViewMode::Outline => match key.code { KeyCode::Char('q') | KeyCode::Esc => { app.current_view = ViewMode::Document } KeyCode::Char('c') => app.copy_content(), KeyCode::Up | KeyCode::Char('k') => { let selected = app.outline_state.selected().unwrap_or(0); if selected > 0 { app.outline_state.select(Some(selected - 1)); } } KeyCode::Down | KeyCode::Char('j') => { let selected = app.outline_state.selected().unwrap_or(0); if selected + 1 < crate::document::generate_outline(&app.document).len() { app.outline_state.select(Some(selected + 1)); } } KeyCode::Enter => { if let Some(selected) = app.outline_state.selected() { if let Some(outline_item) = crate::document::generate_outline(&app.document) .get(selected) { app.scroll_offset = outline_item.element_index; app.current_view = ViewMode::Document; } } } _ => {} }, ViewMode::Search => match key.code { KeyCode::Esc => app.current_view = ViewMode::Document, KeyCode::F(2) => app.copy_content(), // Use F2 for copy in search mode to avoid conflicts KeyCode::Char(c) => { app.search_query.push(c); app.search_results = crate::document::search_document( &app.document, &app.search_query, ); app.current_search_index = 0; } KeyCode::Backspace => { app.search_query.pop(); app.search_results = crate::document::search_document( &app.document, &app.search_query, ); app.current_search_index = 0; } KeyCode::Enter | KeyCode::Down => app.next_search_result(), KeyCode::Up => app.prev_search_result(), _ => {} }, ViewMode::Help => match key.code { KeyCode::Char('q') | KeyCode::Esc | KeyCode::Char('h') | KeyCode::F(1) => { app.show_help = false; app.current_view = ViewMode::Document; } _ => {} }, } } } Event::Mouse(mouse) => { match mouse.kind { MouseEventKind::ScrollUp => { match app.current_view { ViewMode::Document => { // Scroll up 3 lines for smooth mouse wheel experience for _ in 0..3 { app.scroll_up(); } } ViewMode::Outline => { let selected = app.outline_state.selected().unwrap_or(0); if selected > 0 { app.outline_state.select(Some(selected - 1)); } } ViewMode::Search => app.prev_search_result(), _ => {} } } MouseEventKind::ScrollDown => { match app.current_view { ViewMode::Document => { // Scroll down 3 lines for smooth mouse wheel experience for _ in 0..3 { app.scroll_down(); } } ViewMode::Outline => { let selected = app.outline_state.selected().unwrap_or(0); if selected + 1 < crate::document::generate_outline(&app.document).len() { app.outline_state.select(Some(selected + 1)); } } ViewMode::Search => app.next_search_result(), _ => {} } } _ => {} } } _ => {} } } Ok(()) } fn ui(f: &mut Frame, app: &mut App) { let chunks = Layout::default() .direction(Direction::Vertical) .constraints([Constraint::Min(0), Constraint::Length(3)].as_ref()) .split(f.area()); // Main content area match app.current_view { ViewMode::Document => render_document(f, chunks[0], app), ViewMode::Outline => render_outline(f, chunks[0], app), ViewMode::Search => render_search(f, chunks[0], app), ViewMode::Help => render_help(f, chunks[0]), } // Status bar render_status_bar(f, chunks[1], app); // Help overlay if app.show_help { render_help_overlay(f, app); } } fn render_document(f: &mut Frame, area: Rect, app: &mut App) { let title = format!("📄 doxx - {}", app.document.title); let block = Block::default() .title(title) .borders(Borders::ALL) .border_style(Style::default().fg(Color::Blue)); let inner = block.inner(area); f.render_widget(block, area); let visible_height = inner.height as usize; let end_index = std::cmp::min( app.scroll_offset + visible_height, app.document.elements.len(), ); let mut text = Text::default(); for (index, element) in app.document.elements[app.scroll_offset..end_index] .iter() .enumerate() { let actual_index = app.scroll_offset + index; let is_search_match = app .search_results .iter() .any(|r| r.element_index == actual_index); match element { DocumentElement::Heading { level, text: heading_text, number, } => { let style = match level { 1 => Style::default() .fg(Color::Yellow) .add_modifier(Modifier::BOLD), 2 => Style::default() .fg(Color::Green) .add_modifier(Modifier::BOLD), _ => Style::default() .fg(Color::Cyan) .add_modifier(Modifier::BOLD), }; let prefix = match level { 1 => "■ ".to_string(), 2 => " ▶ ".to_string(), 3 => " ◦ ".to_string(), _ => " • ".to_string(), }; let display_text = if let Some(number) = number { format!("{number} {heading_text}") } else { heading_text.clone() }; let line = if is_search_match { Line::from(vec![ Span::styled(prefix.clone(), style), Span::styled(display_text, style.bg(Color::Yellow).fg(Color::Black)), ]) } else { Line::from(vec![ Span::styled(prefix, style), Span::styled(display_text, style), ]) }; text.lines.push(line); text.lines.push(Line::from("")); } DocumentElement::Paragraph { runs } => { // Skip empty paragraphs if runs.is_empty() || runs.iter().all(|run| run.text.trim().is_empty()) { continue; } // Build spans from individual runs with their formatting let mut spans = Vec::new(); let total_text: String = runs.iter().map(|run| run.text.as_str()).collect(); for run in runs { let mut style = Style::default(); // Apply text formatting if run.formatting.bold { style = style.add_modifier(Modifier::BOLD); } if run.formatting.italic { style = style.add_modifier(Modifier::ITALIC); } if run.formatting.underline { style = style.add_modifier(Modifier::UNDERLINED); } if run.formatting.strikethrough { style = style.add_modifier(Modifier::CROSSED_OUT); } // Apply text color from document formatting (only if color is enabled) if app.color_enabled { if let Some(color_hex) = &run.formatting.color { if let Some(color) = hex_to_color(color_hex) { style = style.fg(color); } } } // Add visual indication for different types of content let display_text = if total_text.len() > 100 { // Long paragraphs get some indentation for the first run only if spans.is_empty() { format!(" {}", run.text) } else { run.text.clone() } } else { run.text.clone() }; if is_search_match { style = style.bg(Color::Yellow).fg(Color::Black); } spans.push(Span::styled(display_text, style)); } let line = Line::from(spans); text.lines.push(line); text.lines.push(Line::from("")); } DocumentElement::List { items, ordered } => { for (i, item) in items.iter().enumerate() { let bullet = if *ordered { format!("{}. ", i + 1) } else { "• ".to_string() }; let indent = " ".repeat(item.level as usize); // Combine indent and bullet to ensure proper spacing let prefixed_bullet = format!("{indent}{bullet}"); // Create spans for the formatted runs let mut spans = vec![Span::styled( prefixed_bullet, Style::default().fg(Color::Blue), )]; for run in &item.runs { let mut style = Style::default(); if run.formatting.bold { style = style.add_modifier(Modifier::BOLD); } if run.formatting.italic { style = style.add_modifier(Modifier::ITALIC); } if run.formatting.underline { style = style.add_modifier(Modifier::UNDERLINED); } if run.formatting.strikethrough { style = style.add_modifier(Modifier::CROSSED_OUT); } if let Some(color_hex) = &run.formatting.color { if let Some(color) = hex_to_color(color_hex) { style = style.fg(color); } } spans.push(Span::styled(run.text.clone(), style)); } let line = Line::from(spans); text.lines.push(line); } text.lines.push(Line::from("")); } DocumentElement::Table { table } => { render_table_enhanced(table, &mut text); } DocumentElement::Image { description, width, height, image_path, .. } => { let dimensions = match (width, height) { (Some(w), Some(h)) => format!(" ({w}x{h})"), _ => String::new(), }; let status = if image_path.is_some() && !app.image_protocols.is_empty() { " [TUI placeholder - use --export text to view images]" } else if image_path.is_some() { " [Image available - use --export text to view]" } else { " [Image not extracted]" }; let line = Line::from(vec![ Span::styled("🖼️ ", Style::default().fg(Color::Magenta)), Span::styled(description, Style::default().fg(Color::Gray)), Span::styled(dimensions, Style::default().fg(Color::DarkGray)), Span::styled(status, Style::default().fg(Color::Green)), ]); text.lines.push(line); text.lines.push(Line::from("")); } DocumentElement::Equation { latex, .. } => { let line = Line::from(vec![ Span::styled("📐 ", Style::default().fg(Color::Cyan)), Span::styled( latex, Style::default() .fg(Color::White) .add_modifier(Modifier::BOLD), ), ]); text.lines.push(line); text.lines.push(Line::from("")); } DocumentElement::PageBreak => { text.lines.push(Line::from(Span::styled( "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━", Style::default().fg(Color::DarkGray), ))); text.lines.push(Line::from("")); } } } let paragraph = Paragraph::new(text) .wrap(Wrap { trim: false }) // Don't trim whitespace to preserve list indentation .scroll((0, 0)); f.render_widget(paragraph, inner); // Render scrollbar let scrollbar = Scrollbar::default() .orientation(ScrollbarOrientation::VerticalRight) .begin_symbol(Some("↑")) .end_symbol(Some("↓")); let mut scrollbar_state = ScrollbarState::default() .content_length(app.document.elements.len()) .position(app.scroll_offset); f.render_stateful_widget( scrollbar, area.inner(Margin { vertical: 1, horizontal: 0, }), &mut scrollbar_state, ); } fn render_outline(f: &mut Frame, area: Rect, app: &mut App) { let outline = crate::document::generate_outline(&app.document); let items: Vec = outline .iter() .map(|item| { let indent = " ".repeat((item.level.saturating_sub(1)) as usize); let text = format!("{}{}", indent, item.title); ListItem::new(text) }) .collect(); let list = List::new(items) .block( Block::default() .title("📋 Document Outline") .borders(Borders::ALL) .border_style(Style::default().fg(Color::Green)), ) .style(Style::default().fg(Color::White)) .highlight_style(Style::default().bg(Color::Blue).fg(Color::White)) .highlight_symbol("➤ "); f.render_stateful_widget(list, area, &mut app.outline_state); } fn render_search(f: &mut Frame, area: Rect, app: &App) { let chunks = Layout::default() .direction(Direction::Vertical) .constraints([Constraint::Length(3), Constraint::Min(0)].as_ref()) .split(area); // Search input let input = Paragraph::new(app.search_query.as_str()) .style(Style::default().fg(Color::Yellow)) .block( Block::default() .borders(Borders::ALL) .title("🔍 Search") .border_style(Style::default().fg(Color::Yellow)), ); f.render_widget(input, chunks[0]); // Search results let results: Vec = app .search_results .iter() .enumerate() .map(|(i, result)| { let prefix = "📄"; // Simplified for now let style = if i == app.current_search_index { Style::default().bg(Color::Blue).fg(Color::White) } else { Style::default() }; // Truncate long results and add context (Unicode-safe) let display_text = if result.text.len() > 80 { // Safe truncation: find the largest valid UTF-8 boundary <= 77 bytes let max_bytes = 77; let safe_boundary = if result.text.len() <= max_bytes { result.text.len() } else { let mut boundary = max_bytes; while boundary > 0 && !result.text.is_char_boundary(boundary) { boundary -= 1; } boundary }; format!("{}...", &result.text[..safe_boundary]) } else { result.text.clone() }; ListItem::new(format!("{} {} [{}]", prefix, display_text, i + 1)).style(style) }) .collect(); let results_list = List::new(results).block( Block::default() .title(format!( "Results ({}/{})", if app.search_results.is_empty() { 0 } else { app.current_search_index + 1 }, app.search_results.len() )) .borders(Borders::ALL) .border_style(Style::default().fg(Color::Yellow)), ); f.render_widget(results_list, chunks[1]); } fn render_help(f: &mut Frame, area: Rect) { let help_text = vec![ "🆘 doxx - Help", "", "📖 Document Navigation:", " ↑/k Scroll up", " ↓/j Scroll down", " Page Up Page up", " Page Down Page down", " Home Go to start", " End Go to end", "", "🔍 Search:", " s Open search", " n Next result", " p Previous result", " S Deselect/Reselect current selection", "", "📋 Other Features:", " o Show outline", " c Copy content to clipboard", " h/F1 Toggle help", " q Quit", "", "📄 Copy Functionality:", " Document: Copies full document as text", " Outline: Copies document structure", " Search: Copies search results (use F2)", "", "Press any key to close help...", ]; let help = Paragraph::new(help_text.join("\n")) .block( Block::default() .title("Help") .borders(Borders::ALL) .border_style(Style::default().fg(Color::Yellow)), ) .wrap(Wrap { trim: true }); f.render_widget(help, area); } fn render_help_overlay(f: &mut Frame, _app: &App) { let area = centered_rect(60, 70, f.area()); f.render_widget(Clear, area); render_help(f, area); } fn render_status_bar(f: &mut Frame, area: Rect, app: &App) { let metadata = &app.document.metadata; let view_indicator = match app.current_view { ViewMode::Document => "📄 Document", ViewMode::Outline => "📋 Outline", ViewMode::Search => "🔍 Search", ViewMode::Help => "❓ Help", }; let search_info = if !app.search_results.is_empty() { format!( " • 🔍 {}/{} matches", app.current_search_index + 1, app.search_results.len() ) } else if !app.search_query.is_empty() { " • 🔍 No matches".to_string() } else { String::new() }; let status_text = if let Some(status_msg) = &app.status_message { // Show status message (like copy confirmation) with higher priority status_msg.clone() } else { format!( "{} • 📄 {} • {} pages • {} words • {}/{}{}", view_indicator, metadata .file_path .split('/') .next_back() .unwrap_or("Unknown"), metadata.page_count, metadata.word_count, app.scroll_offset + 1, app.document.elements.len(), search_info ) }; let status_style = if app.status_message.is_some() { // Highlight status messages Style::default() .fg(Color::Green) .bg(Color::DarkGray) .add_modifier(Modifier::BOLD) } else { Style::default().fg(Color::White).bg(Color::DarkGray) }; let status = Paragraph::new(status_text) .style(status_style) .block(Block::default()); f.render_widget(status, area); // Navigation help let help_text = "[↕] Scroll [o] Outline [s] Search [c] Copy [h] Help [q] Quit"; let help_area = Rect { x: area.x, y: area.y + 1, width: area.width, height: 1, }; let help = Paragraph::new(help_text) .style(Style::default().fg(Color::Gray)) .block(Block::default()); f.render_widget(help, help_area); } fn render_table_enhanced(table: &TableData, text: &mut Text) { let metadata = &table.metadata; // Add table title if present if let Some(title) = &metadata.title { text.lines.push(Line::from(Span::styled( format!("📊 {title}"), Style::default() .fg(Color::Blue) .add_modifier(Modifier::BOLD), ))); text.lines.push(Line::from("")); } // Generate table with proper alignment and borders if !table.headers.is_empty() { // Top border let top_border = generate_table_border(&metadata.column_widths, BorderType::Top); text.lines.push(Line::from(Span::styled( top_border, Style::default().fg(Color::Gray), ))); // Header row let header_line = render_table_row(&table.headers, &metadata.column_widths, true); text.lines.push(Line::from(Span::styled( header_line, Style::default().add_modifier(Modifier::BOLD), ))); // Header separator let separator = generate_table_border(&metadata.column_widths, BorderType::Separator); text.lines.push(Line::from(Span::styled( separator, Style::default().fg(Color::Gray), ))); // Data rows for row in &table.rows { let row_line = render_table_row(row, &metadata.column_widths, false); text.lines.push(Line::from(Span::raw(row_line))); } // Bottom border let bottom_border = generate_table_border(&metadata.column_widths, BorderType::Bottom); text.lines.push(Line::from(Span::styled( bottom_border, Style::default().fg(Color::Gray), ))); } text.lines.push(Line::from("")); } #[derive(Clone, Copy)] enum BorderType { Top, Separator, Bottom, } fn generate_table_border(column_widths: &[usize], border_type: BorderType) -> String { let (left, middle, right, fill) = match border_type { BorderType::Top => ("┌", "┬", "┐", "─"), BorderType::Separator => ("├", "┼", "┤", "─"), BorderType::Bottom => ("└", "┴", "┘", "─"), }; let mut border = String::new(); border.push_str(left); for (i, &width) in column_widths.iter().enumerate() { border.push_str(&fill.repeat(width + 2)); // +2 for padding if i < column_widths.len() - 1 { border.push_str(middle); } } border.push_str(right); border } fn render_table_row(cells: &[TableCell], column_widths: &[usize], is_header: bool) -> String { let mut row = String::new(); row.push('│'); for (i, cell) in cells.iter().enumerate() { let width = column_widths.get(i).copied().unwrap_or(10); let aligned_content = align_cell_content(&cell.content, cell.alignment, width); let formatted_content = if is_header { aligned_content } else { apply_cell_formatting(&aligned_content, &cell.formatting) }; row.push(' '); row.push_str(&formatted_content); row.push(' '); row.push('│'); } row } fn align_cell_content(content: &str, alignment: TextAlignment, width: usize) -> String { let trimmed = content.trim(); match alignment { TextAlignment::Left => format!("{trimmed: format!("{trimmed:>width$}"), TextAlignment::Center => { let padding = width.saturating_sub(trimmed.len()); let left_pad = padding / 2; let right_pad = padding - left_pad; format!( "{}{}{}", " ".repeat(left_pad), trimmed, " ".repeat(right_pad) ) } TextAlignment::Justify => { // For terminal output, treat justify as left-aligned format!("{trimmed: String { // For terminal output, we'll keep formatting simple // Advanced formatting could use ANSI codes here content.to_string() } fn centered_rect(percent_x: u16, percent_y: u16, r: Rect) -> Rect { let popup_layout = Layout::default() .direction(Direction::Vertical) .constraints([ Constraint::Percentage((100 - percent_y) / 2), Constraint::Percentage(percent_y), Constraint::Percentage((100 - percent_y) / 2), ]) .split(r); Layout::default() .direction(Direction::Horizontal) .constraints([ Constraint::Percentage((100 - percent_x) / 2), Constraint::Percentage(percent_x), Constraint::Percentage((100 - percent_x) / 2), ]) .split(popup_layout[1])[1] } /// Convert hex color code to ratatui Color fn hex_to_color(hex: &str) -> Option { // Remove # if present and ensure we have 6 characters let hex = hex.trim_start_matches('#'); if hex.len() != 6 { return None; } // Parse RGB components let r = u8::from_str_radix(&hex[0..2], 16).ok()?; let g = u8::from_str_radix(&hex[2..4], 16).ok()?; let b = u8::from_str_radix(&hex[4..6], 16).ok()?; Some(Color::Rgb(r, g, b)) } doxx-0.1.2/tests/ansi_export_test.rs 0000644 0000000 0000000 00000027372 10461020230 0015723 0 ustar 0000000 0000000 use doxx::{ ansi::{export_to_ansi_with_options, AnsiOptions}, document::{Document, DocumentElement, FormattedRun, TextFormatting}, ColorDepth, }; #[test] fn test_ansi_export_basic() { let document = create_test_document(); let options = AnsiOptions { terminal_width: 80, color_depth: ColorDepth::TrueColor, }; let result = export_to_ansi_with_options(&document, &options); assert!(result.is_ok()); let output = result.unwrap(); assert!(output.contains("Test Document")); assert!(output.contains("Document Information")); } #[test] fn test_ansi_export_formatting() { let document = create_formatted_document(); let options = AnsiOptions { terminal_width: 80, color_depth: ColorDepth::TrueColor, }; let result = export_to_ansi_with_options(&document, &options); assert!(result.is_ok()); let output = result.unwrap(); // Check for ANSI formatting codes assert!(output.contains("[1m")); // Bold assert!(output.contains("[3m")); // Italic assert!(output.contains("[4m")); // Underline assert!(output.contains("[9m")); // Strikethrough assert!(output.contains("[38;2;")); // RGB color assert!(output.contains("[0m")); // Reset } #[test] fn test_ansi_export_color_depths() { let document = create_colored_document(); // Test monochrome (no colors) let monochrome_options = AnsiOptions { terminal_width: 80, color_depth: ColorDepth::Monochrome, }; let mono_output = export_to_ansi_with_options(&document, &monochrome_options).unwrap(); assert!(!mono_output.contains("[38;2;")); // No RGB colors assert!(!mono_output.contains("[38;5;")); // No ANSI colors // Test 16 colors let standard_options = AnsiOptions { terminal_width: 80, color_depth: ColorDepth::Standard, }; let standard_output = export_to_ansi_with_options(&document, &standard_options).unwrap(); assert!(standard_output.contains("[38;5;")); // ANSI colors assert!(!standard_output.contains("[38;2;")); // No RGB colors // Test true color let true_color_options = AnsiOptions { terminal_width: 80, color_depth: ColorDepth::TrueColor, }; let true_color_output = export_to_ansi_with_options(&document, &true_color_options).unwrap(); assert!(true_color_output.contains("[38;2;")); // RGB colors } #[test] fn test_ansi_export_terminal_width() { let document = create_test_document(); // Test narrow width let narrow_options = AnsiOptions { terminal_width: 40, color_depth: ColorDepth::Auto, }; let narrow_output = export_to_ansi_with_options(&document, &narrow_options).unwrap(); // Check that separator respects width let lines: Vec<&str> = narrow_output.lines().collect(); let separator_line = lines.iter().find(|line| line.contains("====")).unwrap(); // Should be 40 characters or close to it (accounting for ANSI codes) let clean_line = strip_ansi_codes(separator_line); assert_eq!(clean_line.len(), 40); // Test wide width let wide_options = AnsiOptions { terminal_width: 120, color_depth: ColorDepth::Auto, }; let wide_output = export_to_ansi_with_options(&document, &wide_options).unwrap(); let wide_lines: Vec<&str> = wide_output.lines().collect(); let wide_separator = wide_lines .iter() .find(|line| line.contains("====")) .unwrap(); let wide_clean = strip_ansi_codes(wide_separator); assert_eq!(wide_clean.len(), 50); // Limited by min(50, width) } #[test] fn test_ansi_export_lists() { let document = create_list_document(); let options = AnsiOptions::default(); let result = export_to_ansi_with_options(&document, &options); assert!(result.is_ok()); let output = result.unwrap(); assert!(output.contains("1. ")); // Ordered list marker assert!(output.contains("• ")); // Unordered list marker assert!(output.contains(" ")); // Indentation for nested items } #[test] fn test_ansi_export_tables() { let document = create_table_document(); let options = AnsiOptions::default(); let result = export_to_ansi_with_options(&document, &options); assert!(result.is_ok()); let output = result.unwrap(); assert!(output.contains("│")); // Table borders assert!(output.contains("─")); // Table borders assert!(output.contains("📊")); // Table icon } // Helper functions to create test documents fn create_test_document() -> Document { use doxx::document::DocumentMetadata; Document { title: "Test Document".to_string(), metadata: DocumentMetadata { file_path: "test.docx".to_string(), file_size: 1024, word_count: 10, page_count: 1, created: None, modified: None, author: Some("Test Author".to_string()), }, elements: vec![DocumentElement::Paragraph { runs: vec![FormattedRun { text: "This is a simple paragraph.".to_string(), formatting: TextFormatting::default(), }], }], image_options: Default::default(), } } fn create_formatted_document() -> Document { use doxx::document::DocumentMetadata; let bold_formatting = TextFormatting { bold: true, ..Default::default() }; let italic_formatting = TextFormatting { italic: true, ..Default::default() }; let underline_formatting = TextFormatting { underline: true, ..Default::default() }; let strikethrough_formatting = TextFormatting { strikethrough: true, ..Default::default() }; Document { title: "Formatted Document".to_string(), metadata: DocumentMetadata { file_path: "formatted.docx".to_string(), file_size: 2048, word_count: 20, page_count: 1, created: None, modified: None, author: None, }, elements: vec![DocumentElement::Paragraph { runs: vec![ FormattedRun { text: "Bold text ".to_string(), formatting: bold_formatting, }, FormattedRun { text: "italic text ".to_string(), formatting: italic_formatting, }, FormattedRun { text: "underlined text ".to_string(), formatting: underline_formatting, }, FormattedRun { text: "strikethrough text".to_string(), formatting: strikethrough_formatting, }, ], }], image_options: Default::default(), } } fn create_colored_document() -> Document { use doxx::document::DocumentMetadata; let red_formatting = TextFormatting { color: Some("#FF0000".to_string()), ..Default::default() }; let blue_formatting = TextFormatting { color: Some("#0000FF".to_string()), ..Default::default() }; Document { title: "Colored Document".to_string(), metadata: DocumentMetadata { file_path: "colored.docx".to_string(), file_size: 1536, word_count: 15, page_count: 1, created: None, modified: None, author: None, }, elements: vec![DocumentElement::Paragraph { runs: vec![ FormattedRun { text: "Red text ".to_string(), formatting: red_formatting, }, FormattedRun { text: "Blue text".to_string(), formatting: blue_formatting, }, ], }], image_options: Default::default(), } } fn create_list_document() -> Document { use doxx::document::{DocumentMetadata, ListItem}; Document { title: "List Document".to_string(), metadata: DocumentMetadata { file_path: "lists.docx".to_string(), file_size: 1280, word_count: 12, page_count: 1, created: None, modified: None, author: None, }, elements: vec![ DocumentElement::List { items: vec![ ListItem { runs: vec![FormattedRun { text: "First item".to_string(), formatting: TextFormatting::default(), }], level: 0, }, ListItem { runs: vec![FormattedRun { text: "Second item".to_string(), formatting: TextFormatting::default(), }], level: 0, }, ListItem { runs: vec![FormattedRun { text: "Nested item".to_string(), formatting: TextFormatting::default(), }], level: 1, }, ], ordered: true, }, DocumentElement::List { items: vec![ListItem { runs: vec![FormattedRun { text: "Bullet item".to_string(), formatting: TextFormatting::default(), }], level: 0, }], ordered: false, }, ], image_options: Default::default(), } } fn create_table_document() -> Document { use doxx::document::{ CellDataType, DocumentMetadata, TableCell, TableData, TableMetadata, TextAlignment, }; let table = TableData { headers: vec![ TableCell { content: "Name".to_string(), alignment: TextAlignment::Left, formatting: TextFormatting::default(), data_type: CellDataType::Text, }, TableCell { content: "Age".to_string(), alignment: TextAlignment::Right, formatting: TextFormatting::default(), data_type: CellDataType::Number, }, ], rows: vec![vec![ TableCell { content: "Alice".to_string(), alignment: TextAlignment::Left, formatting: TextFormatting::default(), data_type: CellDataType::Text, }, TableCell { content: "30".to_string(), alignment: TextAlignment::Right, formatting: TextFormatting::default(), data_type: CellDataType::Number, }, ]], metadata: TableMetadata { title: Some("Test Table".to_string()), column_widths: vec![10, 5], column_alignments: vec![TextAlignment::Left, TextAlignment::Right], column_count: 2, row_count: 1, has_headers: true, }, }; Document { title: "Table Document".to_string(), metadata: DocumentMetadata { file_path: "table.docx".to_string(), file_size: 1792, word_count: 8, page_count: 1, created: None, modified: None, author: None, }, elements: vec![DocumentElement::Table { table }], image_options: Default::default(), } } fn strip_ansi_codes(text: &str) -> String { // Simple ANSI code stripping for testing let ansi_regex = regex::Regex::new(r"\x1b\[[0-9;]*m").unwrap(); ansi_regex.replace_all(text, "").to_string() } doxx-0.1.2/tests/fixtures/advanced-numbering.docx 0000644 0000000 0000000 00000040200 10461020230 0020225 0 ustar 0000000 0000000 PK ! 2oWf [Content_Types].xml ( j0EѶJ(ɢeh4NDB81$14 {1ښl w%=^i7+-d&0A6l4L60#ÒS OX *V$:B~^Kڃ /PI~7$iJ&B0ZDutOJK(HxG L+vdcӻW>*\XRmpZ}HwnMVn-")/ZwB`4 sDXj;A*ʝc֝ 4[S9> {V4pW&A|d? PK ! N _rels/.rels ( j0@ѽQN/c[ILj<]aGӓzsFu]U ^[x 1xpf#I)ʃY*Di")c$qU~31jH[{=E~ f?3-]Tꓸ2j),l0/%b zʼn, /|f\Z?6!Y_o]A PK ! z G word/document.xml\r}OUѳm(Ji{JSIS %+O||IImSF-Or$bȘP!OkUfKk??/!I^fT~>L;&