upstream-ontologist-0.3.6/.cargo_vcs_info.json0000644000000001360000000000100151010ustar { "git": { "sha1": "71fe89341a9c47deb0f6dea7446b3338375b641c" }, "path_in_vcs": "" }upstream-ontologist-0.3.6/Cargo.lock0000644000005037010000000000100130620ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "addr2line" version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ "gimli", ] [[package]] name = "adler2" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "ahash" version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "once_cell", "version_check", "zerocopy", ] [[package]] name = "aho-corasick" version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] [[package]] name = "allocator-api2" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "annotate-snippets" version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" dependencies = [ "anstyle", "unicode-width", ] [[package]] name = "ansi_term" version = "0.12.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" dependencies = [ "winapi", ] [[package]] name = "anstream" version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "anstyle-wincon" version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", "windows-sys 0.61.2", ] [[package]] name = "anyhow" version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "ar_archive_writer" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0c269894b6fe5e9d7ada0cf69b5bf847ff35bc25fc271f08e1d080fce80339a" dependencies = [ "object 0.32.2", ] [[package]] name = "arbitrary" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" dependencies = [ "derive_arbitrary", ] [[package]] name = "arc-swap" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arrayref" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" [[package]] name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "ascii-canvas" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" dependencies = [ "term", ] [[package]] name = "async-trait" version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "atomic-waker" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "backtrace" version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", "cfg-if", "libc", "miniz_oxide", "object 0.37.3", "rustc-demangle", "windows-link", ] [[package]] name = "base16ct" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "bit-set" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ "bit-vec 0.6.3", ] [[package]] name = "bit-set" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" dependencies = [ "bit-vec 0.8.0", ] [[package]] name = "bit-vec" version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" [[package]] name = "bit-vec" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" [[package]] name = "bitflags" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "bitmaps" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" dependencies = [ "typenum", ] [[package]] name = "blake3" version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", ] [[package]] name = "block-buffer" 
version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "block2" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" dependencies = [ "objc2", ] [[package]] name = "boxcar" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36f64beae40a84da1b4b26ff2761a5b895c12adc41dc25aaee1c4f2bbfe97a6e" [[package]] name = "breezyshim" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8efc483b715372b682e6094ddebdb0d6687e1c6f5ff922155495e7bfb2ae3d1d" dependencies = [ "chrono", "ctor", "lazy-regex", "lazy_static", "log", "patchkit", "percent-encoding", "pyo3", "pyo3-filelike", "regex", "serde", "tempfile", "url", "whoami", ] [[package]] name = "bstr" version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" dependencies = [ "memchr", "regex-automata", "serde", ] [[package]] name = "bumpalo" version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "bytesize" version = "1.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e93abca9e28e0a1b9877922aacb20576e05d4679ffa78c3d6dc22a26a216659" [[package]] name 
= "cargo" version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62fdf5dbde4bf8d8149a4d32568d28d92af9dc4a4975727d89bd8dfb69fb810e" dependencies = [ "annotate-snippets", "anstream", "anstyle", "anyhow", "base64", "blake3", "bytesize", "cargo-credential", "cargo-credential-libsecret", "cargo-credential-macos-keychain", "cargo-credential-wincred", "cargo-platform", "cargo-util", "cargo-util-schemas", "clap", "clap_complete", "color-print", "crates-io", "curl", "curl-sys", "filetime", "flate2", "git2", "git2-curl", "gix", "glob", "hex", "hmac", "home", "http-auth", "humantime", "ignore", "im-rc", "indexmap", "itertools 0.13.0", "jobserver", "lazycell", "libc", "libgit2-sys", "memchr", "opener", "os_info", "pasetors", "pathdiff", "rand", "regex", "rusqlite", "rustc-hash 2.1.1", "rustc-stable-hash", "rustfix", "same-file", "semver", "serde", "serde-untagged", "serde_ignored", "serde_json", "sha1", "shell-escape", "supports-hyperlinks", "supports-unicode", "tar", "tempfile", "thiserror 1.0.69", "time", "toml 0.8.23", "toml_edit", "tracing", "tracing-chrome", "tracing-subscriber", "unicase", "unicode-width", "url", "walkdir", "windows-sys 0.59.0", ] [[package]] name = "cargo-credential" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e36f089041deadf16226478a7737a833864fbda09408c7af237b9d615eeb6d69" dependencies = [ "anyhow", "libc", "serde", "serde_json", "thiserror 2.0.17", "time", "windows-sys 0.60.2", ] [[package]] name = "cargo-credential-libsecret" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4c6e10380bfa25a4f1495e6ca7256332857e3ef7bb16adf376641e6af03acaf" dependencies = [ "anyhow", "cargo-credential", "libloading", ] [[package]] name = "cargo-credential-macos-keychain" version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e95b9c2431165b30ea111f2933ed6799bfa9a66c9503046064cf8f001960ea1b" 
dependencies = [ "cargo-credential", "security-framework 3.5.1", ] [[package]] name = "cargo-credential-wincred" version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c35397b066a83f2e036fb23fca2fb400bfa65e8e8453c21e0b1690cf8250e414" dependencies = [ "cargo-credential", "windows-sys 0.60.2", ] [[package]] name = "cargo-platform" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84982c6c0ae343635a3a4ee6dedef965513735c8b183caa7289fa6e27399ebd4" dependencies = [ "serde", ] [[package]] name = "cargo-util" version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f97c9ef0f8af69bfcecfe4c17a414d7bb978fe794bc1a38952e27b5c5d87492d" dependencies = [ "anyhow", "core-foundation 0.10.1", "filetime", "hex", "ignore", "jobserver", "libc", "miow", "same-file", "sha2", "shell-escape", "tempfile", "tracing", "walkdir", "windows-sys 0.60.2", ] [[package]] name = "cargo-util-schemas" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f905f68f8cb8a8182592d9858a5895360f0a5b08b6901fdb10498fb91829804" dependencies = [ "semver", "serde", "serde-untagged", "serde-value", "thiserror 1.0.69", "toml 0.8.23", "unicode-xid", "url", ] [[package]] name = "cc" version = "1.2.48" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a" dependencies = [ "find-msvc-tools", "jobserver", "libc", "shlex", ] [[package]] name = "cfg-if" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "cfg_aliases" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "charset" version = "0.1.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f1f927b07c74ba84c7e5fe4db2baeb3e996ab2688992e39ac68ce3220a677c7e" dependencies = [ "base64", "encoding_rs", ] [[package]] name = "chrono" version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ "iana-time-zone", "js-sys", "num-traits", "wasm-bindgen", "windows-link", ] [[package]] name = "chumsky" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ "hashbrown 0.14.5", "stacker", ] [[package]] name = "clap" version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" dependencies = [ "clap_builder", "clap_derive", ] [[package]] name = "clap_builder" version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" dependencies = [ "anstream", "anstyle", "clap_lex", "strsim", "terminal_size", ] [[package]] name = "clap_complete" version = "4.5.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39615915e2ece2550c0149addac32fb5bd312c657f43845bb9088cb9c8a7c992" dependencies = [ "clap", "clap_lex", "is_executable", "shlex", ] [[package]] name = "clap_derive" version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ "heck", "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "clap_lex" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "clap_mangen" version = "0.2.31" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "439ea63a92086df93893164221ad4f24142086d535b3a0957b9b9bea2dc86301" dependencies = [ "clap", "roff", ] [[package]] name = "clru" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbd0f76e066e64fdc5631e3bb46381254deab9ef1158292f27c8c57e3bf3fe59" [[package]] name = "color-print" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3aa954171903797d5623e047d9ab69d91b493657917bdfb8c2c80ecaf9cdb6f4" dependencies = [ "color-print-proc-macro", ] [[package]] name = "color-print-proc-macro" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "692186b5ebe54007e45a59aea47ece9eb4108e141326c304cdc91699a7118a22" dependencies = [ "nom", "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "colorchoice" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "configparser" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e57e3272f0190c3f1584272d613719ba5fc7df7f4942fe542e63d949cf3a649b" [[package]] name = "const-oid" version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const-random" version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" dependencies = [ "const-random-macro", ] [[package]] name = "const-random-macro" version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ "getrandom 0.2.16", "once_cell", "tiny-keccak", ] [[package]] name = "constant_time_eq" version = 
"0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "core-foundation" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crates-io" version = "0.40.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "574ce0b8170c097cf174097b84bff181956ad2ab2bbe092ab58d1c08d9f1f417" dependencies = [ "curl", "percent-encoding", "serde", "serde_json", "thiserror 2.0.17", "url", ] [[package]] name = "crc32fast" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ 
"crossbeam-utils", ] [[package]] name = "crossbeam-deque" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] name = "crypto-bigint" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core", "subtle", "zeroize", ] [[package]] name = "crypto-common" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array", "typenum", ] [[package]] name = "csv" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52cd9d68cf7efc6ddfaaee42e7288d3a99d613d4b50f76ce9827ae0c6e14f938" dependencies = [ "csv-core", "itoa", "ryu", "serde_core", ] [[package]] name = "csv-core" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "704a3c26996a80471189265814dbc2c257598b96b8a7feae2d31ace646bb9782" dependencies = [ "memchr", ] [[package]] name = "ct-codecs" version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "9b10589d1a5e400d61f9f38f12f884cfd080ff345de8f17efda36fe0e4a02aa8" [[package]] name = "ctor" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb230974aaf0aca4d71665bed0aca156cf43b764fcb9583b69c6c3e686f35e72" dependencies = [ "ctor-proc-macro", "dtor", ] [[package]] name = "ctor-proc-macro" version = "0.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52560adf09603e58c9a7ee1fe1dcb95a16927b17c127f0ac02d6e768a0e25bc1" [[package]] name = "curl" version = "0.4.49" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79fc3b6dd0b87ba36e565715bf9a2ced221311db47bd18011676f24a6066edbc" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", "socket2", "windows-sys 0.59.0", ] [[package]] name = "curl-sys" version = "0.4.84+curl-8.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abc4294dc41b882eaff37973c2ec3ae203d0091341ee68fbadd1d06e0c18a73b" dependencies = [ "cc", "libc", "libnghttp2-sys", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", "windows-sys 0.59.0", ] [[package]] name = "data-encoding" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" [[package]] name = "dbus" version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "190b6255e8ab55a7b568df5a883e9497edc3e4821c06396612048b430e5ad1e9" dependencies = [ "libc", "libdbus-sys", "windows-sys 0.59.0", ] [[package]] name = "deb822-derive" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86bf2d0fa4ce2457e94bd7efb15aeadc115297f04b660bd0da706729e0d91442" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "deb822-fast" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f410ccb5cbd9b81d56b290131bad4350ecf8b46416fb901e759dc1e6916a8198" dependencies = [ "deb822-derive", ] [[package]] name = "deb822-lossless" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdcadf12851ddb37dc938e724beeb50e83bfe1a1fda3c15b997dc1105ec49e3d" dependencies = [ "regex", "rowan", "serde", ] [[package]] name = "debbugs" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b36dd7d7098ea92d5a789d4b83fade23d79014527ae71f079faea9bdaf1914e7" dependencies = [ "debversion", "lazy-regex", "log", "mailparse", "maplit", "reqwest", "tokio", "xmltree 0.11.0", ] [[package]] name = "debcargo" version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04186f5b1fddb31ed0ef5b1190e98d8aa858be010074159c89157bf67e4eb2f2" dependencies = [ "ansi_term", "anyhow", "cargo", "cargo-util", "cargo-util-schemas", "chrono", "clap", "clap_mangen", "env_logger", "filetime", "flate2", "git2", "glob", "itertools 0.13.0", "log", "regex", "semver", "serde", "serde_derive", "tar", "tempfile", "textwrap", "toml 0.8.23", "walkdir", ] [[package]] name = "debian-changelog" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c77ec2b64e3c7baf5413982f17c78933713069eba0601064d78a75bbde8b5b4d" dependencies = [ "chrono", "debversion", "lazy-regex", "log", "rowan", "textwrap", "whoami", ] [[package]] name = "debian-control" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfc2596d8356567e2fdd8653210d94dd06ca8c4ab9679ec6edf443f9efaeb9c3" dependencies = [ "chrono", "deb822-fast", "deb822-lossless", "debversion", "regex", "rowan", "url", ] [[package]] name = "debian-copyright" version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8820cd2195ca3f8159d6488419c02c1cca83535050ed60bbf00e5e54e89586c5" dependencies = [ "deb822-fast", "deb822-lossless", 
"debversion", "regex", ] [[package]] name = "debian-watch" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7af76aefd6ad01b821102369a5026948fe54c57fa79875a4c21f3a3ae767bb5" dependencies = [ "debversion", "m_lexer", "regex", "rowan", "url", ] [[package]] name = "debversion" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4f5cc9ce1d5067bee8060dd75208525dd0133ffea0b2960fef64ab85d58c4c5" dependencies = [ "chrono", "lazy-regex", "num-bigint", ] [[package]] name = "der" version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" dependencies = [ "const-oid", "pem-rfc7468", "zeroize", ] [[package]] name = "deranged" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" dependencies = [ "powerfmt", "serde_core", ] [[package]] name = "derive_arbitrary" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "diff" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "const-oid", "crypto-common", "subtle", ] [[package]] name = "dirs-next" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ "cfg-if", "dirs-sys-next", ] [[package]] name = 
"dirs-sys-next" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", "redox_users", "winapi", ] [[package]] name = "dispatch2" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ "bitflags", "objc2", ] [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "distro-info" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef12237f2ced990e453ec0b69230752e73be0a357817448c50a62f8bbbe0ca71" dependencies = [ "chrono", "csv", "failure", ] [[package]] name = "dlv-list" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" dependencies = [ "const-random", ] [[package]] name = "document_tree" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6742722dd3e6cd908bc522283cb5502e25f696d1c9904fb251ec266b6b3f9cce" dependencies = [ "anyhow", "regex", "serde", "serde_derive", "url", ] [[package]] name = "dtor" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "404d02eeb088a82cfd873006cb713fe411306c7d182c344905e101fb1167d301" dependencies = [ "dtor-proc-macro", ] [[package]] name = "dtor-proc-macro" version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f678cf4a922c215c63e0de95eb1ff08a958a81d47e485cf9da1e27bf6305cfa5" [[package]] name = "dunce" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "ecdsa" version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest", "elliptic-curve", "rfc6979", "signature", "spki", ] [[package]] name = "ed25519-compact" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ce99a9e19c84beb4cc35ece85374335ccc398240712114c85038319ed709bd" dependencies = [ "getrandom 0.3.4", ] [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "elliptic-curve" version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", "digest", "ff", "generic-array", "group", "hkdf", "pem-rfc7468", "pkcs8", "rand_core", "sec1", "subtle", "zeroize", ] [[package]] name = "ena" version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" dependencies = [ "log", ] [[package]] name = "encoding_rs" version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] [[package]] name = "env_filter" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2" dependencies = [ "log", "regex", ] [[package]] name = "env_logger" version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f" 
dependencies = [ "anstream", "anstyle", "env_filter", "jiff", "log", ] [[package]] name = "equivalent" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "erased-serde" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89e8918065695684b2b0702da20382d5ae6065cf3327bc2d6436bd49a71ce9f3" dependencies = [ "serde", "serde_core", "typeid", ] [[package]] name = "errno" version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", "windows-sys 0.61.2", ] [[package]] name = "failure" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" dependencies = [ "backtrace", "failure_derive", ] [[package]] name = "failure_derive" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", "synstructure 0.12.6", ] [[package]] name = "fallible-iterator" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] name = "fallible-streaming-iterator" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" [[package]] name = "faster-hex" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" dependencies = [ "serde", ] [[package]] name = "faster-hex" version = "0.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7223ae2d2f179b803433d9c830478527e92b8117eab39460edae7f1614d9fb73" dependencies = [ "heapless", "serde", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "ff" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" dependencies = [ "rand_core", "subtle", ] [[package]] name = "fiat-crypto" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64cd1e32ddd350061ae6edb1b082d7c54915b5c672c389143b9a63403a109f24" [[package]] name = "filetime" version = "0.2.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.60.2", ] [[package]] name = "find-msvc-tools" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" [[package]] name = "fixedbitset" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" dependencies = [ "crc32fast", "libz-rs-sys", "libz-sys", "miniz_oxide", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foreign-types" version = "0.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" dependencies = [ "foreign-types-shared", ] [[package]] name = "foreign-types-shared" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] [[package]] name = "fs-err" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62d91fd049c123429b018c47887d3f75a265540dd3c30ba9cb7bae9197edb03a" dependencies = [ "autocfg", ] [[package]] name = "futf" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" dependencies = [ "mac", "new_debug_unreachable", ] [[package]] name = "futures" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", "futures-executor", "futures-io", "futures-sink", "futures-task", "futures-util", ] [[package]] name = "futures-channel" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", ] [[package]] name = "futures-core" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", "futures-util", ] [[package]] name = "futures-io" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-macro" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "futures-sink" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", "futures-io", "futures-macro", "futures-sink", "futures-task", "memchr", "pin-project-lite", "pin-utils", "slab", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", "zeroize", ] [[package]] name = "getopts" version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" dependencies = [ "unicode-width", ] [[package]] name = "getrandom" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "getrandom" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "js-sys", "libc", "r-efi", "wasip2", "wasm-bindgen", ] [[package]] name = "gimli" version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" [[package]] name = "git2" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" dependencies = [ "bitflags", "libc", "libgit2-sys", "log", "openssl-probe", "openssl-sys", "url", ] [[package]] name = "git2-curl" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68ff14527a1c242320039b138376f8e0786697a1b7b172bc44f6efda3ab9079f" dependencies = [ "curl", "git2", "log", "url", ] [[package]] name = "gix" version = "0.69.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d0eebdaecdcf405d5433a36f85e4f058cf4de48ee2604388be0dbccbaad353e" dependencies = [ "gix-actor 0.33.2", "gix-attributes", "gix-command", "gix-commitgraph", "gix-config 0.42.0", "gix-credentials", "gix-date 0.9.4", "gix-diff", "gix-dir", "gix-discover", "gix-features 0.39.1", "gix-filter", "gix-fs 0.12.1", "gix-glob 0.17.1", "gix-hash 0.15.1", "gix-hashtable 0.6.0", "gix-ignore", "gix-index", "gix-lock 15.0.1", "gix-negotiate", "gix-object 0.46.1", "gix-odb", "gix-pack", "gix-path", "gix-pathspec", "gix-prompt", "gix-protocol", "gix-ref 0.49.1", "gix-refspec", "gix-revision", "gix-revwalk", "gix-sec 0.10.12", "gix-shallow", "gix-submodule", "gix-tempfile 15.0.0", "gix-trace", "gix-transport", "gix-traverse", "gix-url", "gix-utils 0.1.14", "gix-validate 0.9.4", 
"gix-worktree", "once_cell", "prodash 29.0.2", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-actor" version = "0.33.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20018a1a6332e065f1fcc8305c1c932c6b8c9985edea2284b3c79dc6fa3ee4b2" dependencies = [ "bstr", "gix-date 0.9.4", "gix-utils 0.1.14", "itoa", "thiserror 2.0.17", "winnow 0.6.26", ] [[package]] name = "gix-actor" version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "694f6c16eb88b16b00b1d811e4e4bda6f79e9eb467a1b04fd5b848da677baa81" dependencies = [ "bstr", "gix-date 0.11.0", "gix-utils 0.3.1", "itoa", "thiserror 2.0.17", "winnow 0.7.14", ] [[package]] name = "gix-attributes" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddf9bf852194c0edfe699a2d36422d2c1f28f73b7c6d446c3f0ccd3ba232cadc" dependencies = [ "bstr", "gix-glob 0.17.1", "gix-path", "gix-quote", "gix-trace", "kstring", "smallvec", "thiserror 2.0.17", "unicode-bom", ] [[package]] name = "gix-bitmap" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e150161b8a75b5860521cb876b506879a3376d3adc857ec7a9d35e7c6a5e531" dependencies = [ "thiserror 2.0.17", ] [[package]] name = "gix-chunk" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c356b3825677cb6ff579551bb8311a81821e184453cbd105e2fc5311b288eeb" dependencies = [ "thiserror 2.0.17", ] [[package]] name = "gix-command" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb410b84d6575db45e62025a9118bdbf4d4b099ce7575a76161e898d9ca98df1" dependencies = [ "bstr", "gix-path", "gix-trace", "shell-words", ] [[package]] name = "gix-commitgraph" version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8da6591a7868fb2b6dabddea6b09988b0b05e0213f938dbaa11a03dd7a48d85" dependencies = [ "bstr", "gix-chunk", 
"gix-features 0.39.1", "gix-hash 0.15.1", "memmap2", "thiserror 2.0.17", ] [[package]] name = "gix-config" version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6649b406ca1f99cb148959cf00468b231f07950f8ec438cc0903cda563606f19" dependencies = [ "bstr", "gix-config-value 0.14.12", "gix-features 0.39.1", "gix-glob 0.17.1", "gix-path", "gix-ref 0.49.1", "gix-sec 0.10.12", "memchr", "once_cell", "smallvec", "thiserror 2.0.17", "unicode-bom", "winnow 0.6.26", ] [[package]] name = "gix-config" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9419284839421488b5ab9b9b88386bdc1e159a986c08e17ffa3e9a5cd2b139f5" dependencies = [ "bstr", "gix-config-value 0.15.3", "gix-features 0.44.1", "gix-glob 0.22.1", "gix-path", "gix-ref 0.55.0", "gix-sec 0.12.2", "memchr", "smallvec", "thiserror 2.0.17", "unicode-bom", "winnow 0.7.14", ] [[package]] name = "gix-config-value" version = "0.14.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8dc2c844c4cf141884678cabef736fd91dd73068b9146e6f004ba1a0457944b6" dependencies = [ "bitflags", "bstr", "gix-path", "libc", "thiserror 2.0.17", ] [[package]] name = "gix-config-value" version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c489abb061c74b0c3ad790e24a606ef968cebab48ec673d6a891ece7d5aef64" dependencies = [ "bitflags", "bstr", "gix-path", "libc", "thiserror 2.0.17", ] [[package]] name = "gix-credentials" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82a50c56b785c29a151ab4ccf74a83fe4e21d2feda0d30549504b4baed353e0a" dependencies = [ "bstr", "gix-command", "gix-config-value 0.14.12", "gix-path", "gix-prompt", "gix-sec 0.10.12", "gix-trace", "gix-url", "thiserror 2.0.17", ] [[package]] name = "gix-date" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"daa30058ec7d3511fbc229e4f9e696a35abd07ec5b82e635eff864a2726217e4" dependencies = [ "bstr", "itoa", "jiff", "thiserror 2.0.17", ] [[package]] name = "gix-date" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f94626a5bc591a57025361a3a890092469e47c7667e59fc143439cd6eaf47fe" dependencies = [ "bstr", "itoa", "jiff", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-diff" version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8e92566eccbca205a0a0f96ffb0327c061e85bc5c95abbcddfe177498aa04f6" dependencies = [ "bstr", "gix-hash 0.15.1", "gix-object 0.46.1", "thiserror 2.0.17", ] [[package]] name = "gix-dir" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fba2ffbcf4bd34438e8a8367ccbc94870549903d1f193a14f47eb6b0967e1293" dependencies = [ "bstr", "gix-discover", "gix-fs 0.12.1", "gix-ignore", "gix-index", "gix-object 0.46.1", "gix-path", "gix-pathspec", "gix-trace", "gix-utils 0.1.14", "gix-worktree", "thiserror 2.0.17", ] [[package]] name = "gix-discover" version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83bf6dfa4e266a4a9becb4d18fc801f92c3f7cc6c433dd86fdadbcf315ffb6ef" dependencies = [ "bstr", "dunce", "gix-fs 0.12.1", "gix-hash 0.15.1", "gix-path", "gix-ref 0.49.1", "gix-sec 0.10.12", "thiserror 2.0.17", ] [[package]] name = "gix-features" version = "0.39.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d85d673f2e022a340dba4713bed77ef2cf4cd737d2f3e0f159d45e0935fd81f" dependencies = [ "bytes", "crc32fast", "crossbeam-channel", "flate2", "gix-hash 0.15.1", "gix-trace", "gix-utils 0.1.14", "libc", "once_cell", "parking_lot", "prodash 29.0.2", "sha1_smol", "thiserror 2.0.17", "walkdir", ] [[package]] name = "gix-features" version = "0.44.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dfa64593d1586135102307fb57fb3a9d3868b6b1f45a4da1352cce5070f8916a" dependencies = [ "gix-path", "gix-trace", "gix-utils 0.3.1", "libc", "prodash 30.0.1", "walkdir", ] [[package]] name = "gix-filter" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0ecdee5667f840ba20c7fe56d63f8e1dc1e6b3bfd296151fe5ef07c874790a" dependencies = [ "bstr", "encoding_rs", "gix-attributes", "gix-command", "gix-hash 0.15.1", "gix-object 0.46.1", "gix-packetline-blocking", "gix-path", "gix-quote", "gix-trace", "gix-utils 0.1.14", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-fs" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3d4fac505a621f97e5ce2c69fdc425742af00c0920363ca4074f0eb48b1db9" dependencies = [ "fastrand", "gix-features 0.39.1", "gix-utils 0.1.14", ] [[package]] name = "gix-fs" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f1ecd896258cdc5ccd94d18386d17906b8de265ad2ecf68e3bea6b007f6a28f" dependencies = [ "bstr", "fastrand", "gix-features 0.44.1", "gix-path", "gix-utils 0.3.1", "thiserror 2.0.17", ] [[package]] name = "gix-glob" version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaf69a6bec0a3581567484bf99a4003afcaf6c469fd4214352517ea355cf3435" dependencies = [ "bitflags", "bstr", "gix-features 0.39.1", "gix-path", ] [[package]] name = "gix-glob" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74254992150b0a88fdb3ad47635ab649512dff2cbbefca7916bb459894fc9d56" dependencies = [ "bitflags", "bstr", "gix-features 0.44.1", "gix-path", ] [[package]] name = "gix-hash" version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b5eccc17194ed0e67d49285e4853307e4147e95407f91c1c3e4a13ba9f4e4ce" dependencies = [ "faster-hex 0.9.0", "thiserror 2.0.17", ] [[package]] name = "gix-hash" version = "0.20.1" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "826036a9bee95945b0be1e2394c64cd4289916c34a639818f8fd5153906985c1" dependencies = [ "faster-hex 0.10.0", "gix-features 0.44.1", "sha1-checked", "thiserror 2.0.17", ] [[package]] name = "gix-hashtable" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ef65b256631078ef733bc5530c4e6b1c2e7d5c2830b75d4e9034ab3997d18fe" dependencies = [ "gix-hash 0.15.1", "hashbrown 0.14.5", "parking_lot", ] [[package]] name = "gix-hashtable" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a27d4a3ea9640da504a2657fef3419c517fd71f1767ad8935298bcc805edd195" dependencies = [ "gix-hash 0.20.1", "hashbrown 0.16.1", "parking_lot", ] [[package]] name = "gix-ignore" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6b1fb24d2a4af0aa7438e2771d60c14a80cf2c9bd55c29cf1712b841f05bb8a" dependencies = [ "bstr", "gix-glob 0.17.1", "gix-path", "gix-trace", "unicode-bom", ] [[package]] name = "gix-index" version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "270645fd20556b64c8ffa1540d921b281e6994413a0ca068596f97e9367a257a" dependencies = [ "bitflags", "bstr", "filetime", "fnv", "gix-bitmap", "gix-features 0.39.1", "gix-fs 0.12.1", "gix-hash 0.15.1", "gix-lock 15.0.1", "gix-object 0.46.1", "gix-traverse", "gix-utils 0.1.14", "gix-validate 0.9.4", "hashbrown 0.14.5", "itoa", "libc", "memmap2", "rustix 0.38.44", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-lock" version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cd3ab68a452db63d9f3ebdacb10f30dba1fa0d31ac64f4203d395ed1102d940" dependencies = [ "gix-tempfile 15.0.0", "gix-utils 0.1.14", "thiserror 2.0.17", ] [[package]] name = "gix-lock" version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"729d7857429a66023bc0c29d60fa21d0d6ae8862f33c1937ba89e0f74dd5c67f" dependencies = [ "gix-tempfile 19.0.1", "gix-utils 0.3.1", "thiserror 2.0.17", ] [[package]] name = "gix-negotiate" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d27f830a16405386e9c83b9d5be8261fe32bbd6b3caf15bd1b284c6b2b7ef1a8" dependencies = [ "bitflags", "gix-commitgraph", "gix-date 0.9.4", "gix-hash 0.15.1", "gix-object 0.46.1", "gix-revwalk", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-object" version = "0.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e42d58010183ef033f31088479b4eb92b44fe341b35b62d39eb8b185573d77ea" dependencies = [ "bstr", "gix-actor 0.33.2", "gix-date 0.9.4", "gix-features 0.39.1", "gix-hash 0.15.1", "gix-hashtable 0.6.0", "gix-path", "gix-utils 0.1.14", "gix-validate 0.9.4", "itoa", "smallvec", "thiserror 2.0.17", "winnow 0.6.26", ] [[package]] name = "gix-object" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84743d1091c501a56f00d7f4c595cb30f20fcef6503b32ac0a1ff3817efd7b5d" dependencies = [ "bstr", "gix-actor 0.36.0", "gix-date 0.11.0", "gix-features 0.44.1", "gix-hash 0.20.1", "gix-hashtable 0.10.0", "gix-path", "gix-utils 0.3.1", "gix-validate 0.10.1", "itoa", "smallvec", "thiserror 2.0.17", "winnow 0.7.14", ] [[package]] name = "gix-odb" version = "0.66.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb780eceb3372ee204469478de02eaa34f6ba98247df0186337e0333de97d0ae" dependencies = [ "arc-swap", "gix-date 0.9.4", "gix-features 0.39.1", "gix-fs 0.12.1", "gix-hash 0.15.1", "gix-hashtable 0.6.0", "gix-object 0.46.1", "gix-pack", "gix-path", "gix-quote", "parking_lot", "tempfile", "thiserror 2.0.17", ] [[package]] name = "gix-pack" version = "0.56.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4158928929be29cae7ab97afc8e820a932071a7f39d8ba388eed2380c12c566c" 
dependencies = [ "clru", "gix-chunk", "gix-features 0.39.1", "gix-hash 0.15.1", "gix-hashtable 0.6.0", "gix-object 0.46.1", "gix-path", "gix-tempfile 15.0.0", "memmap2", "parking_lot", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-packetline" version = "0.18.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "123844a70cf4d5352441dc06bab0da8aef61be94ec239cb631e0ba01dc6d3a04" dependencies = [ "bstr", "faster-hex 0.9.0", "gix-trace", "thiserror 2.0.17", ] [[package]] name = "gix-packetline-blocking" version = "0.18.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ecf3ea2e105c7e45587bac04099824301262a6c43357fad5205da36dbb233b3" dependencies = [ "bstr", "faster-hex 0.9.0", "gix-trace", "thiserror 2.0.17", ] [[package]] name = "gix-path" version = "0.10.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7cb06c3e4f8eed6e24fd915fa93145e28a511f4ea0e768bae16673e05ed3f366" dependencies = [ "bstr", "gix-trace", "gix-validate 0.10.1", "thiserror 2.0.17", ] [[package]] name = "gix-pathspec" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c472dfbe4a4e96fcf7efddcd4771c9037bb4fdea2faaabf2f4888210c75b81e" dependencies = [ "bitflags", "bstr", "gix-attributes", "gix-config-value 0.14.12", "gix-glob 0.17.1", "gix-path", "thiserror 2.0.17", ] [[package]] name = "gix-prompt" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79f2185958e1512b989a007509df8d61dca014aa759a22bee80cfa6c594c3b6d" dependencies = [ "gix-command", "gix-config-value 0.14.12", "parking_lot", "rustix 0.38.44", "thiserror 2.0.17", ] [[package]] name = "gix-protocol" version = "0.47.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c84642e8b6fed7035ce9cc449593019c55b0ec1af7a5dce1ab8a0636eaaeb067" dependencies = [ "bstr", "gix-credentials", "gix-date 0.9.4", "gix-features 0.39.1", "gix-hash 0.15.1", 
"gix-lock 15.0.1", "gix-negotiate", "gix-object 0.46.1", "gix-ref 0.49.1", "gix-refspec", "gix-revwalk", "gix-shallow", "gix-trace", "gix-transport", "gix-utils 0.1.14", "maybe-async", "thiserror 2.0.17", "winnow 0.6.26", ] [[package]] name = "gix-quote" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e49357fccdb0c85c0d3a3292a9f6db32d9b3535959b5471bb9624908f4a066c6" dependencies = [ "bstr", "gix-utils 0.1.14", "thiserror 2.0.17", ] [[package]] name = "gix-ref" version = "0.49.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a91b61776c839d0f1b7114901179afb0947aa7f4d30793ca1c56d335dfef485f" dependencies = [ "gix-actor 0.33.2", "gix-features 0.39.1", "gix-fs 0.12.1", "gix-hash 0.15.1", "gix-lock 15.0.1", "gix-object 0.46.1", "gix-path", "gix-tempfile 15.0.0", "gix-utils 0.1.14", "gix-validate 0.9.4", "memmap2", "thiserror 2.0.17", "winnow 0.6.26", ] [[package]] name = "gix-ref" version = "0.55.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e51330a32f173c8e831731dfef8e93a748c23c057f4b028841f222564cad84cb" dependencies = [ "gix-actor 0.36.0", "gix-features 0.44.1", "gix-fs 0.17.0", "gix-hash 0.20.1", "gix-lock 19.0.0", "gix-object 0.52.0", "gix-path", "gix-tempfile 19.0.1", "gix-utils 0.3.1", "gix-validate 0.10.1", "memmap2", "thiserror 2.0.17", "winnow 0.7.14", ] [[package]] name = "gix-refspec" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00c056bb747868c7eb0aeb352c9f9181ab8ca3d0a2550f16470803500c6c413d" dependencies = [ "bstr", "gix-hash 0.15.1", "gix-revision", "gix-validate 0.9.4", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-revision" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61e1ddc474405a68d2ce8485705dd72fe6ce959f2f5fe718601ead5da2c8f9e7" dependencies = [ "bstr", "gix-commitgraph", "gix-date 0.9.4", "gix-hash 0.15.1", "gix-object 
0.46.1", "gix-revwalk", "thiserror 2.0.17", ] [[package]] name = "gix-revwalk" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "510026fc32f456f8f067d8f37c34088b97a36b2229d88a6a5023ef179fcb109d" dependencies = [ "gix-commitgraph", "gix-date 0.9.4", "gix-hash 0.15.1", "gix-hashtable 0.6.0", "gix-object 0.46.1", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-sec" version = "0.10.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47aeb0f13de9ef2f3033f5ff218de30f44db827ac9f1286f9ef050aacddd5888" dependencies = [ "bitflags", "gix-path", "libc", "windows-sys 0.52.0", ] [[package]] name = "gix-sec" version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea9962ed6d9114f7f100efe038752f41283c225bb507a2888903ac593dffa6be" dependencies = [ "bitflags", "gix-path", "libc", "windows-sys 0.61.2", ] [[package]] name = "gix-shallow" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d2673242e87492cb6ff671f0c01f689061ca306c4020f137197f3abc84ce01" dependencies = [ "bstr", "gix-hash 0.15.1", "gix-lock 15.0.1", "thiserror 2.0.17", ] [[package]] name = "gix-submodule" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2455f8c0fcb6ebe2a6e83c8f522d30615d763eb2ef7a23c7d929f9476e89f5c" dependencies = [ "bstr", "gix-config 0.42.0", "gix-path", "gix-pathspec", "gix-refspec", "gix-url", "thiserror 2.0.17", ] [[package]] name = "gix-tempfile" version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2feb86ef094cc77a4a9a5afbfe5de626897351bbbd0de3cb9314baf3049adb82" dependencies = [ "gix-fs 0.12.1", "libc", "once_cell", "parking_lot", "tempfile", ] [[package]] name = "gix-tempfile" version = "19.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e265fc6b54e57693232a79d84038381ebfda7b1a3b1b8a9320d4d5fe6e820086" 
dependencies = [ "gix-fs 0.17.0", "libc", "parking_lot", "tempfile", ] [[package]] name = "gix-trace" version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d3f59a8de2934f6391b6b3a1a7654eae18961fcb9f9c843533fed34ad0f3457" [[package]] name = "gix-transport" version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd04d91e507a8713cfa2318d5a85d75b36e53a40379cc7eb7634ce400ecacbaf" dependencies = [ "base64", "bstr", "curl", "gix-command", "gix-credentials", "gix-features 0.39.1", "gix-packetline", "gix-quote", "gix-sec 0.10.12", "gix-url", "thiserror 2.0.17", ] [[package]] name = "gix-traverse" version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ed47d648619e23e93f971d2bba0d10c1100e54ef95d2981d609907a8cabac89" dependencies = [ "bitflags", "gix-commitgraph", "gix-date 0.9.4", "gix-hash 0.15.1", "gix-hashtable 0.6.0", "gix-object 0.46.1", "gix-revwalk", "smallvec", "thiserror 2.0.17", ] [[package]] name = "gix-url" version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d096fb733ba6bd3f5403dba8bd72bdd8809fe2b347b57844040b8f49c93492d9" dependencies = [ "bstr", "gix-features 0.39.1", "gix-path", "percent-encoding", "thiserror 2.0.17", "url", ] [[package]] name = "gix-utils" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff08f24e03ac8916c478c8419d7d3c33393da9bb41fa4c24455d5406aeefd35f" dependencies = [ "bstr", "fastrand", "unicode-normalization", ] [[package]] name = "gix-utils" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "befcdbdfb1238d2854591f760a48711bed85e72d80a10e8f2f93f656746ef7c5" dependencies = [ "fastrand", "unicode-normalization", ] [[package]] name = "gix-validate" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"34b5f1253109da6c79ed7cf6e1e38437080bb6d704c76af14c93e2f255234084" dependencies = [ "bstr", "thiserror 2.0.17", ] [[package]] name = "gix-validate" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b1e63a5b516e970a594f870ed4571a8fdcb8a344e7bd407a20db8bd61dbfde4" dependencies = [ "bstr", "thiserror 2.0.17", ] [[package]] name = "gix-worktree" version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "756dbbe15188fa22540d5eab941f8f9cf511a5364d5aec34c88083c09f4bea13" dependencies = [ "bstr", "gix-attributes", "gix-features 0.39.1", "gix-fs 0.12.1", "gix-glob 0.17.1", "gix-hash 0.15.1", "gix-ignore", "gix-index", "gix-object 0.46.1", "gix-path", "gix-validate 0.9.4", ] [[package]] name = "glob" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "globset" version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3" dependencies = [ "aho-corasick", "bstr", "log", "regex-automata", "regex-syntax 0.8.8", ] [[package]] name = "group" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", "rand_core", "subtle", ] [[package]] name = "h2" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", "http", "indexmap", "slab", "tokio", "tokio-util", "tracing", ] [[package]] name = "hash32" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606" dependencies = [ 
"byteorder", ] [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", ] [[package]] name = "hashbrown" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" [[package]] name = "hashlink" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ "hashbrown 0.14.5", ] [[package]] name = "heapless" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bfb9eb618601c89945a70e254898da93b13be0388091d42117462b265bb3fad" dependencies = [ "hash32", "stable_deref_trait", ] [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hkdf" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] [[package]] name = "hmac" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ "digest", ] [[package]] name = "home" version = "0.5.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "html5ever" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" dependencies = [ "log", "mac", "markup5ever 0.11.0", "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "html5ever" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6452c4751a24e1b99c3260d505eaeee76a050573e61f30ac2c924ddc7236f01e" dependencies = [ "log", "markup5ever 0.36.1", ] [[package]] name = "http" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", "itoa", ] [[package]] name = "http-auth" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "150fa4a9462ef926824cf4519c84ed652ca8f4fbae34cb8af045b5cbcaf98822" dependencies = [ "memchr", ] [[package]] name = "http-body" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http", ] [[package]] name = "http-body-util" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", "http", "http-body", "pin-project-lite", ] [[package]] name = "httparse" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "humantime" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" [[package]] name = "hyper" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", "bytes", "futures-channel", "futures-core", "h2", "http", "http-body", "httparse", "itoa", "pin-project-lite", "pin-utils", "smallvec", "tokio", "want", ] [[package]] name = "hyper-rustls" version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ "http", "hyper", "hyper-util", "rustls", "rustls-pki-types", "tokio", "tokio-rustls", "tower-service", ] [[package]] name = "hyper-tls" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", "hyper", "hyper-util", "native-tls", "tokio", "tokio-native-tls", "tower-service", ] [[package]] name = "hyper-util" version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" dependencies = [ "base64", "bytes", "futures-channel", "futures-core", "futures-util", "http", "http-body", "hyper", "ipnet", "libc", "percent-encoding", "pin-project-lite", "socket2", "system-configuration", "tokio", "tower-service", "tracing", "windows-registry", ] [[package]] name = "iana-time-zone" version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "log", "wasm-bindgen", "windows-core", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = "icu_collections" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_locale_core" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", "tinystr", "writeable", "zerovec", ] [[package]] name = "icu_normalizer" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "zerovec", ] [[package]] name = "icu_normalizer_data" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" dependencies = [ "icu_collections", "icu_locale_core", "icu_properties_data", "icu_provider", "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" [[package]] name = "icu_provider" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", "icu_locale_core", 
"writeable", "yoke", "zerofrom", "zerotrie", "zerovec", ] [[package]] name = "idna" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", "utf8_iter", ] [[package]] name = "idna_adapter" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", ] [[package]] name = "ignore" version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3d782a365a015e0f5c04902246139249abf769125006fbe7649e2ee88169b4a" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", "regex-automata", "same-file", "walkdir", "winapi-util", ] [[package]] name = "im-rc" version = "15.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe" dependencies = [ "bitmaps", "rand_core", "rand_xoshiro", "sized-chunks", "typenum", "version_check", ] [[package]] name = "indexmap" version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", "hashbrown 0.16.1", "serde", "serde_core", ] [[package]] name = "indoc" version = "2.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" dependencies = [ "rustversion", ] [[package]] name = "ipnet" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "iri-string" version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" dependencies = [ "memchr", "serde", ] [[package]] name = "is-terminal" version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ "hermit-abi", "libc", "windows-sys 0.61.2", ] [[package]] name = "is_executable" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baabb8b4867b26294d818bf3f651a454b6901431711abb96e296245888d6e8c4" dependencies = [ "windows-sys 0.60.2", ] [[package]] name = "is_terminal_polyfill" version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = "itertools" version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itertools" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jiff" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49cce2b81f2098e7e3efc35bc2e0a6b7abec9d34128283d7a26fa8f32a6dbb35" dependencies = [ "jiff-static", "jiff-tzdb-platform", "log", "portable-atomic", "portable-atomic-util", "serde_core", "windows-sys 0.61.2", ] [[package]] name = "jiff-static" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "980af8b43c3ad5d8d349ace167ec8170839f753a42d233ba19e08afe1850fa69" dependencies = [ "proc-macro2", 
"quote", "syn 2.0.111", ] [[package]] name = "jiff-tzdb" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1283705eb0a21404d2bfd6eef2a7593d240bc42a0bdb39db0ad6fa2ec026524" [[package]] name = "jiff-tzdb-platform" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" dependencies = [ "jiff-tzdb", ] [[package]] name = "jobserver" version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ "getrandom 0.3.4", "libc", ] [[package]] name = "js-sys" version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] name = "kstring" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558bf9508a558512042d3095138b1f7b8fe90c5467d94f9f1da28b3731c5dbd1" dependencies = [ "static_assertions", ] [[package]] name = "lalrpop" version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" dependencies = [ "ascii-canvas", "bit-set 0.5.3", "diff", "ena", "is-terminal", "itertools 0.10.5", "lalrpop-util", "petgraph", "regex", "regex-syntax 0.6.29", "string_cache 0.8.9", "term", "tiny-keccak", "unicode-xid", ] [[package]] name = "lalrpop-util" version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" dependencies = [ "regex", ] [[package]] name = "lazy-regex" version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "191898e17ddee19e60bccb3945aa02339e81edd4a8c50e21fd4d48cdecda7b29" 
dependencies = [ "lazy-regex-proc_macros", "once_cell", "regex", ] [[package]] name = "lazy-regex-proc_macros" version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c35dc8b0da83d1a9507e12122c80dea71a9c7c613014347392483a83ea593e04" dependencies = [ "proc-macro2", "quote", "regex", "syn 2.0.111", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lazycell" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" version = "0.2.178" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" [[package]] name = "libdbus-sys" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cbe856efeb50e4681f010e9aaa2bf0a644e10139e54cde10fc83a307c23bd9f" dependencies = [ "cc", "pkg-config", ] [[package]] name = "libgit2-sys" version = "0.17.0+1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" dependencies = [ "cc", "libc", "libssh2-sys", "libz-sys", "openssl-sys", "pkg-config", ] [[package]] name = "libloading" version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ "cfg-if", "windows-link", ] [[package]] name = "libnghttp2-sys" version = "0.1.11+1.64.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b6c24e48a7167cffa7119da39d577fa482e66c688a4aac016bee862e1a713c4" dependencies = [ "cc", "libc", ] [[package]] name = "libredox" version = "0.1.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ "bitflags", "libc", "redox_syscall", ] [[package]] name = "libsqlite3-sys" version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ "cc", "pkg-config", "vcpkg", ] [[package]] name = "libssh2-sys" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "220e4f05ad4a218192533b300327f5150e809b54c4ec83b5a1d91833601811b9" dependencies = [ "cc", "libc", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", ] [[package]] name = "libz-rs-sys" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "840db8cf39d9ec4dd794376f38acc40d0fc65eec2a8f484f7fd375b84602becd" dependencies = [ "zlib-rs", ] [[package]] name = "libz-sys" version = "1.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "linux-raw-sys" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "lock_api" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ "scopeguard", ] 
[[package]] name = "log" version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "m_lexer" version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7e51ebf91162d585a5bae05e4779efc4a276171cb880d61dd6fab11c98467a7" dependencies = [ "regex", ] [[package]] name = "mac" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "mailparse" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60819a97ddcb831a5614eb3b0174f3620e793e97e09195a395bfa948fd68ed2f" dependencies = [ "charset", "data-encoding", "quoted_printable", ] [[package]] name = "makefile-lossless" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a4db5908bb68ae2e0afc45301a4cb9aaceb364a06865a08efe28324a60f9217" dependencies = [ "log", "rowan", ] [[package]] name = "maplit" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "markup5ever" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" dependencies = [ "log", "phf 0.10.1", "phf_codegen 0.10.0", "string_cache 0.8.9", "string_cache_codegen 0.5.4", "tendril", ] [[package]] name = "markup5ever" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c3294c4d74d0742910f8c7b466f44dda9eb2d5742c1e430138df290a1e8451c" dependencies = [ "log", "tendril", "web_atoms", ] [[package]] name = "markup5ever_rcdom" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b9521dd6750f8e80ee6c53d65e2e4656d7de37064f3a7a5d2d11d05df93839c2" dependencies = [ "html5ever 0.26.0", "markup5ever 0.11.0", "tendril", "xml5ever", ] [[package]] name = "matchers" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" dependencies = [ "regex-automata", ] [[package]] name = "maybe-async" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "memchr" version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "memmap2" version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" dependencies = [ "libc", ] [[package]] name = "memoffset" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] [[package]] name = "mime" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "minimal-lexical" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", "simd-adler32", ] [[package]] name = "mio" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" dependencies = [ "libc", "wasi", "windows-sys 0.61.2", ] [[package]] name = "miow" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "536bfad37a309d62069485248eeaba1e8d9853aaf951caaeaed0585a95346f08" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "native-tls" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" dependencies = [ "libc", "log", "openssl", "openssl-probe", "openssl-sys", "schannel", "security-framework 2.11.1", "security-framework-sys", "tempfile", ] [[package]] name = "new_debug_unreachable" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "nix" version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ "bitflags", "cfg-if", "cfg_aliases", "libc", ] [[package]] name = "nom" version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", ] [[package]] name = "normpath" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf23ab2b905654b4cb177e30b629937b3868311d4e1cba859f899c041046e69b" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "nu-ansi-term" version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "num-bigint" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", ] [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "num-integer" version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ "num-traits", ] [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "objc2" version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" dependencies = [ "objc2-encode", ] [[package]] name = "objc2-cloud-kit" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73ad74d880bb43877038da939b7427bba67e9dd42004a18b809ba7d87cee241c" dependencies = [ "bitflags", "objc2", "objc2-foundation", ] [[package]] name = "objc2-core-data" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b402a653efbb5e82ce4df10683b6b28027616a2715e90009947d50b8dd298fa" dependencies = [ "objc2", "objc2-foundation", ] [[package]] name = "objc2-core-foundation" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" dependencies = [ "bitflags", "dispatch2", "objc2", ] [[package]] name = "objc2-core-graphics" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e022c9d066895efa1345f8e33e584b9f958da2fd4cd116792e15e07e4720a807" dependencies = [ "bitflags", "dispatch2", "objc2", 
"objc2-core-foundation", "objc2-io-surface", ] [[package]] name = "objc2-core-image" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d563b38d2b97209f8e861173de434bd0214cf020e3423a52624cd1d989f006" dependencies = [ "objc2", "objc2-foundation", ] [[package]] name = "objc2-core-location" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca347214e24bc973fc025fd0d36ebb179ff30536ed1f80252706db19ee452009" dependencies = [ "objc2", "objc2-foundation", ] [[package]] name = "objc2-core-text" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cde0dfb48d25d2b4862161a4d5fcc0e3c24367869ad306b0c9ec0073bfed92d" dependencies = [ "bitflags", "objc2", "objc2-core-foundation", "objc2-core-graphics", ] [[package]] name = "objc2-encode" version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" [[package]] name = "objc2-foundation" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" dependencies = [ "bitflags", "block2", "libc", "objc2", "objc2-core-foundation", ] [[package]] name = "objc2-io-surface" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180788110936d59bab6bd83b6060ffdfffb3b922ba1396b312ae795e1de9d81d" dependencies = [ "bitflags", "objc2", "objc2-core-foundation", ] [[package]] name = "objc2-quartz-core" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96c1358452b371bf9f104e21ec536d37a650eb10f7ee379fff67d2e08d537f1f" dependencies = [ "bitflags", "objc2", "objc2-core-foundation", "objc2-foundation", ] [[package]] name = "objc2-ui-kit" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d87d638e33c06f577498cbcc50491496a3ed4246998a7fbba7ccb98b1e7eab22" dependencies = [ "bitflags", "block2", "objc2", "objc2-cloud-kit", "objc2-core-data", "objc2-core-foundation", "objc2-core-graphics", "objc2-core-image", "objc2-core-location", "objc2-core-text", "objc2-foundation", "objc2-quartz-core", "objc2-user-notifications", ] [[package]] name = "objc2-user-notifications" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9df9128cbbfef73cda168416ccf7f837b62737d748333bfe9ab71c245d76613e" dependencies = [ "objc2", "objc2-foundation", ] [[package]] name = "object" version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] [[package]] name = "object" version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "once_cell_polyfill" version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" [[package]] name = "opam-file-rs" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dc9fde26706c9170630772dd86981d874e9a3107cc456c811e1ee234e0c4863" dependencies = [ "lalrpop", "lalrpop-util", "thiserror 1.0.69", ] [[package]] name = "opener" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0812e5e4df08da354c851a3376fead46db31c2214f849d3de356d774d057681" dependencies = [ "bstr", "dbus", "normpath", "windows-sys 0.59.0", ] [[package]] name = "openssl" version = "0.10.75" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" dependencies = [ "bitflags", "cfg-if", "foreign-types", "libc", "once_cell", "openssl-macros", "openssl-sys", ] [[package]] name = "openssl-macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" version = "0.9.111" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "ordered-float" version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" dependencies = [ "num-traits", ] [[package]] name = "ordered-multimap" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" dependencies = [ "dlv-list", "hashbrown 0.14.5", ] [[package]] name = "orion" version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21b3da83b2b4cdc74ab6a556b2e7b473da046d5aa4008c0a7a3ae96b1b4aabb4" dependencies = [ "fiat-crypto", "subtle", "zeroize", ] [[package]] name = "os_info" version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c39b5918402d564846d5aba164c09a66cc88d232179dfd3e3c619a25a268392" dependencies = [ "android_system_properties", "log", "nix", "objc2", "objc2-foundation", "objc2-ui-kit", "windows-sys 0.61.2", ] 
[[package]] name = "p384" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6" dependencies = [ "ecdsa", "elliptic-curve", "primeorder", "sha2", ] [[package]] name = "parking_lot" version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-link", ] [[package]] name = "pasetors" version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03e1ed71dcdf863d9f66d9de86de714db38aedc2fcabc1a60207d1fde603e2d5" dependencies = [ "ct-codecs", "ed25519-compact", "getrandom 0.3.4", "orion", "p384", "rand_core", "regex", "serde", "serde_derive", "serde_json", "sha2", "subtle", "time", "zeroize", ] [[package]] name = "patchkit" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f21e87e02a475262c3166d32fea34710510448b37117cc448c1be03975816baf" dependencies = [ "chrono", "lazy-regex", "lazy_static", "once_cell", "proc-macro2", "regex", "rowan", ] [[package]] name = "pathdiff" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" [[package]] name = "pem-rfc7468" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" dependencies = [ "base64ct", ] [[package]] name = "pep440_rs" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"31095ca1f396e3de32745f42b20deef7bc09077f918b085307e8eab6ddd8fb9c" dependencies = [ "once_cell", "serde", "unicode-width", "unscanny", "version-ranges", ] [[package]] name = "pep508_rs" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "faee7227064121fcadcd2ff788ea26f0d8f2bd23a0574da11eca23bc935bcc05" dependencies = [ "boxcar", "indexmap", "itertools 0.13.0", "once_cell", "pep440_rs", "regex", "rustc-hash 2.1.1", "serde", "smallvec", "thiserror 1.0.69", "unicode-width", "url", "urlencoding", "version-ranges", ] [[package]] name = "percent-encoding" version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" dependencies = [ "memchr", "ucd-trie", ] [[package]] name = "pest_derive" version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f" dependencies = [ "pest", "pest_generator", ] [[package]] name = "pest_generator" version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "pest_meta" version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82" dependencies = [ "pest", "sha2", ] [[package]] name = "petgraph" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", "indexmap", ] [[package]] name = "phf" version 
= "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ "phf_shared 0.10.0", ] [[package]] name = "phf" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" dependencies = [ "phf_shared 0.13.1", "serde", ] [[package]] name = "phf_codegen" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" dependencies = [ "phf_generator 0.10.0", "phf_shared 0.10.0", ] [[package]] name = "phf_codegen" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49aa7f9d80421bca176ca8dbfebe668cc7a2684708594ec9f3c0db0805d5d6e1" dependencies = [ "phf_generator 0.13.1", "phf_shared 0.13.1", ] [[package]] name = "phf_generator" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared 0.10.0", "rand", ] [[package]] name = "phf_generator" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared 0.11.3", "rand", ] [[package]] name = "phf_generator" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" dependencies = [ "fastrand", "phf_shared 0.13.1", ] [[package]] name = "phf_shared" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ "siphasher 0.3.11", ] [[package]] name = "phf_shared" version = "0.11.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher 1.0.1", ] [[package]] name = "phf_shared" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" dependencies = [ "siphasher 1.0.1", ] [[package]] name = "pin-project-lite" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkcs8" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", "spki", ] [[package]] name = "pkg-config" version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "portable-atomic" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" [[package]] name = "portable-atomic-util" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" dependencies = [ "portable-atomic", ] [[package]] name = "potential_utf" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" dependencies = [ "zerovec", ] [[package]] name = "powerfmt" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] [[package]] name = "precomputed-hash" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "pretty_assertions" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", ] [[package]] name = "primeorder" version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro2" version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] [[package]] name = "prodash" version = "29.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f04bb108f648884c23b98a0e940ebc2c93c0c3b89f04dbaf7eb8256ce617d1bc" dependencies = [ "log", "parking_lot", ] [[package]] name = "prodash" version = "30.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a6efc566849d3d9d737c5cb06cc50e48950ebe3d3f9d70631490fff3a07b139" dependencies = [ "parking_lot", ] [[package]] name = "psm" version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d11f2fedc3b7dafdc2851bc52f277377c5473d378859be234bc7ebb593144d01" dependencies = [ "ar_archive_writer", "cc", ] [[package]] name = "pulldown-cmark" version = "0.13.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0" dependencies = [ "bitflags", "getopts", "memchr", "pulldown-cmark-escape", "unicase", ] [[package]] name = "pulldown-cmark-escape" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" [[package]] name = "pyo3" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab53c047fcd1a1d2a8820fe84f05d6be69e9526be40cb03b73f86b6b03e6d87d" dependencies = [ "chrono", "indoc", "libc", "memoffset", "once_cell", "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", "serde", "unindent", ] [[package]] name = "pyo3-build-config" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b455933107de8642b4487ed26d912c2d899dec6114884214a0b3bb3be9261ea6" dependencies = [ "target-lexicon", ] [[package]] name = "pyo3-ffi" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c85c9cbfaddf651b1221594209aed57e9e5cff63c4d11d1feead529b872a089" dependencies = [ "libc", "pyo3-build-config", ] [[package]] name = "pyo3-filelike" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57429f455b9811f2a8af73d8bae91e028fbf6f62ad4011073c2248bb028a2288" dependencies = [ "pyo3", ] [[package]] name = "pyo3-macros" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a5b10c9bf9888125d917fb4d2ca2d25c8df94c7ab5a52e13313a07e050a3b02" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", "syn 2.0.111", ] [[package]] name = "pyo3-macros-backend" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03b51720d314836e53327f5871d4c0cfb4fb37cc2c4a11cc71907a86342c40f9" dependencies = [ "heck", "proc-macro2", 
"pyo3-build-config", "quote", "syn 2.0.111", ] [[package]] name = "pyproject-toml" version = "0.13.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6d755483ad14b49e76713b52285235461a5b4f73f17612353e11a5de36a5fd2" dependencies = [ "indexmap", "pep440_rs", "pep508_rs", "serde", "thiserror 2.0.17", "toml 0.9.8", ] [[package]] name = "python-pkginfo" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "464e5e2e0fb6c8c2c7aedc0cd6615258a3def4e34b417f6bf8835e76e7d441d4" dependencies = [ "flate2", "fs-err", "mailparse", "rfc2047-decoder", "tar", "thiserror 2.0.17", "zip", ] [[package]] name = "quote" version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] [[package]] name = "quoted_printable" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "640c9bd8497b02465aeef5375144c26062e0dcd5939dfcbb0f5db76cb8c17c73" [[package]] name = "r-description" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f97b1f65f8b0a2687d665c2d16e866f69c5c2f0486ccf905b776badacb3ebb53" dependencies = [ "deb822-derive", "deb822-fast", "deb822-lossless", "rowan", "url", ] [[package]] name = "r-efi" version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ 
"ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.16", ] [[package]] name = "rand_xoshiro" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ "rand_core", ] [[package]] name = "redox_syscall" version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ "bitflags", ] [[package]] name = "redox_users" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom 0.2.16", "libredox", "thiserror 1.0.69", ] [[package]] name = "regex" version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax 0.8.8", ] [[package]] name = "regex-automata" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", "regex-syntax 0.8.8", ] [[package]] name = "regex-syntax" version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" version = "0.12.24" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64", "bytes", "encoding_rs", "futures-channel", "futures-core", "futures-util", "h2", "http", "http-body", "http-body-util", "hyper", "hyper-rustls", "hyper-tls", "hyper-util", "js-sys", "log", "mime", "native-tls", "percent-encoding", "pin-project-lite", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "tokio", "tokio-native-tls", "tower", "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", ] [[package]] name = "rfc2047-decoder" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc36545d1021456a751b573517cb52e8c339b2f662e6b2778ef629282678de29" dependencies = [ "base64", "charset", "chumsky", "memchr", "quoted_printable", "thiserror 2.0.17", ] [[package]] name = "rfc6979" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ "hmac", "subtle", ] [[package]] name = "ring" version = "0.17.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", "getrandom 0.2.16", "libc", "untrusted", "windows-sys 0.52.0", ] [[package]] name = "roff" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88f8660c1ff60292143c98d08fc6e2f654d722db50410e3f3797d40baaf9d8f3" [[package]] name = "rowan" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "417a3a9f582e349834051b8a10c8d71ca88da4211e4093528e36b9845f6b5f21" dependencies = [ "countme", "hashbrown 0.14.5", "rustc-hash 1.1.0", "text-size", ] [[package]] name = "rst_parser" version = "0.4.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6f3029872a42c0be67d86e3e88bf8c1e73d1da3d714da00b9c29f60a4605bfb1" dependencies = [ "anyhow", "document_tree", "pest", "pest_derive", ] [[package]] name = "rst_renderer" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf982408766e5055367c60382b78dcee50c83b2b731e036a8b510e0aedf1efa1" dependencies = [ "anyhow", "document_tree", "serde-xml-rs", "serde_json", ] [[package]] name = "rusqlite" version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e" dependencies = [ "bitflags", "fallible-iterator", "fallible-streaming-iterator", "hashlink", "libsqlite3-sys", "smallvec", ] [[package]] name = "rust-ini" version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "796e8d2b6696392a43bea58116b667fb4c29727dc5abd27d6acf338bb4f688c7" dependencies = [ "cfg-if", "ordered-multimap", ] [[package]] name = "rustc-demangle" version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" [[package]] name = "rustc-hash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc-stable-hash" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08" [[package]] name = "rustfix" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"267bf52289c9e66a8f140f1c8109c1324f5f39248b8af5997bd0d78ec8d6ffd2" dependencies = [ "serde", "serde_json", "thiserror 2.0.17", "tracing", ] [[package]] name = "rustix" version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.4.15", "windows-sys 0.59.0", ] [[package]] name = "rustix" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.11.0", "windows-sys 0.61.2", ] [[package]] name = "rustls" version = "0.23.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "once_cell", "rustls-pki-types", "rustls-webpki", "subtle", "zeroize", ] [[package]] name = "rustls-pki-types" version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "708c0f9d5f54ba0272468c1d306a52c495b31fa155e91bc25371e6df7996908c" dependencies = [ "zeroize", ] [[package]] name = "rustls-webpki" version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ "ring", "rustls-pki-types", "untrusted", ] [[package]] name = "rustversion" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "schannel" version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sec1" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct", "der", "generic-array", "pkcs8", "subtle", "zeroize", ] [[package]] name = "security-framework" version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags", "core-foundation 0.9.4", "core-foundation-sys", "libc", "security-framework-sys", ] [[package]] name = "security-framework" version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ "bitflags", "core-foundation 0.10.1", "core-foundation-sys", "libc", "security-framework-sys", ] [[package]] name = "security-framework-sys" version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "select" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5910c1d91bd7e6e178c0f8eb9e4ad01f814064b4a1c0ae3c906224a3cbf12879" dependencies = [ "bit-set 0.5.3", "html5ever 0.26.0", "markup5ever_rcdom", ] [[package]] name = "semver" version = "1.0.27" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" dependencies = [ "serde", "serde_core", ] [[package]] name = "serde" version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ "serde_core", "serde_derive", ] [[package]] name = "serde-untagged" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058" dependencies = [ "erased-serde", "serde", "serde_core", "typeid", ] [[package]] name = "serde-value" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ "ordered-float", "serde", ] [[package]] name = "serde-xml-rs" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65162e9059be2f6a3421ebbb4fef3e74b7d9e7c60c50a0e292c6239f19f1edfa" dependencies = [ "log", "serde", "thiserror 1.0.69", "xml-rs", ] [[package]] name = "serde_core" version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "serde_ignored" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "115dffd5f3853e06e746965a20dcbae6ee747ae30b543d91b0e089668bb07798" dependencies = [ "serde", "serde_core", ] [[package]] name = "serde_json" version = "1.0.145" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", "serde_core", ] [[package]] name = "serde_spanned" version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" dependencies = [ "serde", ] [[package]] name = "serde_spanned" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" dependencies = [ "serde_core", ] [[package]] name = "serde_urlencoded" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", "itoa", "ryu", "serde", ] [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ "indexmap", "itoa", "ryu", "serde", "unsafe-libyaml", ] [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "sha1-checked" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89f599ac0c323ebb1c6082821a54962b839832b03984598375bff3975b804423" dependencies = [ "digest", "sha1", ] [[package]] name = "sha1_smol" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" [[package]] name = "sha2" version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "sharded-slab" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] [[package]] name = "shell-escape" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" [[package]] name = "shell-words" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" version = "1.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7664a098b8e616bdfcc2dc0e9ac44eb231eedf41db4e9fe95d8d32ec728dedad" dependencies = [ "libc", ] [[package]] name = "signature" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", "rand_core", ] [[package]] name = "simd-adler32" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" [[package]] name = "siphasher" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "siphasher" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = 
"sized-chunks" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" dependencies = [ "bitmaps", "typenum", ] [[package]] name = "slab" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "smawk" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "socket2" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" dependencies = [ "libc", "windows-sys 0.60.2", ] [[package]] name = "spki" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", ] [[package]] name = "stable_deref_trait" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" dependencies = [ "cc", "cfg-if", "libc", "psm", "windows-sys 0.59.0", ] [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_cache" version = "0.8.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot", "phf_shared 0.11.3", "precomputed-hash", "serde", ] [[package]] name = "string_cache" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a18596f8c785a729f2819c0f6a7eae6ebeebdfffbfe4214ae6b087f690e31901" dependencies = [ "new_debug_unreachable", "parking_lot", "phf_shared 0.13.1", "precomputed-hash", "serde", ] [[package]] name = "string_cache_codegen" version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ "phf_generator 0.11.3", "phf_shared 0.11.3", "proc-macro2", "quote", ] [[package]] name = "string_cache_codegen" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "585635e46db231059f76c5849798146164652513eb9e8ab2685939dd90f29b69" dependencies = [ "phf_generator 0.13.1", "phf_shared 0.13.1", "proc-macro2", "quote", ] [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "supports-hyperlinks" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b" [[package]] name = "supports-unicode" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" [[package]] name = "syn" version = "1.0.109" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "syn" version = "2.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] [[package]] name = "synstructure" version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", "unicode-xid", ] [[package]] name = "synstructure" version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "system-configuration" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags", "core-foundation 0.9.4", "system-configuration-sys", ] [[package]] name = "system-configuration-sys" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "tar" version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" dependencies = [ "filetime", "libc", "xattr", ] [[package]] name = 
"target-lexicon" version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df7f62577c25e07834649fc3b39fafdc597c0a3527dc1c60129201ccfcbaa50c" [[package]] name = "tempfile" version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.4", "once_cell", "rustix 1.1.2", "windows-sys 0.61.2", ] [[package]] name = "tendril" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" dependencies = [ "futf", "mac", "utf-8", ] [[package]] name = "term" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" dependencies = [ "dirs-next", "rustversion", "winapi", ] [[package]] name = "terminal_size" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" dependencies = [ "rustix 1.1.2", "windows-sys 0.60.2", ] [[package]] name = "text-size" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "textwrap" version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" dependencies = [ "smawk", "unicode-linebreak", "unicode-width", ] [[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" version = "2.0.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ "thiserror-impl 2.0.17", ] [[package]] name = "thiserror-impl" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "thiserror-impl" version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "thread_local" version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", ] [[package]] name = "time" version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", "num-conv", "powerfmt", "serde", "time-core", "time-macros", ] [[package]] name = "time-core" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", ] [[package]] name = "tiny-keccak" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" dependencies = [ "crunchy", ] [[package]] name = "tinystr" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", ] [[package]] name = "tinyvec" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ "bytes", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "tokio-native-tls" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", "tokio", ] [[package]] name = "tokio-rustls" version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ "rustls", "tokio", ] [[package]] name = "tokio-util" version = "0.7.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", ] [[package]] name = "toml" version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_edit", ] [[package]] name = "toml" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" dependencies = [ "indexmap", "serde_core", "serde_spanned 1.0.3", "toml_datetime 0.7.3", "toml_parser", "toml_writer", "winnow 0.7.14", ] [[package]] name = "toml_datetime" version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" dependencies = [ "serde", ] [[package]] name = "toml_datetime" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" dependencies = [ "serde_core", ] [[package]] name = "toml_edit" version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap", "serde", "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_write", "winnow 0.7.14", ] [[package]] name = "toml_parser" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" dependencies = [ "winnow 0.7.14", ] [[package]] name = "toml_write" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "toml_writer" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2" [[package]] name = "tower" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", "sync_wrapper", "tokio", "tower-layer", "tower-service", ] [[package]] name = "tower-http" version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cf146f99d442e8e68e585f5d798ccd3cad9a7835b917e09728880a862706456" dependencies = [ "bitflags", "bytes", "futures-util", "http", "http-body", "iri-string", "pin-project-lite", "tower", "tower-layer", "tower-service", ] [[package]] name = "tower-layer" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647" dependencies = [ "pin-project-lite", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "tracing-chrome" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf0a738ed5d6450a9fb96e86a23ad808de2b727fd1394585da5cdd6788ffe724" dependencies = [ "serde_json", "tracing-core", "tracing-subscriber", ] [[package]] name = "tracing-core" version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c" dependencies = [ "once_cell", "valuable", ] [[package]] name = "tracing-log" 
version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ "log", "once_cell", "tracing-core", ] [[package]] name = "tracing-subscriber" version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ "matchers", "nu-ansi-term", "once_cell", "regex-automata", "sharded-slab", "smallvec", "thread_local", "tracing", "tracing-core", "tracing-log", ] [[package]] name = "try-lock" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typeid" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" [[package]] name = "typenum" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "ucd-trie" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicase" version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-bom" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217" [[package]] name = "unicode-ident" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "unicode-linebreak" version = "0.1.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-normalization" version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" dependencies = [ "tinyvec", ] [[package]] name = "unicode-width" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" [[package]] name = "unicode-xid" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "unindent" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" [[package]] name = "unsafe-libyaml" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "unscanny" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9df2af067a7953e9c3831320f35c1cc0600c30d44d9f7a12b01db1cd88d6b47" [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "upstream-ontologist" version = "0.3.6" dependencies = [ "async-trait", "bit-set 0.8.0", "bit-vec 0.8.0", "breezyshim", "chrono", "clap", "configparser", "debbugs", "debcargo", "debian-changelog", "debian-control", "debian-copyright", "debian-watch", "debversion", "distro-info", "env_logger", "futures", "gix-config 0.48.0", "html5ever 0.36.1", "lazy-regex", "lazy_static", "log", "makefile-lossless", "maplit", "opam-file-rs", "openssl", 
"percent-encoding", "pretty_assertions", "pulldown-cmark", "pyo3", "pyo3-filelike", "pyproject-toml", "python-pkginfo", "quote", "r-description", "regex", "reqwest", "rst_parser", "rst_renderer", "rust-ini", "select", "semver", "serde", "serde_json", "serde_yaml", "shlex", "tempfile", "tendril", "textwrap", "tokio", "toml 0.9.8", "url", "xmltree 0.12.0", ] [[package]] name = "url" version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", ] [[package]] name = "urlencoding" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" [[package]] name = "utf-8" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8_iter" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "valuable" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version-ranges" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3595ffe225639f1e0fd8d7269dcc05d2fbfea93cfac2fea367daf1adb60aae91" dependencies = [ "smallvec", ] 
[[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "want" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ "try-lock", ] [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasip2" version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ "wit-bindgen", ] [[package]] name = "wasite" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" version = "0.4.56" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" dependencies = [ "cfg-if", "js-sys", "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" dependencies = [ "bumpalo", "proc-macro2", "quote", "syn 2.0.111", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" dependencies = [ "unicode-ident", ] [[package]] name = "web-sys" version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "web_atoms" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acd0c322f146d0f8aad130ce6c187953889359584497dac6561204c8e17bb43d" dependencies = [ "phf 0.13.1", "phf_codegen 0.13.1", "string_cache 0.9.0", "string_cache_codegen 0.6.1", ] [[package]] name = "whoami" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" dependencies = [ "libredox", "wasite", "web-sys", ] [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", "windows-link", "windows-result", "windows-strings", ] [[package]] name = "windows-implement" version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "windows-interface" version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-registry" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ "windows-link", "windows-result", "windows-strings", ] [[package]] name = "windows-result" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" version = "0.5.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ "windows-link", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ "windows-targets 0.53.5", ] [[package]] name = "windows-sys" version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ "windows-link", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows-targets" version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ "windows-link", "windows_aarch64_gnullvm 0.53.1", "windows_aarch64_msvc 0.53.1", "windows_i686_gnu 0.53.1", "windows_i686_gnullvm 0.53.1", "windows_i686_msvc 0.53.1", "windows_x86_64_gnu 0.53.1", 
"windows_x86_64_gnullvm 0.53.1", "windows_x86_64_msvc 0.53.1", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" version = "0.53.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winnow" version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e90edd2ac1aa278a5c4599b1d89cf03074b610800f866d4026dc199d7929a28" dependencies = [ "memchr", ] [[package]] name = "winnow" version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" dependencies = [ "memchr", ] [[package]] name = "wit-bindgen" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" 
[[package]] name = "writeable" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "xattr" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" dependencies = [ "libc", "rustix 1.1.2", ] [[package]] name = "xml" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2df5825faced2427b2da74d9100f1e2e93c533fff063506a81ede1cf517b2e7e" [[package]] name = "xml-rs" version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae8337f8a065cfc972643663ea4279e04e7256de865aa66fe25cec5fb912d3f" [[package]] name = "xml5ever" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4034e1d05af98b51ad7214527730626f019682d797ba38b51689212118d8e650" dependencies = [ "log", "mac", "markup5ever 0.11.0", ] [[package]] name = "xmltree" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b619f8c85654798007fb10afa5125590b43b088c225a25fc2fec100a9fad0fc6" dependencies = [ "xml-rs", ] [[package]] name = "xmltree" version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbc04313cab124e498ab1724e739720807b6dc405b9ed0edc5860164d2e4ff70" dependencies = [ "xml", ] [[package]] name = "yansi" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yoke" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ "stable_deref_trait", "yoke-derive", "zerofrom", ] [[package]] name = "yoke-derive" version = "0.8.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", "synstructure 0.13.2", ] [[package]] name = "zerocopy" version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "zerofrom" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", "synstructure 0.13.2", ] [[package]] name = "zeroize" version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" [[package]] name = "zerotrie" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" dependencies = [ "displaydoc", "yoke", "zerofrom", ] [[package]] name = "zerovec" version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" version = "0.11.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", "syn 2.0.111", ] [[package]] name = "zip" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb2a05c7c36fde6c09b08576c9f7fb4cda705990f73b58fe011abf7dfb24168b" dependencies = [ "arbitrary", "crc32fast", "flate2", "indexmap", "memchr", "zopfli", ] [[package]] name = "zlib-rs" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f06ae92f42f5e5c42443fd094f245eb656abf56dd7cce9b8b263236565e00f2" [[package]] name = "zopfli" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249" dependencies = [ "bumpalo", "crc32fast", "log", "simd-adler32", ] upstream-ontologist-0.3.6/Cargo.toml0000644000000126150000000000100131040ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. 
[package] edition = "2021" name = "upstream-ontologist" version = "0.3.6" authors = ["Jelmer Vernooij "] build = "build.rs" include = [ "testdata", "readme_tests", "man", "build.rs", "src/*.rs", "README.md", "src/**/*.rs", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false default-run = "guess-upstream-metadata" description = "tracking of upstream project metadata" homepage = "https://github.com/jelmer/upstream-ontologist" readme = "README.md" license = "GPL-2.0+" repository = "https://github.com/jelmer/upstream-ontologist.git" [features] cargo = ["dep:toml"] cli = [ "dep:clap", "dep:env_logger", ] debcargo = ["dep:debcargo"] debian = [ "dep:debian-watch", "dep:makefile-lossless", "dep:debian-changelog", "dep:debian-control", "dep:debian-copyright", "dep:debversion", ] debversion = ["dep:debversion"] default = [ "git-config", "launchpad", "opam", "dist-ini", "cargo", "r-description", "pyproject-toml", "python-pkginfo", "debian", "pyo3", "setup-cfg", ] dist-ini = ["dep:rust-ini"] git-config = ["dep:gix-config"] launchpad = ["dep:distro-info"] opam = ["dep:opam-file-rs"] pyo3 = ["dep:pyo3"] pyproject-toml = ["dep:pyproject-toml"] python-pkginfo = ["dep:python-pkginfo"] r-description = ["dep:r-description"] setup-cfg = ["dep:rust-ini"] [lib] name = "upstream_ontologist" path = "src/lib.rs" [[bin]] name = "autocodemeta" path = "src/bin/autocodemeta.rs" required-features = ["cli"] [[bin]] name = "autodoap" path = "src/bin/autodoap.rs" required-features = ["cli"] [[bin]] name = "guess-upstream-metadata" path = "src/bin/guess-upstream-metadata.rs" required-features = ["cli"] [dependencies.async-trait] version = ">=0.1.89, <0.2" [dependencies.bit-set] version = ">=0.5.3, <0.9" [dependencies.bit-vec] version = ">=0.6.3, <0.9" [dependencies.breezyshim] version = ">=0.7.0, <0.8" features = ["auto-initialize"] default-features = false [dependencies.chrono] version = ">=0.4.42, <0.5" [dependencies.clap] version = ">=4.5.48, <5" 
features = [ "derive", "env", ] optional = true [dependencies.configparser] version = ">=3.0.3, <4" [dependencies.debbugs] version = ">=0.1.5, <0.2" [dependencies.debcargo] version = ">=2.7.10, <3" optional = true [dependencies.debian-changelog] version = ">=0.2.1, <0.3" optional = true [dependencies.debian-control] version = ">=0.1.48, <0.3" optional = true [dependencies.debian-copyright] version = ">=0.1.28, <0.2" optional = true [dependencies.debian-watch] version = ">=0.2.8, <0.3" optional = true [dependencies.debversion] version = ">=0.4.7, <0.6" optional = true [dependencies.distro-info] version = ">=0.4, <0.5" optional = true [dependencies.env_logger] version = ">=0.11.8, <0.12" optional = true [dependencies.futures] version = ">=0.3.31, <0.4" [dependencies.gix-config] version = ">=0.42.0, <0.49" optional = true [dependencies.html5ever] version = ">=0.26, <0.37" [dependencies.lazy-regex] version = ">=3.4, <4" [dependencies.lazy_static] version = ">=1.5.0, <2" [dependencies.log] version = ">=0.4.21, <0.5" [dependencies.makefile-lossless] version = ">=0.2.1, <0.4" optional = true [dependencies.maplit] version = ">=1.0.2, <2" [dependencies.opam-file-rs] version = ">=0.1.5, <0.2" optional = true [dependencies.openssl] version = ">=0.10.64, <0.11" [dependencies.percent-encoding] version = ">=2.3.1, <3" [dependencies.pulldown-cmark] version = ">=0.13.0, <0.14" [dependencies.pyo3] version = ">=0.27.0, <0.28" optional = true [dependencies.pyo3-filelike] version = ">=0.5.1, <0.6" [dependencies.pyproject-toml] version = ">=0.13.4, <0.14" optional = true [dependencies.python-pkginfo] version = ">=0.6.5, <0.7" optional = true [dependencies.r-description] version = ">=0.3.5, <0.4" optional = true [dependencies.regex] version = ">=1.11.0, <2" [dependencies.reqwest] version = ">=0.12.15, <0.13" features = [ "blocking", "json", ] default-features = false [dependencies.rst_parser] version = ">=0.4.2, <0.5" [dependencies.rst_renderer] version = ">=0.4.2, <0.5" 
[dependencies.rust-ini] version = ">=0.21.1, <0.22" optional = true [dependencies.select] version = ">=0.6.0, <0.7" [dependencies.semver] version = ">=1.0.26, <2" features = ["serde"] [dependencies.serde] version = ">=1.0.219, <2" features = ["derive"] [dependencies.serde_json] version = ">=1.0.120, <2" [dependencies.serde_yaml] version = ">=0.9.34, <0.10" [dependencies.shlex] version = ">=1.3.0, <2" [dependencies.tendril] version = ">=0.4.3, <0.5" [dependencies.textwrap] version = ">=0.16.2, <0.17" [dependencies.tokio] version = ">=1.47.1, <2" features = ["full"] [dependencies.toml] version = ">=0.9.0, <0.10" optional = true [dependencies.url] version = ">=2.5.4, <3" [dependencies.xmltree] version = ">=0.10.3, <0.13" [dev-dependencies.pretty_assertions] version = "1.4" [dev-dependencies.tempfile] version = "3.20" [build-dependencies.quote] version = ">=1.0.40, <2" upstream-ontologist-0.3.6/Cargo.toml.orig000064400000000000000000000074111046102023000165630ustar 00000000000000[package] name = "upstream-ontologist" version = "0.3.6" authors = [ "Jelmer Vernooij ",] edition = "2021" license = "GPL-2.0+" description = "tracking of upstream project metadata" repository = "https://github.com/jelmer/upstream-ontologist.git" homepage = "https://github.com/jelmer/upstream-ontologist" default-run = "guess-upstream-metadata" include = ["testdata", "readme_tests", "man", "build.rs", "src/*.rs", "README.md", "src/**/*.rs"] [dependencies] log = ">=0.4.21, <0.5" shlex = ">=1.3.0, <2" serde_json = ">=1.0.120, <2" lazy_static = ">=1.5.0, <2" regex = ">=1.11.0, <2" url = ">=2.5.4, <3" xmltree = ">=0.10.3, <0.13" configparser = ">=3.0.3, <4" serde_yaml = ">=0.9.34, <0.10" percent-encoding = ">=2.3.1, <3" html5ever = ">=0.26, <0.37" chrono = ">=0.4.42, <0.5" textwrap = ">=0.16.2, <0.17" lazy-regex = ">=3.4, <4" breezyshim = { version = ">=0.7.0, <0.8", default-features = false, features = ["auto-initialize"] } debian-watch = { version = ">=0.2.8, <0.3", optional = true } 
debian-changelog = { version = ">=0.2.1, <0.3", optional = true } debbugs = ">=0.1.5, <0.2" clap = { version = ">=4.5.48, <5", features = ["derive", "env"], optional = true } maplit = ">=1.0.2, <2" env_logger = { version = ">=0.11.8, <0.12", optional = true } makefile-lossless = { version = ">=0.2.1, <0.4", optional = true } debian-copyright = { version = ">=0.1.28, <0.2", optional = true } debian-control = { version = ">=0.1.48, <0.3", optional = true } pulldown-cmark = ">=0.13.0, <0.14" debcargo = { version = ">=2.7.10, <3", optional = true } rst_parser = ">=0.4.2, <0.5" rst_renderer = ">=0.4.2, <0.5" select = ">=0.6.0, <0.7" tendril = ">=0.4.3, <0.5" bit-vec = ">=0.6.3, <0.9" bit-set = ">=0.5.3, <0.9" openssl = ">=0.10.64, <0.11" pyo3-filelike = ">=0.5.1, <0.6" semver = { version = ">=1.0.26, <2", features = ["serde"] } async-trait = ">=0.1.89, <0.2" tokio = { version = ">=1.47.1, <2", features = ["full"] } futures = ">=0.3.31, <0.4" debversion = { version = ">=0.4.7, <0.6", optional = true } [features] default = ["git-config", "launchpad", "opam", "dist-ini", "cargo", "r-description", "pyproject-toml", "python-pkginfo", "debian", "pyo3", "setup-cfg"] git-config = ["dep:gix-config"] launchpad = ["dep:distro-info"] opam = ["dep:opam-file-rs"] dist-ini = ["dep:rust-ini"] cargo = ["dep:toml"] r-description = ["dep:r-description"] pyproject-toml = ["dep:pyproject-toml"] python-pkginfo = ["dep:python-pkginfo"] setup-cfg = ["dep:rust-ini"] debcargo = ["dep:debcargo"] debian = ["dep:debian-watch", "dep:makefile-lossless", "dep:debian-changelog", "dep:debian-control", "dep:debian-copyright", "dep:debversion"] pyo3 = ["dep:pyo3"] cli = ["dep:clap", "dep:env_logger"] debversion = ["dep:debversion"] [lib] [dev-dependencies] pretty_assertions = "1.4" tempfile = "3.20" [build-dependencies] quote = ">=1.0.40, <2" [dependencies.pyo3] version = ">=0.27.0, <0.28" optional = true [dependencies.reqwest] version = ">=0.12.15, <0.13" features = [ "blocking", "json",] 
default-features = false [dependencies.rust-ini] version = ">=0.21.1, <0.22" optional = true [dependencies.serde] version = ">=1.0.219, <2" features = [ "derive",] [dependencies.opam-file-rs] version = ">=0.1.5, <0.2" optional = true [dependencies.gix-config] version = ">=0.42.0, <0.49" optional = true [dependencies.distro-info] version = ">=0.4, <0.5" optional = true [dependencies.toml] version = ">=0.9.0, <0.10" optional = true [dependencies.r-description] version = ">=0.3.5, <0.4" optional = true [dependencies.pyproject-toml] version = ">=0.13.4, <0.14" optional = true [dependencies.python-pkginfo] version = ">=0.6.5, <0.7" optional = true [[bin]] name = "autodoap" required-features = ["cli"] [[bin]] name = "autocodemeta" required-features = ["cli"] [[bin]] name = "guess-upstream-metadata" required-features = ["cli"] upstream-ontologist-0.3.6/README.md000064400000000000000000000123771046102023000151620ustar 00000000000000Upstream Ontologist =================== The upstream ontologist provides a common interface for finding metadata about upstream software projects. It will gather information from any sources available, prioritize data that it has higher confidence in as well as report the confidence for each of the bits of metadata. The ontologist originated in Debian and the currently reported metadata fields are loosely based on [DEP-12](https://dep-team.pages.debian.net/deps/dep12), but it is meant to be distribution-agnostic. Provided Fields --------------- Standard fields: * ``Name``: human name of the upstream project * ``Contact``: contact address of some sort of the upstream (e-mail, mailing list URL) * ``Repository``: VCS URL * ``Repository-Browse``: Web URL for viewing the VCS * ``Bug-Database``: Bug database URL (for web viewing, generally) * ``Bug-Submit``: URL to use to submit new bugs (either on the web or an e-mail address) * ``Screenshots``: List of URLs with screenshots * ``Archive``: Archive used - e.g. 
SourceForge * ``Security-Contact``: e-mail or URL with instructions for reporting security issues * ``Documentation``: Link to documentation on the web * ``Changelog``: URL to the changelog * ``FAQ``: URL to the FAQ * ``Donation``: URL to a donation page * ``Funding``: List of sources of funding for the project Extensions for upstream-ontologist, not defined in DEP-12: * ``SourceForge-Project``: sourceforge project name * ``Wiki``: Wiki URL * ``Summary``: one-line description of the project * ``Description``: longer description of the project * ``License``: Single line license (e.g. "GPL 2.0") * ``Copyright``: List of copyright holders * ``Version``: Current upstream version * ``Security-MD``: URL to markdown file with security policy * ``Author``: List of people who contributed to the project * ``Maintainer``: The maintainer of the project * ``Homepage``: homepage URL (present in ``debian/control`` in Debian packages) Supported Data Sources ---------------------- At the moment, the ontologist can read metadata from the following upstream data sources: * Python package metadata (PKG-INFO, setup.py, setup.cfg, pyproject.timl) * [package.json](https://docs.npmjs.com/cli/v7/configuring-npm/package-json) * [composer.json](https://getcomposer.org/doc/04-schema.md) * [package.xml](https://pear.php.net/manual/en/guide.developers.package2.dependencies.php) * Perl package metadata (dist.ini, META.json, META.yml, Makefile.PL) * [Perl POD files](https://perldoc.perl.org/perlpod) * GNU configure files * [R DESCRIPTION files](https://r-pkgs.org/description.html) * [Rust Cargo.toml](https://doc.rust-lang.org/cargo/reference/manifest.html) * [Maven pom.xml](https://maven.apache.org/pom.html) * [metainfo.xml](https://www.freedesktop.org/software/appstream/docs/chap-Metadata.html) * [.git/config](https://git-scm.com/docs/git-config) * SECURITY.md * [DOAP](https://github.com/ewilderj/doap) * [Haskell cabal files](https://cabal.readthedocs.io/en/3.4/cabal-package.html) * 
[go.mod](https://golang.org/doc/modules/gomod-ref) * [ruby gemspec files](https://guides.rubygems.org/specification-reference/) * [nuspec files](https://docs.microsoft.com/en-us/nuget/reference/nuspec) * [OPAM files](https://opam.ocaml.org/doc/Manual.html#Package-definitions) * Debian packaging metadata (debian/watch, debian/control, debian/rules, debian/get-orig-source.sh, debian/copyright, debian/patches) * Dart's [pubspec.yaml](https://dart.dev/tools/pub/pubspec) * meson.build It will also scan README and INSTALL for possible upstream repository URLs (and will attempt to verify that those match the local repository). In addition to local files, it can also consult external directories using their APIs: * [GitHub](https://github.com/) * [SourceForge](https://sourceforge.net/) * [repology](https://www.repology.org/) * [Launchpad](https://launchpad.net/) * [PECL](https://pecl.php.net/) * [AUR](https://aur.archlinux.org/) Example Usage ------------- The easiest way to use the upstream ontologist is by invoking the ``guess-upstream-metadata`` command in a software project: ```console $ guess-upstream-metadata ~/src/dulwich Security-MD: https://github.com/dulwich/dulwich/tree/HEAD/SECURITY.md Name: dulwich Version: 0.20.15 Bug-Database: https://github.com/dulwich/dulwich/issues Repository: https://www.dulwich.io/code/ Summary: Python Git Library Bug-Submit: https://github.com/dulwich/dulwich/issues/new ``` Alternatively, there is a Python API as part of the [upstream\_ontologist Python package](https://pypi.org/project/upstream-ontologist/). There are also ``autocodemeta`` and ``autodoap`` commands that can generate output in the [codemeta](https://codemeta.github.io/) and [DOAP](https://github.com/ewilderj/doap) formats, respectively. 
Reporting bugs -------------- When reporting bugs, please include the observed output of the ``guess-upstream-metadata`` command, the version of the upstream-ontologist package you are using, what output you were expecting, and ideally the location of the upstream source code you are using (e.g. a URL to a Git repository). If there are additional metadata fields you would like to see supported, please let us know - either with or without a patch. Similarly, if you have a new data source you would like to see supported, please file a bug and we can discuss how to add it. upstream-ontologist-0.3.6/build.rs000064400000000000000000000153661046102023000153510ustar 00000000000000use quote::{format_ident, quote}; use std::env; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; fn generate_upstream_tests(testdata_dir: &Path, dest_path: &Path) -> std::io::Result<()> { let mut w = fs::File::create(dest_path)?; write!( w, "{}", quote! { use std::path::PathBuf; use pretty_assertions::assert_eq; } )?; for entry in fs::read_dir(testdata_dir).unwrap() { let entry = entry.unwrap(); let path = entry.path(); if path.is_dir() { // Get the directory name to use in the test function name let dir_name = path.file_name().unwrap().to_str().unwrap(); let fn_name = format_ident!("test_{}", dir_name.replace(['.', '-'], "_")); let test = quote! 
{ #[tokio::test] async fn #fn_name() { let dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("testdata").join(#dir_name); let expected: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(dir.join("expected.yaml")).unwrap()).unwrap(); let actual: serde_yaml::Value = serde_yaml::to_value(crate::get_upstream_info(&dir, Some(true), Some(false), Some(false), Some(false)).await.unwrap()).unwrap(); assert_eq!(expected, actual); } }; writeln!(w, "{}", test)?; } } Ok(()) } fn generate_readme_tests(testdata_dir: &Path, dest_path: &Path) -> std::io::Result<()> { let mut w = fs::File::create(dest_path)?; let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); write!( w, "{}", quote! { use std::path::PathBuf; use pretty_assertions::assert_eq; use crate::readme::{description_from_readme_md, description_from_readme_rst, description_from_readme_plain}; } )?; for entry in fs::read_dir(testdata_dir).unwrap() { let entry = entry.unwrap(); let path = manifest_dir.join(entry.path()); if path.is_dir() { // Get the directory name to use in the test function name let dir_name = entry.file_name().to_str().unwrap().to_string(); if path.join("README.md").exists() { let fn_name = format_ident!("test_{}_readme_md", dir_name.replace(['.', '-'], "_")); let test = quote! 
{ #[test] fn #fn_name() { let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("readme_tests").join(#dir_name); let readme_md = std::fs::read_to_string(path.join("README.md")).unwrap(); let expected_description = if path.join("description").exists() { Some(std::fs::read_to_string(path.join("description")).unwrap()) } else { None }; let (actual_description, actual_md) = description_from_readme_md(&readme_md).unwrap(); let actual_md = serde_yaml::to_value(actual_md).unwrap(); assert_eq!(actual_description, expected_description); if path.join("expected.yaml").exists() { let expected_md: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(path.join("expected.yaml")).unwrap()).unwrap(); assert_eq!(actual_md, expected_md); } } }; write!(w, "{}", test)?; } else if path.join("README.rst").exists() { let fn_name = format_ident!("test_{}_readme_rst", dir_name.replace(['.', '-'], "_")); let test = quote! { #[test] fn #fn_name() { let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("readme_tests").join(#dir_name); let readme_rst = std::fs::read_to_string(path.join("README.rst")).unwrap(); let expected_description = if path.join("description").exists() { Some(std::fs::read_to_string(path.join("description")).unwrap()) } else { None }; let (actual_description, actual_md) = description_from_readme_rst(&readme_rst).unwrap(); let actual_md = serde_yaml::to_value(actual_md).unwrap(); assert_eq!(actual_description, expected_description); if path.join("expected.yaml").exists() { let expected_md: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(path.join("expected.yaml")).unwrap()).unwrap(); assert_eq!(actual_md, expected_md); } } }; write!(w, "{}", test)?; } else { let fn_name = format_ident!("test_{}_readme_plain", dir_name.replace(['.', '-'], "_")); let test = quote! 
{ #[test] fn #fn_name() { let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("readme_tests").join(#dir_name); let readme_plain = std::fs::read_to_string(path.join("README")).unwrap(); let expected_description = if path.join("description").exists() { Some(std::fs::read_to_string(path.join("description")).unwrap()) } else { None }; let (actual_description, actual_md) = description_from_readme_plain(&readme_plain).unwrap(); let actual_md = serde_yaml::to_value(actual_md).unwrap(); assert_eq!(actual_description, expected_description); if path.join("expected.yaml").exists() { let expected_md: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(path.join("expected.yaml")).unwrap()).unwrap(); assert_eq!(actual_md, expected_md); } } }; write!(w, "{}", test)?; } } } Ok(()) } fn main() { let out_dir = env::var("OUT_DIR").unwrap(); generate_upstream_tests( Path::new("testdata"), &Path::new(&out_dir).join("upstream_tests.rs"), ) .unwrap(); generate_readme_tests( Path::new("readme_tests"), &Path::new(&out_dir).join("readme_tests.rs"), ) .unwrap(); } upstream-ontologist-0.3.6/man/autodoap.1000064400000000000000000000023471046102023000163500ustar 00000000000000.TH AUTODOAP 1 'September 2023' 'autodoap 0.1.36' 'User Commands' .SH NAME autodoap \- automatically write DOAP files for upstream projects .SH DESCRIPTION autodoap [\-h] [\-\-trust] [\-\-disable\-net\-access] [\-\-check] [\-\-consult\-external\-directory] [\-\-version] [path] This tool tries to guess upstream metadata (Homepage, Contact, VCS Repository) information for an upstream project. It does this by parsing various files in the package, and possibly calling out to external services (unless --disable-net-access is specified). Data is written to standard out in DOAP. .SS "positional arguments:" .IP path .SS "optional arguments:" .TP \fB\-h\fR, \fB\-\-help\fR show this help message and exit .TP \fB\-\-trust\fR Whether to allow running code from the package. 
.TP \fB\-\-disable\-net\-access\fR Do not probe external services. .TP \fB\-\-check\fR Check guessed metadata against external sources. .TP \fB\-\-consult\-external\-directory\fR Pull in external (not maintained by upstream) directory data .TP \fB\-\-version\fR show program's version number and exit .SH "SEE ALSO" \&\fIapply-multiarch-hints\fR\|(1) \&\fIguess-upstream-metadata\fR\|(1) \&\fIlintian-brush\fR\|(1) \&\fIlintian\fR\|(1) .SH AUTHORS Jelmer Vernooij upstream-ontologist-0.3.6/man/guess-upstream-metadata.1000064400000000000000000000023331046102023000212710ustar 00000000000000.TH GUESS-UPSTREAM-METADATA 1 'September 2023' 'guess-upstream-metadata 0.1.36' 'User Commands' .SH NAME guess-upstream-metadata \- guess upstream package metadata .SH DESCRIPTION guess\-upstream\-metadata [\-h] [\-\-trust] [\-\-disable\-net\-access] [\-\-check] [\-\-consult\-external\-directory] [\-\-version] [path] This tool tries to guess upstream metadata (Homepage, Contact, VCS Repository) for an upstream project. It does this by parsing various files in the package, and possibly calling out to external services (unless --disable-net-access is specified). .SS "positional arguments:" .IP path .SS "optional arguments:" .TP \fB\-h\fR, \fB\-\-help\fR show this help message and exit .TP \fB\-\-trust\fR Whether to allow running code from the package. .TP \fB\-\-disable\-net\-access\fR Do not probe external services. .TP \fB\-\-check\fR Check guessed metadata against external sources. .TP \fB\-\-consult\-external\-directory\fR Pull in external (not maintained by upstream) directory data .TP \fB\-\-version\fR show program's version number and exit .SH "SEE ALSO" \&\fIapply-multiarch-hints\fR\|(1) \&\fIguess-upstream-metadata\fR\|(1) \&\fIlintian-brush\fR\|(1) \&\fIlintian\fR\|(1) .SH AUTHORS Jelmer Vernooij upstream-ontologist-0.3.6/readme_tests/aiozipkin/README.rst000064400000000000000000000147421046102023000220440ustar 00000000000000aiozipkin ========= .. 
image:: https://github.com/aio-libs/aiozipkin/workflows/CI/badge.svg :target: https://github.com/aio-libs/aiozipkin/actions?query=workflow%3ACI .. image:: https://codecov.io/gh/aio-libs/aiozipkin/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/aiozipkin .. image:: https://api.codeclimate.com/v1/badges/1ff813d5cad2d702cbf1/maintainability :target: https://codeclimate.com/github/aio-libs/aiozipkin/maintainability :alt: Maintainability .. image:: https://img.shields.io/pypi/v/aiozipkin.svg :target: https://pypi.python.org/pypi/aiozipkin .. image:: https://readthedocs.org/projects/aiozipkin/badge/?version=latest :target: http://aiozipkin.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status .. image:: https://badges.gitter.im/Join%20Chat.svg :target: https://gitter.im/aio-libs/Lobby :alt: Chat on Gitter **aiozipkin** is Python 3.6+ module that adds distributed tracing capabilities from asyncio_ applications with zipkin (http://zipkin.io) server instrumentation. zipkin_ is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in microservice architectures. It manages both the collection and lookup of this data. Zipkin’s design is based on the Google Dapper paper. Applications are instrumented with **aiozipkin** report timing data to zipkin_. The Zipkin UI also presents a Dependency diagram showing how many traced requests went through each application. If you are troubleshooting latency problems or errors, you can filter or sort all traces based on the application, length of trace, annotation, or timestamp. .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/zipkin_animation2.gif :alt: zipkin ui animation Features ======== * Distributed tracing capabilities to **asyncio** applications. * Support zipkin_ ``v2`` protocol. * Easy to use API. * Explicit context handling, no thread local variables. * Can work with jaeger_ and stackdriver_ through zipkin compatible API. 
zipkin vocabulary ----------------- Before code lets learn important zipkin_ vocabulary, for more detailed information please visit https://zipkin.io/pages/instrumenting .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/zipkin_glossary.png :alt: zipkin ui glossary * **Span** represents one specific method (RPC) call * **Annotation** string data associated with a particular timestamp in span * **Tag** - key and value associated with given span * **Trace** - collection of spans, related to serving particular request Simple example -------------- .. code:: python import asyncio import aiozipkin as az async def run(): # setup zipkin client zipkin_address = 'http://127.0.0.1:9411/api/v2/spans' endpoint = az.create_endpoint( "simple_service", ipv4="127.0.0.1", port=8080) tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0) # create and setup new trace with tracer.new_trace(sampled=True) as span: # give a name for the span span.name("Slow SQL") # tag with relevant information span.tag("span_type", "root") # indicate that this is client span span.kind(az.CLIENT) # make timestamp and name it with START SQL query span.annotate("START SQL SELECT * FROM") # imitate long SQL query await asyncio.sleep(0.1) # make other timestamp and name it "END SQL" span.annotate("END SQL") await tracer.close() if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(run()) aiohttp example --------------- *aiozipkin* includes *aiohttp* server instrumentation, for this create `web.Application()` as usual and install aiozipkin plugin: .. code:: python import aiozipkin as az def init_app(): host, port = "127.0.0.1", 8080 app = web.Application() endpoint = az.create_endpoint("AIOHTTP_SERVER", ipv4=host, port=port) tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0) az.setup(app, tracer) That is it, plugin adds middleware that tries to fetch context from headers, and create/join new trace. 
Optionally on client side you can add propagation headers in order to force tracing and to see network latency between client and server. .. code:: python import aiozipkin as az endpoint = az.create_endpoint("AIOHTTP_CLIENT") tracer = await az.create(zipkin_address, endpoint) with tracer.new_trace() as span: span.kind(az.CLIENT) headers = span.context.make_headers() host = "http://127.0.0.1:8080/api/v1/posts/{}".format(i) resp = await session.get(host, headers=headers) await resp.text() Documentation ------------- http://aiozipkin.readthedocs.io/ Installation ------------ Installation process is simple, just:: $ pip install aiozipkin Support of other collectors =========================== **aiozipkin** can work with any other zipkin_ compatible service, currently we tested it with jaeger_ and stackdriver_. Jaeger support -------------- jaeger_ supports zipkin_ span format as result it is possible to use *aiozipkin* with jaeger_ server. You just need to specify *jaeger* server address and it should work out of the box. Not need to run local zipkin server. For more information see tests and jaeger_ documentation. .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/jaeger.png :alt: jaeger ui animation Stackdriver support ------------------- Google stackdriver_ supports zipkin_ span format as result it is possible to use *aiozipkin* with this google_ service. In order to make this work you need to setup zipkin service locally, that will send trace to the cloud. See google_ cloud documentation how to setup make zipkin collector: .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/stackdriver.png :alt: jaeger ui animation Requirements ------------ * Python_ 3.6+ * aiohttp_ .. _PEP492: https://www.python.org/dev/peps/pep-0492/ .. _Python: https://www.python.org .. _aiohttp: https://github.com/KeepSafe/aiohttp .. _asyncio: http://docs.python.org/3.5/library/asyncio.html .. _uvloop: https://github.com/MagicStack/uvloop .. 
_zipkin: http://zipkin.io .. _jaeger: http://jaeger.readthedocs.io/en/latest/ .. _stackdriver: https://cloud.google.com/stackdriver/ .. _google: https://cloud.google.com/trace/docs/zipkin upstream-ontologist-0.3.6/readme_tests/aiozipkin/description000064400000000000000000000013631046102023000226160ustar 00000000000000aiozipkin is Python 3.6+ module that adds distributed tracing capabilities from asyncio applications with zipkin (http://zipkin.io) server instrumentation. zipkin is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in microservice architectures. It manages both the collection and lookup of this data. Zipkin’s design is based on the Google Dapper paper. Applications are instrumented with aiozipkin report timing data to zipkin. The Zipkin UI also presents a Dependency diagram showing how many traced requests went through each application. If you are troubleshooting latency problems or errors, you can filter or sort all traces based on the application, length of trace, annotation, or timestamp. upstream-ontologist-0.3.6/readme_tests/argparse/README.rst000064400000000000000000000411161046102023000216460ustar 00000000000000ConfigArgParse -------------- .. image:: https://img.shields.io/pypi/v/ConfigArgParse.svg?style=flat :alt: PyPI version :target: https://pypi.python.org/pypi/ConfigArgParse .. image:: https://img.shields.io/pypi/pyversions/ConfigArgParse.svg :alt: Supported Python versions :target: https://pypi.python.org/pypi/ConfigArgParse .. image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master :alt: Travis CI build :target: https://travis-ci.org/bw2/ConfigArgParse Overview ~~~~~~~~ Applications with more than a handful of user-settable options are best configured through a combination of command line args, config files, hard-coded defaults, and in some cases, environment variables. 
Python's command line parsing modules such as argparse have very limited support for config files and environment variables, so this module extends argparse to add these features. Available on PyPI: http://pypi.python.org/pypi/ConfigArgParse .. image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master :target: https://travis-ci.org/bw2/ConfigArgParse Features ~~~~~~~~ - command-line, config file, env var, and default settings can now be defined, documented, and parsed in one go using a single API (if a value is specified in more than one way then: command line > environment variables > config file values > defaults) - config files can have .ini or .yaml style syntax (eg. key=value or key: value) - user can provide a config file via a normal-looking command line arg (eg. -c path/to/config.txt) rather than the argparse-style @config.txt - one or more default config file paths can be specified (eg. ['/etc/bla.conf', '~/.my_config'] ) - all argparse functionality is fully supported, so this module can serve as a drop-in replacement (verified by argparse unittests). - env vars and config file keys & syntax are automatically documented in the -h help message - new method :code:`print_values()` can report keys & values and where they were set (eg. command line, env var, config file, or default). - lite-weight (no 3rd-party library dependencies except (optionally) PyYAML) - extensible (:code:`ConfigFileParser` can be subclassed to define a new config file format) - unittested by running the unittests that came with argparse but on configargparse, and using tox to test with Python 2.7 and Python 3+ Example ~~~~~~~ *config_test.py*: Script that defines 4 options and a positional arg and then parses and prints the values. Also, it prints out the help message as well as the string produced by :code:`format_values()` to show what they look like. .. 
code:: py import configargparse p = configargparse.ArgParser(default_config_files=['/etc/app/conf.d/*.conf', '~/.my_settings']) p.add('-c', '--my-config', required=True, is_config_file=True, help='config file path') p.add('--genome', required=True, help='path to genome file') # this option can be set in a config file because it starts with '--' p.add('-v', help='verbose', action='store_true') p.add('-d', '--dbsnp', help='known variants .vcf', env_var='DBSNP_PATH') # this option can be set in a config file because it starts with '--' p.add('vcf', nargs='+', help='variant file(s)') options = p.parse_args() print(options) print("----------") print(p.format_help()) print("----------") print(p.format_values()) # useful for logging where different settings came from *config.txt:* Since the script above set the config file as required=True, lets create a config file to give it: .. code:: py # settings for config_test.py genome = HCMV # cytomegalovirus genome dbsnp = /data/dbsnp/variants.vcf *command line:* Now run the script and pass it the config file: .. code:: bash DBSNP_PATH=/data/dbsnp/variants_v2.vcf python config_test.py --my-config config.txt f1.vcf f2.vcf *output:* Here is the result: .. code:: bash Namespace(dbsnp='/data/dbsnp/variants_v2.vcf', genome='HCMV', my_config='config.txt', v=False, vcf=['f1.vcf', 'f2.vcf']) ---------- usage: config_test.py [-h] -c MY_CONFIG --genome GENOME [-v] [-d DBSNP] vcf [vcf ...] Args that start with '--' (eg. --genome) can also be set in a config file (/etc/app/conf.d/*.conf or ~/.my_settings or specified via -c). Config file syntax allows: key=value, flag=true, stuff=[a,b,c] (for details, see syntax at https://goo.gl/R74nmi). If an arg is specified in more than one place, then commandline values override environment variables which override config file values which override defaults. 
positional arguments: vcf variant file(s) optional arguments: -h, --help show this help message and exit -c MY_CONFIG, --my-config MY_CONFIG config file path --genome GENOME path to genome file -v verbose -d DBSNP, --dbsnp DBSNP known variants .vcf [env var: DBSNP_PATH] ---------- Command Line Args: --my-config config.txt f1.vcf f2.vcf Environment Variables: DBSNP_PATH: /data/dbsnp/variants_v2.vcf Config File (config.txt): genome: HCMV Special Values ~~~~~~~~~~~~~~ Under the hood, configargparse handles environment variables and config file values by converting them to their corresponding command line arg. For example, "key = value" will be processed as if "--key value" was specified on the command line. Also, the following special values (whether in a config file or an environment variable) are handled in a special way to support booleans and lists: - :code:`key = true` is handled as if "--key" was specified on the command line. In your python code this key must be defined as a boolean flag (eg. action="store_true" or similar). - :code:`key = [value1, value2, ...]` is handled as if "--key value1 --key value2" etc. was specified on the command line. In your python code this key must be defined as a list (eg. action="append"). Config File Syntax ~~~~~~~~~~~~~~~~~~ Only command line args that have a long version (eg. one that starts with '--') can be set in a config file. For example, "--color" can be set by putting "color=green" in a config file. The config file syntax depends on the constructor arg: :code:`config_file_parser_class` which can be set to one of the provided classes: :code:`DefaultConfigFileParser`, :code:`YAMLConfigFileParser`, :code:`ConfigparserConfigFileParser` or to your own subclass of the :code:`ConfigFileParser` abstract class. *DefaultConfigFileParser* - the full range of valid syntax is: .. 
code:: yaml # this is a comment ; this is also a comment (.ini style) --- # lines that start with --- are ignored (yaml style) ------------------- [section] # .ini-style section names are treated as comments # how to specify a key-value pair (all of these are equivalent): name value # key is case sensitive: "Name" isn't "name" name = value # (.ini style) (white space is ignored, so name = value same as name=value) name: value # (yaml style) --name value # (argparse style) # how to set a flag arg (eg. arg which has action="store_true") --name name name = True # "True" and "true" are the same # how to specify a list arg (eg. arg which has action="append") fruit = [apple, orange, lemon] indexes = [1, 12, 35 , 40] *YAMLConfigFileParser* - allows a subset of YAML syntax (http://goo.gl/VgT2DU) .. code:: yaml # a comment name1: value name2: true # "True" and "true" are the same fruit: [apple, orange, lemon] indexes: [1, 12, 35, 40] *ConfigparserConfigFileParser* - allows a subset of python's configparser module syntax (https://docs.python.org/3.7/library/configparser.html). In particular the following configparser options are set: .. code:: py config = configparser.ArgParser( delimiters=("=",":"), allow_no_value=False, comment_prefixes=("#",";"), inline_comment_prefixes=("#",";"), strict=True, empty_lines_in_values=False, ) Once configparser parses the config file all section names are removed, thus all keys must have unique names regardless of which INI section they are defined under. Also, any keys which have python list syntax are converted to lists by evaluating them as python code using ast.literal_eval (https://docs.python.org/3/library/ast.html#ast.literal_eval). To facilitate this all multi-line values are converted to single-line values. Thus multi-line string values will have all new-lines converted to spaces. 
Note, since key-value pairs that have python dictionary syntax are saved as single-line strings, even if formatted across multiple lines in the config file, dictionaries can be read in and converted to valid python dictionaries with PyYAML's safe_load. Example given below: .. code:: py # inside your config file (e.g. config.ini) [section1] # INI sections treated as comments system1_settings: { # start of multi-line dictionary 'a':True, 'b':[2, 4, 8, 16], 'c':{'start':0, 'stop':1000}, 'd':'experiment 32 testing simulation with parameter a on' } # end of multi-line dictionary value ....... # in your configargparse setup import configargparse import yaml parser = configargparse.ArgParser( config_file_parser_class=configargparse.ConfigparserConfigFileParser ) parser.add_argument('--system1_settings', type=yaml.safe_load) args = parser.parse_args() # now args.system1 is a valid python dict ArgParser Singletons ~~~~~~~~~~~~~~~~~~~~~~~~~ To make it easier to configure different modules in an application, configargparse provides globally-available ArgumentParser instances via configargparse.get_argument_parser('name') (similar to logging.getLogger('name')). Here is an example of an application with a utils module that also defines and retrieves its own command-line args. *main.py* .. code:: py import configargparse import utils p = configargparse.get_argument_parser() p.add_argument("-x", help="Main module setting") p.add_argument("--m-setting", help="Main module setting") options = p.parse_known_args() # using p.parse_args() here may raise errors. *utils.py* .. code:: py import configargparse p = configargparse.get_argument_parser() p.add_argument("--utils-setting", help="Config-file-settable option for utils") if __name__ == "__main__": options = p.parse_known_args() Help Formatters ~~~~~~~~~~~~~~~ :code:`ArgumentDefaultsRawHelpFormatter` is a new HelpFormatter that both adds default values AND disables line-wrapping. 
It can be passed to the constructor: :code:`ArgParser(.., formatter_class=ArgumentDefaultsRawHelpFormatter)` Aliases ~~~~~~~ The configargparse.ArgumentParser API inherits its class and method names from argparse and also provides the following shorter names for convenience: - p = configargparse.get_arg_parser() # get global singleton instance - p = configargparse.get_parser() - p = configargparse.ArgParser() # create a new instance - p = configargparse.Parser() - p.add_arg(..) - p.add(..) - options = p.parse(..) HelpFormatters: - RawFormatter = RawDescriptionHelpFormatter - DefaultsFormatter = ArgumentDefaultsHelpFormatter - DefaultsRawFormatter = ArgumentDefaultsRawHelpFormatter Design Notes ~~~~~~~~~~~~ Unit tests: tests/test_configargparse.py contains custom unittests for features specific to this module (such as config file and env-var support), as well as a hook to load and run argparse unittests (see the built-in test.test_argparse module) but on configargparse in place of argparse. This ensures that configargparse will work as a drop in replacement for argparse in all usecases. Previously existing modules (PyPI search keywords: config argparse): - argparse (built-in module Python v2.7+) - Good: - fully featured command line parsing - can read args from files using an easy to understand mechanism - Bad: - syntax for specifying config file path is unusual (eg. @file.txt)and not described in the user help message. - default config file syntax doesn't support comments and is unintuitive (eg. --namevalue) - no support for environment variables - ConfArgParse v1.0.15 (https://pypi.python.org/pypi/ConfArgParse) - Good: - extends argparse with support for config files parsed by ConfigParser - clear documentation in README - Bad: - config file values are processed using ArgumentParser.set_defaults(..) which means "required" and "choices" are not handled as expected. 
For example, if you specify a required value in a config file, you still have to specify it again on the command line. - doesn't work with Python 3 yet - no unit tests, code not well documented - appsettings v0.5 (https://pypi.python.org/pypi/appsettings) - Good: - supports config file (yaml format) and env_var parsing - supports config-file-only setting for specifying lists and dicts - Bad: - passes in config file and env settings via parse_args namespace param - tests not finished and don't work with Python 3 (import StringIO) - argparse_config v0.5.1 (https://pypi.python.org/pypi/argparse_config) - Good: - similar features to ConfArgParse v1.0.15 - Bad: - doesn't work with Python 3 (error during pip install) - yconf v0.3.2 - (https://pypi.python.org/pypi/yconf) - features and interface not that great - hieropt v0.3 - (https://pypi.python.org/pypi/hieropt) - doesn't appear to be maintained, couldn't find documentation - configurati v0.2.3 - (https://pypi.python.org/pypi/configurati) - Good: - JSON, YAML, or Python configuration files - handles rich data structures such as dictionaries - can group configuration names into sections (like .ini files) - Bad: - doesn't work with Python 3 - 2+ years since last release to PyPI - apparently unmaintained Design choices: 1. all options must be settable via command line. Having options that can only be set using config files or env. vars adds complexity to the API, and is not a useful enough feature since the developer can split up options into sections and call a section "config file keys", with command line args that are just "--" plus the config key. 2. config file and env. var settings should be processed by appending them to the command line (another benefit of #1). This is an easy-to-implement solution and implicitly takes care of checking that all "required" args are provided, etc., plus the behavior should be easy for users to understand. 3. 
configargparse shouldn't override argparse's convert_arg_line_to_args method so that all argparse unit tests can be run on configargparse. 4. in terms of what to allow for config file keys, the "dest" value of an option can't serve as a valid config key because many options can have the same dest. Instead, since multiple options can't use the same long arg (eg. "--long-arg-x"), let the config key be either "--long-arg-x" or "long-arg-x". This means the developer can allow only a subset of the command-line args to be specified via config file (eg. short args like -x would be excluded). Also, that way config keys are automatically documented whenever the command line args are documented in the help message. 5. don't force users to put config file settings in the right .ini [sections]. This doesn't have a clear benefit since all options are command-line settable, and so have a globally unique key anyway. Enforcing sections just makes things harder for the user and adds complexity to the implementation. 6. if necessary, config-file-only args can be added later by implementing a separate add method and using the namespace arg as in appsettings_v0.5 Relevant sites: - http://stackoverflow.com/questions/6133517/parse-config-file-environment-and-command-line-arguments-to-get-a-single-coll - http://tricksntweaks.blogspot.com/2013_05_01_archive.html - http://www.youtube.com/watch?v=vvCwqHgZJc8#t=35 .. |Travis CI Status for bw2/ConfigArgParse| image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master Versioning ~~~~~~~~~~ This software follows `Semantic Versioning`_ .. _Semantic Versioning: http://semver.org/ upstream-ontologist-0.3.6/readme_tests/argparse/description000064400000000000000000000006011046102023000224170ustar 00000000000000Applications with more than a handful of user-settable options are best configured through a combination of command line args, config files, hard-coded defaults, and in some cases, environment variables. 
Python's command line parsing modules such as argparse have very limited support for config files and environment variables, so this module extends argparse to add these features. upstream-ontologist-0.3.6/readme_tests/bitlbee/README.md000064400000000000000000000034761046102023000212470ustar 00000000000000# BitlBee ![](https://www.bitlbee.org/style/logo.png) [![Build Status](https://travis-ci.org/bitlbee/bitlbee.svg)](https://travis-ci.org/bitlbee/bitlbee) [![Coverity Scan Build Status](https://scan.coverity.com/projects/4028/badge.svg)](https://scan.coverity.com/projects/4028) An IRC to other chat networks gateway Main website: https://www.bitlbee.org/ Bug tracker: https://bugs.bitlbee.org/ Wiki: https://wiki.bitlbee.org/ License: GPLv2 ## Installation BitlBee is available in the package managers of most distros. For debian/ubuntu/etc you may use the nightly APT repository: https://code.bitlbee.org/debian/ You can also use a public server (such as `im.bitlbee.org`) instead of installing it: https://www.bitlbee.org/main.php/servers.html ## Compiling If you wish to compile it yourself, ensure you have the following packages and their headers: * glib 2.32 or newer (not to be confused with glibc) * gnutls * python 2 or 3 (for the user guide) Some optional features have additional dependencies, such as libpurple, libotr, libevent, etc. NSS and OpenSSL are also available but not as well supported as GnuTLS. Once you have the dependencies, building should be a matter of: ./configure make sudo make install ## Development tips * To enable debug symbols: `./configure --debug=1` * To get some additional debug output for some protocols: `BITLBEE_DEBUG=1 ./bitlbee -Dnv` * Use github pull requests against the 'develop' branch to submit patches. * The coding style based on K&R with tabs and 120 columns. See `./doc/uncrustify.cfg` for the parameters used to reformat the code. 
* Mappings of bzr revisions to git commits (for historical purposes) are available in `./doc/git-bzr-rev-map` * See also `./doc/README` and `./doc/HACKING` ## Help? Join **#BitlBee** on OFTC (**irc.oftc.net**) (OFTC, *not* freenode!) upstream-ontologist-0.3.6/readme_tests/bitlbee/description000064400000000000000000000000461046102023000222240ustar 00000000000000An IRC to other chat networks gateway upstream-ontologist-0.3.6/readme_tests/bup/README.md000064400000000000000000000617601046102023000204270ustar 00000000000000bup: It backs things up ======================= bup is a program that backs things up. It's short for "backup." Can you believe that nobody else has named an open source program "bup" after all this time? Me neither. Despite its unassuming name, bup is pretty cool. To give you an idea of just how cool it is, I wrote you this poem: Bup is teh awesome What rhymes with awesome? I guess maybe possum But that's irrelevant. Hmm. Did that help? Maybe prose is more useful after all. Reasons bup is awesome ---------------------- bup has a few advantages over other backup software: - It uses a rolling checksum algorithm (similar to rsync) to split large files into chunks. The most useful result of this is you can backup huge virtual machine (VM) disk images, databases, and XML files incrementally, even though they're typically all in one huge file, and not use tons of disk space for multiple versions. - It uses the packfile format from git (the open source version control system), so you can access the stored data even if you don't like bup's user interface. - Unlike git, it writes packfiles *directly* (instead of having a separate garbage collection / repacking stage) so it's fast even with gratuitously huge amounts of data. bup's improved index formats also allow you to track far more filenames than git (millions) and keep track of far more objects (hundreds or thousands of gigabytes). 
- Data is "automagically" shared between incremental backups without having to know which backup is based on which other one - even if the backups are made from two different computers that don't even know about each other. You just tell bup to back stuff up, and it saves only the minimum amount of data needed. - You can back up directly to a remote bup server, without needing tons of temporary disk space on the computer being backed up. And if your backup is interrupted halfway through, the next run will pick up where you left off. And it's easy to set up a bup server: just install bup on any machine where you have ssh access. - Bup can use "par2" redundancy to recover corrupted backups even if your disk has undetected bad sectors. - Even when a backup is incremental, you don't have to worry about restoring the full backup, then each of the incrementals in turn; an incremental backup *acts* as if it's a full backup, it just takes less disk space. - You can mount your bup repository as a FUSE filesystem and access the content that way, and even export it over Samba. - It's written in python (with some C parts to make it faster) so it's easy for you to extend and maintain. Reasons you might want to avoid bup ----------------------------------- - It's not remotely as well tested as something like tar, so it's more likely to eat your data. It's also missing some probably-critical features, though fewer than it used to be. - It requires python 3.7 or newer (or 2.7 for a bit longer), a C compiler, and an installed git version >= 1.5.6. It also requires par2 if you want fsck to be able to generate the information needed to recover from some types of corruption. While python 2.7 is still supported, please make plans to upgrade. Python 2 upstream support ended on 2020-01-01, and we plan to drop support soon too. - It currently only works on Linux, FreeBSD, NetBSD, OS X >= 10.4, Solaris, or Windows (with Cygwin, and WSL). Patches to support other platforms are welcome. 
- Until resolved, a [glibc bug](https://sourceware.org/bugzilla/show_bug.cgi?id=26034) might cause bup to crash on startup for some (unusual) command line argument values, when bup is configured to use Python 3. - Any items in "Things that are stupid" below. Notable changes introduced by a release ======================================= - Changes in 0.32 as compared to 0.31 - Changes in 0.31 as compared to 0.30.1 - Changes in 0.30.1 as compared to 0.30 - Changes in 0.30 as compared to 0.29.3 - Changes in 0.29.3 as compared to 0.29.2 - Changes in 0.29.2 as compared to 0.29.1 - Changes in 0.29.1 as compared to 0.29 - Changes in 0.29 as compared to 0.28.1 - Changes in 0.28.1 as compared to 0.28 - Changes in 0.28 as compared to 0.27.1 - Changes in 0.27.1 as compared to 0.27 Test status =========== | branch | Debian | FreeBSD | macOS | |--------|------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------| | master | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | [![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=macos)](https://cirrus-ci.com/github/bup/bup) | | 0.30.x | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | 
[![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=macos)](https://cirrus-ci.com/github/bup/bup) | | 0.29.x | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | [![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=macos)](https://cirrus-ci.com/github/bup/bup) | Getting started =============== From source ----------- - Check out the bup source code using git: ```sh git clone https://github.com/bup/bup ``` - This will leave you on the master branch, which is perfect if you would like to help with development, but if you'd just like to use bup, please check out the latest stable release like this: ```sh git checkout 0.32 ``` You can see the latest stable release here: https://github.com/bup/bup/releases. - Install the required python libraries (including the development libraries). 
On very recent Debian/Ubuntu versions, this may be sufficient (run as root): ```sh apt-get build-dep bup ``` Otherwise try this: ```sh apt-get install python3.7-dev python3-fuse apt-get install python3-pyxattr python3-pytest apt-get install python3-distutils apt-get install pkg-config linux-libc-dev libacl1-dev apt-get install gcc make acl attr rsync apt-get install python3-pytest-xdist # optional (parallel tests) apt-get install par2 # optional (error correction) apt-get install libreadline-dev # optional (bup ftp) apt-get install python3-tornado # optional (bup web) ``` Or, if you can't yet migrate to Python 3 (please try to soon): ```sh apt-get install python2.7-dev python-fuse apt-get install python-pyxattr python-pytest apt-get install pkg-config linux-libc-dev libacl1-dev apt-get install gcc make acl attr rsync apt-get install python-pytest-xdist # optional (parallel tests) apt-get install par2 # optional (error correction) apt-get install libreadline-dev # optional (bup ftp) apt-get install python-tornado # optional (bup web) ``` On CentOS (for CentOS 6, at least), this should be sufficient (run as root): ```sh yum groupinstall "Development Tools" yum install python2 python2-devel libacl-devel pylibacl yum install fuse-python pyxattr yum install perl-Time-HiRes yum install readline-devel # optional (bup ftp) yum install python-tornado # optional (bup web) ``` In addition to the default CentOS repositories, you may need to add RPMForge (for fuse-python) and EPEL (for pyxattr). On Cygwin, install python, make, rsync, and gcc4. 
If you would like to use the optional bup web server on systems without a tornado package, you may want to try this: ```sh pip install tornado ``` - Build the python module and symlinks: ```sh make ``` - Run the tests: ```sh make long-check ``` or if you're in a bit more of a hurry: ```sh make check ``` If you have the Python xdist module installed, then you can probably run the tests faster by adding the make -j option (see ./HACKING for additional information): ```sh make -j check ``` The tests should pass. If they don't pass for you, stop here and send an email to bup-list@googlegroups.com. Though if there are symbolic links along the current working directory path, the tests may fail. Running something like this before "make test" should sidestep the problem: ```sh cd "$(pwd -P)" ``` - You can install bup via "make install", and override the default destination with DESTDIR and PREFIX. Files are normally installed to "$DESTDIR/$PREFIX" where DESTDIR is empty by default, and PREFIX is set to /usr/local. So if you wanted to install bup to /opt/bup, you might do something like this: ```sh make install DESTDIR=/opt/bup PREFIX='' ``` - The Python executable that bup will use is chosen by ./configure, which will search for a reasonable version unless PYTHON is set in the environment, in which case, bup will use that path. You can see which Python executable was chosen by looking at the configure output, or examining cmd/python-cmd.sh, and you can change the selection by re-running ./configure. 
From binary packages -------------------- Binary packages of bup are known to be built for the following OSes: - Debian: http://packages.debian.org/search?searchon=names&keywords=bup - Ubuntu: http://packages.ubuntu.com/search?searchon=names&keywords=bup - pkgsrc (NetBSD, Dragonfly, and others) http://pkgsrc.se/sysutils/bup http://cvsweb.netbsd.org/bsdweb.cgi/pkgsrc/sysutils/bup/ - Arch Linux: https://www.archlinux.org/packages/?sort=&q=bup - Fedora: https://apps.fedoraproject.org/packages/bup - macOS (Homebrew): https://formulae.brew.sh/formula/bup Using bup --------- - Get help for any bup command: ```sh bup help bup help init bup help index bup help save bup help restore ... ``` - Initialize the default BUP_DIR (~/.bup -- you can choose another by either specifying `bup -d DIR ...` or setting the `BUP_DIR` environment variable for a command): ```sh bup init ``` - Make a local backup (-v or -vv will increase the verbosity): ```sh bup index /etc bup save -n local-etc /etc ``` - Restore a local backup to ./dest: ```sh bup restore -C ./dest local-etc/latest/etc ls -l dest/etc ``` - Look at how much disk space your backup took: ```sh du -s ~/.bup ``` - Make another backup (which should be mostly identical to the last one; notice that you don't have to *specify* that this backup is incremental, it just saves space automatically): ```sh bup index /etc bup save -n local-etc /etc ``` - Look how little extra space your second backup used (on top of the first): ```sh du -s ~/.bup ``` - Get a list of your previous backups: ```sh bup ls local-etc ``` - Restore your first backup again: ```sh bup restore -C ./dest-2 local-etc/2013-11-23-11195/etc ``` - Make a backup to a remote server which must already have the 'bup' command somewhere in its PATH (see /etc/profile, etc/environment, ~/.profile, or ~/.bashrc), and be accessible via ssh. 
Make sure to replace SERVERNAME with the actual hostname of your server: ```sh bup init -r SERVERNAME:path/to/remote-bup-dir bup index /etc bup save -r SERVERNAME:path/to/remote-bup-dir -n local-etc /etc ``` - Make a remote backup to ~/.bup on SERVER: ```sh bup index /etc bup save -r SERVER: -n local-etc /etc ``` - See what saves are available in ~/.bup on SERVER: ```sh bup ls -r SERVER: ``` - Restore the remote backup to ./dest: ```sh bup restore -r SERVER: -C ./dest local-etc/latest/etc ls -l dest/etc ``` - Defend your backups from death rays (OK fine, more likely from the occasional bad disk block). This writes parity information (currently via par2) for all of the existing data so that bup may be able to recover from some amount of repository corruption: ```sh bup fsck -g ``` - Use split/join instead of index/save/restore. Try making a local backup using tar: ```sh tar -cvf - /etc | bup split -n local-etc -vv ``` - Try restoring the tarball: ```sh bup join local-etc | tar -tf - ``` - Look at how much disk space your backup took: ```sh du -s ~/.bup ``` - Make another tar backup: ```sh tar -cvf - /etc | bup split -n local-etc -vv ``` - Look at how little extra space your second backup used on top of the first: ```sh du -s ~/.bup ``` - Restore the first tar backup again (the ~1 is git notation for "one older than the most recent"): ```sh bup join local-etc~1 | tar -tf - ``` - Get a list of your previous split-based backups: ```sh GIT_DIR=~/.bup git log local-etc ``` - Save a tar archive to a remote server (without tar -z to facilitate deduplication): ```sh tar -cvf - /etc | bup split -r SERVERNAME: -n local-etc -vv ``` - Restore the archive: ```sh bup join -r SERVERNAME: local-etc | tar -tf - ``` That's all there is to it! Notes on FreeBSD ---------------- - FreeBSD's default 'make' command doesn't like bup's Makefile. 
In order to compile the code, run tests and install bup, you need to install GNU Make from the port named 'gmake' and use its executable instead in the commands seen above. (i.e. 'gmake test' runs bup's test suite) - Python's development headers are automatically installed with the 'python' port so there's no need to install them separately. - To use the 'bup fuse' command, you need to install the fuse kernel module from the 'fusefs-kmod' port in the 'sysutils' section and the libraries from the port named 'py-fusefs' in the 'devel' section. - The 'par2' command can be found in the port named 'par2cmdline'. - In order to compile the documentation, you need pandoc which can be found in the port named 'hs-pandoc' in the 'textproc' section. Notes on NetBSD/pkgsrc ---------------------- - See pkgsrc/sysutils/bup, which should be the most recent stable release and includes man pages. It also has a reasonable set of dependencies (git, par2, py-fuse-bindings). - The "fuse-python" package referred to is hard to locate, and is a separate tarball for the python language binding distributed by the fuse project on sourceforge. It is available as pkgsrc/filesystems/py-fuse-bindings and on NetBSD 5, "bup fuse" works with it. - "bup fuse" presents every directory/file as inode 0. The directory traversal code ("fts") in NetBSD's libc will interpret this as a cycle and error out, so "ls -R" and "find" will not work. - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. Notes on Cygwin --------------- - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. - In test/ext/test-misc, two tests have been disabled. These tests check to see that repeated saves produce identical trees and that an intervening index doesn't change the SHA1. Apparently Cygwin has some unusual behaviors with respect to access times (that probably warrant further investigation). 
Possibly related: http://cygwin.com/ml/cygwin/2007-06/msg00436.html Notes on OS X ------------- - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. How it works ============ Basic storage: -------------- bup stores its data in a git-formatted repository. Unfortunately, git itself doesn't actually behave very well for bup's use case (huge numbers of files, files with huge sizes, retaining file permissions/ownership are important), so we mostly don't use git's *code* except for a few helper programs. For example, bup has its own git packfile writer written in python. Basically, 'bup split' reads the data on stdin (or from files specified on the command line), breaks it into chunks using a rolling checksum (similar to rsync), and saves those chunks into a new git packfile. There is at least one git packfile per backup. When deciding whether to write a particular chunk into the new packfile, bup first checks all the other packfiles that exist to see if they already have that chunk. If they do, the chunk is skipped. git packs come in two parts: the pack itself (*.pack) and the index (*.idx). The index is pretty small, and contains a list of all the objects in the pack. Thus, when generating a remote backup, we don't have to have a copy of the packfiles from the remote server: the local end just downloads a copy of the server's *index* files, and compares objects against those when generating the new pack, which it sends directly to the server. The "-n" option to 'bup split' and 'bup save' is the name of the backup you want to create, but it's actually implemented as a git branch. So you can do cute things like checkout a particular branch using git, and receive a bunch of chunk files corresponding to the file you split. If you use '-b' or '-t' or '-c' instead of '-n', bup split will output a list of blobs, a tree containing that list of blobs, or a commit containing that tree, respectively, to stdout. 
You can use this to construct your own scripts that do something with those values. The bup index: -------------- 'bup index' walks through your filesystem and updates a file (whose name is, by default, ~/.bup/bupindex) to contain the name, attributes, and an optional git SHA1 (blob id) of each file and directory. 'bup save' basically just runs the equivalent of 'bup split' a whole bunch of times, once per file in the index, and assembles a git tree that contains all the resulting objects. Among other things, that makes 'git diff' much more useful (compared to splitting a tarball, which is essentially a big binary blob). However, since bup splits large files into smaller chunks, the resulting tree structure doesn't *exactly* correspond to what git itself would have stored. Also, the tree format used by 'bup save' will probably change in the future to support storing file ownership, more complex file permissions, and so on. If a file has previously been written by 'bup save', then its git blob/tree id is stored in the index. This lets 'bup save' avoid reading that file to produce future incremental backups, which means it can go *very* fast unless a lot of files have changed. Things that are stupid for now but which we'll fix later ======================================================== Help with any of these problems, or others, is very welcome. Join the mailing list (see below) if you'd like to help. - 'bup save' and 'bup restore' have immature metadata support. On the plus side, they actually do have support now, but it's new, and not remotely as well tested as tar/rsync/whatever's. However, you have to start somewhere, and as of 0.25, we think it's ready for more general use. Please let us know if you have any trouble. Also, if any strip or graft-style options are specified to 'bup save', then no metadata will be written for the root directory. That's obviously less than ideal. - bup is overly optimistic about mmap. 
Right now bup just assumes that it can mmap as large a block as it likes, and that mmap will never fail. Yeah, right... If nothing else, this has failed on 32-bit architectures (and 31-bit is even worse -- looking at you, s390). To fix this, we might just implement a FakeMmap[1] class that uses normal file IO and handles all of the mmap methods[2] that bup actually calls. Then we'd swap in one of those whenever mmap fails. This would also require implementing some of the methods needed to support "[]" array access, probably at a minimum __getitem__, __setitem__, and __setslice__ [3]. [1] http://comments.gmane.org/gmane.comp.sysutils.backup.bup/613 [2] http://docs.python.org/2/library/mmap.html [3] http://docs.python.org/2/reference/datamodel.html#emulating-container-types - 'bup index' is slower than it should be. It's still rather fast: it can iterate through all the filenames on my 600,000 file filesystem in a few seconds. But it still needs to rewrite the entire index file just to add a single filename, which is pretty nasty; it should just leave the new files in a second "extra index" file or something. - bup could use inotify for *really* efficient incremental backups. You could even have your system doing "continuous" backups: whenever a file changes, we immediately send an image of it to the server. We could give the continuous-backup process a really low CPU and I/O priority so you wouldn't even know it was running. - bup only has experimental support for pruning old backups. While you should now be able to drop old saves and branches with `bup rm`, and reclaim the space occupied by data that's no longer needed by other backups with `bup gc`, these commands are experimental, and should be handled with great care. See the man pages for more information. Unless you want to help test the new commands, one possible workaround is to just start a new BUP_DIR occasionally, i.e. bup-2013, bup-2014... 
- bup has never been tested on anything but Linux, FreeBSD, NetBSD, OS X, and Windows+Cygwin. There's nothing that makes it *inherently* non-portable, though, so that's mostly a matter of someone putting in some effort. (For a "native" Windows port, the most annoying thing is the absence of ssh in a default Windows installation.) - bup needs better documentation. According to an article about bup in Linux Weekly News (https://lwn.net/Articles/380983/), "it's a bit short on examples and a user guide would be nice." Documentation is the sort of thing that will never be great unless someone from outside contributes it (since the developers can never remember which parts are hard to understand). - bup is "relatively speedy" and has "pretty good" compression. ...according to the same LWN article. Clearly neither of those is good enough. We should have awe-inspiring speed and crazy-good compression. Must work on that. Writing more parts in C might help with the speed. - bup has no GUI. Actually, that's not stupid, but you might consider it a limitation. See the ["Related Projects"](https://bup.github.io/) list for some possible options. More Documentation ================== bup has an extensive set of man pages. Try using 'bup help' to get started, or use 'bup help SUBCOMMAND' for any bup subcommand (like split, join, index, save, etc.) to get details on that command. For further technical details, please see ./DESIGN. How you can help ================ bup is a work in progress and there are many ways it can still be improved. If you'd like to contribute patches, ideas, or bug reports, please join the bup mailing list. You can find the mailing list archives here: http://groups.google.com/group/bup-list and you can subscribe by sending a message to: bup-list+subscribe@googlegroups.com Please see ./HACKING for additional information, i.e. how to submit patches (hint - no pull requests), how we handle branches, etc. 
Have fun, Avery upstream-ontologist-0.3.6/readme_tests/bup/description000064400000000000000000000010261046102023000214030ustar 00000000000000bup is a program that backs things up. It's short for "backup." Can you believe that nobody else has named an open source program "bup" after all this time? Me neither. Despite its unassuming name, bup is pretty cool. To give you an idea of just how cool it is, I wrote you this poem: Bup is teh awesome What rhymes with awesome? I guess maybe possum But that's irrelevant. Hmm. Did that help? Maybe prose is more useful after all. upstream-ontologist-0.3.6/readme_tests/cbor2/README.rst000064400000000000000000000073231046102023000210530ustar 00000000000000.. image:: https://travis-ci.com/agronholm/cbor2.svg?branch=master :target: https://travis-ci.com/agronholm/cbor2 :alt: Build Status .. image:: https://coveralls.io/repos/github/agronholm/cbor2/badge.svg?branch=master :target: https://coveralls.io/github/agronholm/cbor2?branch=master :alt: Code Coverage .. image:: https://readthedocs.org/projects/cbor2/badge/?version=latest :target: https://cbor2.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status About ===== This library provides encoding and decoding for the Concise Binary Object Representation (CBOR) (`RFC 7049`_) serialization format. `Read the docs `_ to learn more. It is implemented in pure python with an optional C backend. On PyPy, cbor2 runs with almost identical performance to the C backend. .. _RFC 7049: https://tools.ietf.org/html/rfc7049 Features -------- * Simple api like ``json`` or ``pickle`` modules. * Support many `CBOR tags`_ with `stdlib objects`_. * Generic tag decoding. * `Shared value`_ references including cyclic references. * Optional C module backend tested on big- and little-endian architectures. * Extensible `tagged value handling`_ using ``tag_hook`` and ``object_hook`` on decode and ``default`` on encode. 
* Command-line diagnostic tool, converting CBOR file or stream to JSON ``python -m cbor2.tool`` (This is a lossy conversion, for diagnostics only) * Thorough test suite. .. _CBOR tags: https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml .. _stdlib objects: https://cbor2.readthedocs.io/en/latest/usage.html#tag-support .. _Shared value: http://cbor.schmorp.de/value-sharing .. _tagged value handling: https://cbor2.readthedocs.io/en/latest/customizing.html#using-the-cbor-tags-for-custom-types Installation ============ :: pip install cbor2 Requirements ------------ * Python >= 3.6 (or `PyPy3`_ 3.6+) * C-extension: Any C compiler that can build Python extensions. Any modern libc with the exception of Glibc<2.9 .. _PyPy3: https://www.pypy.org/ Building the C-Extension ------------------------ To force building of the optional C-extension, set OS env ``CBOR2_BUILD_C_EXTENSION=1``. To disable building of the optional C-extension, set OS env ``CBOR2_BUILD_C_EXTENSION=0``. If this environment variable is unset, setup.py will default to auto detecting a compatible C library and attempt to compile the extension. Usage ===== `Basic Usage `_ Command-line Usage ================== ``python -m cbor2.tool`` converts CBOR data in raw binary or base64 encoding into a representation that allows printing as JSON. This is a lossy transformation as each datatype is converted into something that can be represented as a JSON value. Usage:: # Pass hexadecimal through xxd. $ echo a16568656c6c6f65776f726c64 | xxd -r -ps | python -m cbor2.tool --pretty { "hello": "world" } # Decode Base64 directly $ echo ggEC | python -m cbor2.tool --decode [1, 2] # Read from a file encoded in Base64 $ python -m cbor2.tool -d tests/examples.cbor.b64 {...} It can be used in a pipeline with json processing tools like `jq`_ to allow syntax coloring, field extraction and more. 
CBOR data items concatenated into a sequence can be decoded also:: $ echo ggECggMEggUG | python -m cbor2.tool -d --sequence [1, 2] [3, 4] [5, 6] Multiple files can also be sent to a single output file:: $ python -m cbor2.tool -o all_files.json file1.cbor file2.cbor ... fileN.cbor .. _jq: https://stedolan.github.io/jq/ Security ======== This library has not been tested against malicious input. In theory it should be as safe as JSON, since unlike ``pickle`` the decoder does not execute any code. upstream-ontologist-0.3.6/readme_tests/cbor2/description000064400000000000000000000004001046102023000216170ustar 00000000000000 :target: https://travis-ci.com/agronholm/cbor2 :alt: Build Status :target: https://coveralls.io/github/agronholm/cbor2?branch=master :alt: Code Coverage :target: https://cbor2.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status upstream-ontologist-0.3.6/readme_tests/django-ical/README.rst000064400000000000000000000032631046102023000222130ustar 00000000000000django-ical =========== |pypi| |docs| |build| |coverage| |jazzband| django-ical is a simple library/framework for creating `iCal `_ feeds based in Django's `syndication feed framework `_. This documentation is modeled after the documentation for the syndication feed framework so you can think of it as a simple extension. If you are familiar with the Django syndication feed framework you should be able to be able to use django-ical fairly quickly. It works the same way as the Django syndication framework but adds a few extension properties to support iCalendar feeds. django-ical uses the `icalendar `_ library under the hood to generate iCalendar feeds. Documentation ------------- Documentation is hosted on Read the Docs: https://django-ical.readthedocs.io/en/latest/ .. |pypi| image:: https://img.shields.io/pypi/v/django-ical.svg :alt: PyPI :target: https://pypi.org/project/django-ical/ .. 
|docs| image:: https://readthedocs.org/projects/django-ical/badge/?version=latest :alt: Documentation Status :scale: 100% :target: http://django-ical.readthedocs.io/en/latest/?badge=latest .. |build| image:: https://github.com/jazzband/django-ical/workflows/Test/badge.svg :target: https://github.com/jazzband/django-ical/actions :alt: GitHub Actions .. |coverage| image:: https://codecov.io/gh/jazzband/django-ical/branch/master/graph/badge.svg :target: https://codecov.io/gh/jazzband/django-ical :alt: Coverage .. |jazzband| image:: https://jazzband.co/static/img/badge.svg :target: https://jazzband.co/ :alt: Jazzband upstream-ontologist-0.3.6/readme_tests/django-ical/description000064400000000000000000000011071046102023000227650ustar 00000000000000django-ical is a simple library/framework for creating iCal feeds based in Django's syndication feed framework. This documentation is modeled after the documentation for the syndication feed framework so you can think of it as a simple extension. If you are familiar with the Django syndication feed framework you should be able to be able to use django-ical fairly quickly. It works the same way as the Django syndication framework but adds a few extension properties to support iCalendar feeds. django-ical uses the icalendar library under the hood to generate iCalendar feeds. upstream-ontologist-0.3.6/readme_tests/dulwich/README.rst000064400000000000000000000056041046102023000215030ustar 00000000000000Dulwich ======= This is the Dulwich project. It aims to provide an interface to git repos (both local and remote) that doesn't call out to git directly but instead uses pure Python. **Main website**: **License**: Apache License, version 2 or GNU General Public License, version 2 or later. The project is named after the part of London that Mr. and Mrs. Git live in in the particular Monty Python sketch. Installation ------------ By default, Dulwich' setup.py will attempt to build and install the optional C extensions. 
The reason for this is that they significantly improve the performance since some low-level operations that are executed often are much slower in CPython. If you don't want to install the C bindings, specify the --pure argument to setup.py:: $ python setup.py --pure install or if you are installing from pip:: $ pip install dulwich --global-option="--pure" Note that you can also specify --global-option in a `requirements.txt `_ file, e.g. like this:: dulwich --global-option=--pure Getting started --------------- Dulwich comes with both a lower-level API and higher-level plumbing ("porcelain"). For example, to use the lower level API to access the commit message of the last commit:: >>> from dulwich.repo import Repo >>> r = Repo('.') >>> r.head() '57fbe010446356833a6ad1600059d80b1e731e15' >>> c = r[r.head()] >>> c >>> c.message 'Add note about encoding.\n' And to print it using porcelain:: >>> from dulwich import porcelain >>> porcelain.log('.', max_entries=1) -------------------------------------------------- commit: 57fbe010446356833a6ad1600059d80b1e731e15 Author: Jelmer Vernooij Date: Sat Apr 29 2017 23:57:34 +0000 Add note about encoding. Further documentation --------------------- The dulwich documentation can be found in docs/ and built by running ``make doc``. It can also be found `on the web `_. Help ---- There is a *#dulwich* IRC channel on the `Freenode `_, and `dulwich-announce `_ and `dulwich-discuss `_ mailing lists. Contributing ------------ For a full list of contributors, see the git logs or `AUTHORS `_. If you'd like to contribute to Dulwich, see the `CONTRIBUTING `_ file and `list of open issues `_. Supported versions of Python ---------------------------- At the moment, Dulwich supports (and is tested on) CPython 3.5 and later and Pypy. The latest release series to support Python 2.x was the 0.19 series. See the 0.19 branch in the Dulwich git repository. 
upstream-ontologist-0.3.6/readme_tests/dulwich/description000064400000000000000000000002471046102023000222600ustar 00000000000000This is the Dulwich project. It aims to provide an interface to git repos (both local and remote) that doesn't call out to git directly but instead uses pure Python. upstream-ontologist-0.3.6/readme_tests/empty/README.md000064400000000000000000000000001046102023000207530ustar 00000000000000upstream-ontologist-0.3.6/readme_tests/erbium/README.md000064400000000000000000000010101046102023000211020ustar 00000000000000Erbium ====== Erbium[^0] provides networking services for use on small/home networks. Erbium currently supports both DNS and DHCP, with other protocols hopefully coming soon. Erbium is in early development. * DNS is still in early development, and not ready for use. * DHCP is beta quality. Should be ready for test use. * Router Advertisements are alpha quality. Should be ready for limited testing. [^0]: Erbium is the 68th element in the periodic table, the same as the client port number for DHCP. upstream-ontologist-0.3.6/readme_tests/erbium/description000064400000000000000000000007561046102023000221110ustar 00000000000000Erbium[^0] provides networking services for use on small/home networks. Erbium currently supports both DNS and DHCP, with other protocols hopefully coming soon. Erbium is in early development. * DNS is still in early development, and not ready for use. * DHCP is beta quality. Should be ready for test use. * Router Advertisements are alpha quality. Should be ready for limited testing. [^0]: Erbium is the 68th element in the periodic table, the same as the client port number for DHCP. upstream-ontologist-0.3.6/readme_tests/isso/README.md000064400000000000000000000006071046102023000206070ustar 00000000000000Isso – a commenting server similar to Disqus ============================================ Isso – *Ich schrei sonst* – is a lightweight commenting server written in Python and JavaScript. 
It aims to be a drop-in replacement for [Disqus](http://disqus.com). ![Isso in Action](http://posativ.org/~tmp/isso-sample.png) See [posativ.org/isso](http://posativ.org/isso/) for more details. upstream-ontologist-0.3.6/readme_tests/isso/description000064400000000000000000000002231046102023000215700ustar 00000000000000Isso – Ich schrei sonst – is a lightweight commenting server written in Python and JavaScript. It aims to be a drop-in replacement for Disqus. upstream-ontologist-0.3.6/readme_tests/jadx/README.md000064400000000000000000000123351046102023000205610ustar 00000000000000## JADX [![Build Status](https://travis-ci.org/skylot/jadx.png?branch=master)](https://travis-ci.org/skylot/jadx) [![Code Coverage](https://codecov.io/gh/skylot/jadx/branch/master/graph/badge.svg)](https://codecov.io/gh/skylot/jadx) [![SonarQube Bugs](https://sonarcloud.io/api/project_badges/measure?project=jadx&metric=bugs)](https://sonarcloud.io/dashboard?id=jadx) [![License](http://img.shields.io/:license-apache-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0.html) [![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release) **jadx** - Dex to Java decompiler Command line and GUI tools for produce Java source code from Android Dex and Apk files ![jadx-gui screenshot](https://i.imgur.com/h917IBZ.png) ### Downloads - latest [unstable build: ![Download](https://api.bintray.com/packages/skylot/jadx/unstable/images/download.svg) ](https://bintray.com/skylot/jadx/unstable/_latestVersion#files) - release from [github: ![Latest release](https://img.shields.io/github/release/skylot/jadx.svg)](https://github.com/skylot/jadx/releases/latest) - release from [bintray: ![Download](https://api.bintray.com/packages/skylot/jadx/releases/images/download.svg) ](https://bintray.com/skylot/jadx/releases/_latestVersion#files) After download unpack zip file go to `bin` directory and run: - `jadx` - 
command line version - `jadx-gui` - graphical version On Windows run `.bat` files with double-click\ **Note:** ensure you have installed Java 8 64-bit version ### Related projects: - [PyJadx](https://github.com/romainthomas/pyjadx) - python binding for jadx by [@romainthomas](https://github.com/romainthomas) ### Building jadx from source JDK 8 or higher must be installed: git clone https://github.com/skylot/jadx.git cd jadx ./gradlew dist (on Windows, use `gradlew.bat` instead of `./gradlew`) Scripts for run jadx will be placed in `build/jadx/bin` and also packed to `build/jadx-.zip` ### macOS You can install using brew: brew install jadx ### Run Run **jadx** on itself: cd build/jadx/ bin/jadx -d out lib/jadx-core-*.jar # or bin/jadx-gui lib/jadx-core-*.jar ### Usage ``` jadx[-gui] [options] (.apk, .dex, .jar, .class, .smali, .zip, .aar, .arsc) options: -d, --output-dir - output directory -ds, --output-dir-src - output directory for sources -dr, --output-dir-res - output directory for resources -j, --threads-count - processing threads count -r, --no-res - do not decode resources -s, --no-src - do not decompile source code --single-class - decompile a single class --output-format - can be 'java' or 'json' (default: java) -e, --export-gradle - save as android gradle project --show-bad-code - show inconsistent code (incorrectly decompiled) --no-imports - disable use of imports, always write entire package name --no-debug-info - disable debug info --no-inline-anonymous - disable anonymous classes inline --no-replace-consts - don't replace constant value with matching constant field --escape-unicode - escape non latin characters in strings (with \u) --respect-bytecode-access-modifiers - don't change original access modifiers --deobf - activate deobfuscation --deobf-min - min length of name, renamed if shorter (default: 3) --deobf-max - max length of name, renamed if longer (default: 64) --deobf-rewrite-cfg - force to save deobfuscation map --deobf-use-sourcename - use 
source file name as class name alias --rename-flags - what to rename, comma-separated, 'case' for system case sensitivity, 'valid' for java identifiers, 'printable' characters, 'none' or 'all' --fs-case-sensitive - treat filesystem as case sensitive, false by default --cfg - save methods control flow graph to dot file --raw-cfg - save methods control flow graph (use raw instructions) -f, --fallback - make simple dump (using goto instead of 'if', 'for', etc) -v, --verbose - verbose output --version - print jadx version -h, --help - print this help Example: jadx -d out classes.dex jadx --rename-flags "none" classes.dex jadx --rename-flags "valid,printable" classes.dex ``` These options also worked on jadx-gui running from command line and override options from preferences dialog ### Troubleshooting ##### Out of memory error: - Reduce processing threads count (`-j` option) - Increase maximum java heap size: * command line (example for linux): `JAVA_OPTS="-Xmx4G" jadx -j 1 some.apk` * edit 'jadx' script (jadx.bat on Windows) and setup bigger heap size: `DEFAULT_JVM_OPTS="-Xmx2500M"` --------------------------------------- *Licensed under the Apache 2.0 License* *Copyright 2018 by Skylot* upstream-ontologist-0.3.6/readme_tests/jadx/description000064400000000000000000000001271046102023000215440ustar 00000000000000Command line and GUI tools for produce Java source code from Android Dex and Apk files upstream-ontologist-0.3.6/readme_tests/jupyter-client/README.md000064400000000000000000000044341046102023000226120ustar 00000000000000# Jupyter Client [![Build Status](https://github.com/jupyter/jupyter_client/workflows/CI/badge.svg)](https://github.com/jupyter/jupyter_client/actions) [![Code Health](https://landscape.io/github/jupyter/jupyter_client/master/landscape.svg?style=flat)](https://landscape.io/github/jupyter/jupyter_client/master) `jupyter_client` contains the reference implementation of the [Jupyter protocol][]. 
It also provides client and kernel management APIs for working with kernels. It also provides the `jupyter kernelspec` entrypoint for installing kernelspecs for use with Jupyter frontends. [Jupyter protocol]: https://jupyter-client.readthedocs.io/en/latest/messaging.html # Development Setup The [Jupyter Contributor Guides](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html) provide extensive information on contributing code or documentation to Jupyter projects. The limited instructions below for setting up a development environment are for your convenience. ## Coding You'll need Python and `pip` on the search path. Clone the Jupyter Client git repository to your computer, for example in `/my/project/jupyter_client`. Now create an [editable install](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs) and download the dependencies of code and test suite by executing: cd /my/projects/jupyter_client/ pip install -e .[test] py.test The last command runs the test suite to verify the setup. During development, you can pass filenames to `py.test`, and it will execute only those tests. ## Documentation The documentation of Jupyter Client is generated from the files in `docs/` using Sphinx. Instructions for setting up Sphinx with a selection of optional modules are in the [Documentation Guide](https://jupyter.readthedocs.io/en/latest/contributing/docs-contributions/index.html). You'll also need the `make` command. 
For a minimal Sphinx installation to process the Jupyter Client docs, execute: pip install ipykernel sphinx sphinx_rtd_theme The following commands build the documentation in HTML format and check for broken links: cd /my/projects/jupyter_client/docs/ make html linkcheck Point your browser to the following URL to access the generated documentation: _file:///my/projects/jupyter\_client/docs/\_build/html/index.html_ upstream-ontologist-0.3.6/readme_tests/jupyter-client/description000064400000000000000000000004121046102023000235710ustar 00000000000000jupyter_client contains the reference implementation of the Jupyter protocol. It also provides client and kernel management APIs for working with kernels. It also provides the jupyter kernelspec entrypoint for installing kernelspecs for use with Jupyter frontends. upstream-ontologist-0.3.6/readme_tests/libtrace/README000064400000000000000000000032431046102023000210170ustar 00000000000000libtrace 4.0.7 --------------------------------------------------------------------------- Copyright (c) 2007-2019 The University of Waikato, Hamilton, New Zealand. All rights reserved. This code has been developed by the University of Waikato WAND research group. For further information please see http://www.wand.net.nz/. --------------------------------------------------------------------------- See INSTALL for instructions on how to install libtrace. This directory contains source code for libtrace, a userspace library for processing of network traffic capture from live interfaces or from offline traces. libtrace was primarily designed for use with the real-time interface to the Waikato DAG Capture Point software running at The University of Waikato, and has been since extended to a range of other trace and interface formats. In version 4.0, we have introduced an API for processing packets in parallel using multiple threads. See libtrace_parallel.h for a detailed description of the API. 
Further information about libtrace, see http://research.wand.net.nz/software/libtrace.php Bugs should be reported by either emailing contact@wand.net.nz or filing an issue at https://github.com/LibtraceTeam/libtrace It is licensed under the GNU Lesser General Public License (GPL) version 3. Please see the included files COPYING and COPYING.LESSER for details of this license. A detailed ChangeLog can be found on the libtrace wiki: https://github.com/LibtraceTeam/libtrace/wiki/ChangeLog Documentation, usage instructions and a detailed tutorial can also found on the libtrace wiki. For further information, please contact the WAND group. See http://www.wand.net.nz/ for details. upstream-ontologist-0.3.6/readme_tests/libtrace/description000064400000000000000000000010451046102023000224030ustar 00000000000000This directory contains source code for libtrace, a userspace library for processing of network traffic capture from live interfaces or from offline traces. libtrace was primarily designed for use with the real-time interface to the Waikato DAG Capture Point software running at The University of Waikato, and has been since extended to a range of other trace and interface formats. In version 4.0, we have introduced an API for processing packets in parallel using multiple threads. See libtrace_parallel.h for a detailed description of the API. upstream-ontologist-0.3.6/readme_tests/perl-timedate/README000064400000000000000000000014531046102023000217670ustar 00000000000000This is the perl5 TimeDate distribution. It requires perl version 5.003 or later This distribution replaces my earlier GetDate distribution, which was only a date parser. The date parser contained in this distribution is far superior to the yacc based parser, and a *lot* faster. The parser contained here will only parse absolute dates, if you want a date parser that can parse relative dates then take a look at the Time modules by David Muir on CPAN. 
You install the library by running these commands: perl Makefile.PL make make test make install Please report any bugs/suggestions to Copyright 1995-2009 Graham Barr. This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. Share and Enjoy! Graham upstream-ontologist-0.3.6/readme_tests/perl-timedate/description000064400000000000000000000007101046102023000233500ustar 00000000000000This is the perl5 TimeDate distribution. It requires perl version 5.003 or later This distribution replaces my earlier GetDate distribution, which was only a date parser. The date parser contained in this distribution is far superior to the yacc based parser, and a *lot* faster. The parser contained here will only parse absolute dates, if you want a date parser that can parse relative dates then take a look at the Time modules by David Muir on CPAN. upstream-ontologist-0.3.6/readme_tests/perl5-xml-compile-cache/README.md000064400000000000000000000042201046102023000241410ustar 00000000000000# distribution XML-Compile-Cache * My extended documentation: * Development via GitHub: * Download from CPAN: * Indexed from CPAN: and The XML-Compile suite is a large set of modules for various XML related standards. This optional component is very useful: it manages compiled handlers and helps you define prefixes. ## Development → Release Important to know, is that I use an extension on POD to write the manuals. The "raw" unprocessed version is visible on GitHub. It will run without problems, but does not contain manual-pages. Releases to CPAN are different: "raw" documentation gets removed from the code and translated into real POD and clean HTML. This reformatting is implemented with the OODoc distribution (A name I chose before OpenOffice existed, sorry for the confusion) Clone from github for the "raw" version. For instance, when you want to contribute a new feature. On github, you can find the processed version for each release. 
But the better source is CPAN; to get it installed simply run: ```sh cpan -i XML::Compile::Cache ``` ## Contributing When you want to contribute to this module, you do not need to provide a perfect patch... actually: it is nearly impossible to create a patch which I will merge without modification. Usually, I need to adapt the style of code and documentation to my own strict rules. When you submit an extension, please contribute a set with 1. code 2. code documentation 3. regression tests in t/ **Please note:** When you contribute in any way, you agree to transfer the copyrights to Mark Overmeer (you will get the honors in the code and/or ChangeLog). You also automatically agree that your contribution is released under the same license as this project: licensed as perl itself. ## Copyright and License This project is free software; you can redistribute it and/or modify it under the same terms as Perl itself. See upstream-ontologist-0.3.6/readme_tests/perl5-xml-compile-cache/description000064400000000000000000000002701046102023000251310ustar 00000000000000The XML-Compile suite is a large set of modules for various XML related standards. This optional component is very useful: it manages compiled handlers and helps you define prefixes. 
upstream-ontologist-0.3.6/readme_tests/pylint-flask/README.md000064400000000000000000000041431046102023000222460ustar 00000000000000pylint-flask =============== [![Build Status](https://travis-ci.org/jschaf/pylint-flask.svg?branch=master)](https://travis-ci.org/jschaf/pylint-flask) [![Coverage Status](https://coveralls.io/repos/jschaf/pylint-flask/badge.svg?branch=master)](https://coveralls.io/r/jschaf/pylint-flask?branch=master) [![PyPI](https://img.shields.io/pypi/v/pylint-flask.svg)](https://pypi.python.org/pypi/pylint-flask) [![License](https://img.shields.io/badge/license-GPLv2%20License-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html) ## About `pylint-flask` is [Pylint](http://pylint.org) plugin for improving code analysis when editing code using [Flask](http://flask.pocoo.org/). Inspired by [pylint-django](https://github.com/landscapeio/pylint-django). ### Problems pylint-flask solves: 1. Recognize `flask.ext.*` style imports. Say you have the following code: ```python from flask.ext import wtf from flask.ext.wtf import validators class PostForm(wtf.Form): content = wtf.TextAreaField('Content', validators=[validators.Required()]) ``` Normally, pylint will throw errors like: ``` E: 1,0: No name 'wtf' in module 'flask.ext' E: 2,0: No name 'wtf' in module 'flask.ext' F: 2,0: Unable to import 'flask.ext.wtf' ``` As pylint builds it's own abstract syntax tree, `pylint-flask` will translate the `flask.ext` imports into the actual module name, so pylint can continue checking your code. ## Usage Ensure `pylint-flask` is installed and on your path, and then run pylint using pylint-flask as a plugin. ``` pip install pylint-flask pylint --load-plugins pylint_flask [..your module..] ``` ## Contributing Pull requests are always welcome. Here's an outline of the steps you need to prepare your code. 1. git clone https://github.com/jschaf/pylint-flask.git 2. cd pylint-flask 3. mkvirtualenv pylint-flask 4. pip install -r dev-requirements.txt 5. 
git checkout -b MY-NEW-FIX 6. Hack away 7. Make sure everything is green by running `tox` 7. git push origin MY-NEW-FIX 8. Create a pull request ## License pylint-flask is available under the GPLv2 license.upstream-ontologist-0.3.6/readme_tests/pylint-flask/description000064400000000000000000000001641046102023000232340ustar 00000000000000pylint-flask is Pylint plugin for improving code analysis when editing code using Flask. Inspired by pylint-django. upstream-ontologist-0.3.6/readme_tests/python-icalendar/README.rst000064400000000000000000000022431046102023000233010ustar 00000000000000========================================================== Internet Calendaring and Scheduling (iCalendar) for Python ========================================================== The `icalendar`_ package is a `RFC 5545`_ compatible parser/generator for iCalendar files. ---- :Homepage: https://icalendar.readthedocs.io :Code: https://github.com/collective/icalendar :Mailing list: https://github.com/collective/icalendar/issues :Dependencies: `python-dateutil`_ and `pytz`_. :Compatible with: Python 2.7 and 3.4+ :License: `BSD`_ ---- .. image:: https://travis-ci.org/collective/icalendar.svg?branch=master :target: https://travis-ci.org/collective/icalendar .. _`icalendar`: https://pypi.org/project/icalendar/ .. _`RFC 5545`: https://www.ietf.org/rfc/rfc5545.txt .. _`python-dateutil`: https://github.com/dateutil/dateutil/ .. _`pytz`: https://pypi.org/project/pytz/ .. _`BSD`: https://github.com/collective/icalendar/issues/2 Related projects ================ * `icalevents `_. It is built on top of icalendar and allows you to query iCal files and get the events happening on specific dates. It manages recurrent events as well. upstream-ontologist-0.3.6/readme_tests/python-icalendar/description000064400000000000000000000001251046102023000240550ustar 00000000000000The icalendar package is a RFC 5545 compatible parser/generator for iCalendar files. 
upstream-ontologist-0.3.6/readme_tests/python-rsa/README.md000064400000000000000000000036101046102023000217330ustar 00000000000000# Pure Python RSA implementation [![PyPI](https://img.shields.io/pypi/v/rsa.svg)](https://pypi.org/project/rsa/) [![Build Status](https://travis-ci.org/sybrenstuvel/python-rsa.svg?branch=master)](https://travis-ci.org/sybrenstuvel/python-rsa) [![Coverage Status](https://coveralls.io/repos/github/sybrenstuvel/python-rsa/badge.svg?branch=master)](https://coveralls.io/github/sybrenstuvel/python-rsa?branch=master) [![Code Climate](https://api.codeclimate.com/v1/badges/a99a88d28ad37a79dbf6/maintainability)](https://codeclimate.com/github/codeclimate/codeclimate/maintainability) [Python-RSA](https://stuvel.eu/rsa) is a pure-Python RSA implementation. It supports encryption and decryption, signing and verifying signatures, and key generation according to PKCS#1 version 1.5. It can be used as a Python library as well as on the commandline. The code was mostly written by Sybren A. Stüvel. Documentation can be found at the [Python-RSA homepage](https://stuvel.eu/rsa). For all changes, check [the changelog](https://github.com/sybrenstuvel/python-rsa/blob/master/CHANGELOG.md). Download and install using: pip install rsa or download it from the [Python Package Index](https://pypi.org/project/rsa/). The source code is maintained at [GitHub](https://github.com/sybrenstuvel/python-rsa/) and is licensed under the [Apache License, version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ## Security Because of how Python internally stores numbers, it is very hard (if not impossible) to make a pure-Python program secure against timing attacks. This library is no exception, so use it with care. See https://securitypitfalls.wordpress.com/2018/08/03/constant-time-compare-in-python/ for more info. ## Setup of Development Environment ``` python3 -m venv .venv . ./.venv/bin/activate pip install poetry poetry install ``` ## Publishing a New Release ``` . 
./.venv/bin/activate poetry publish --build ``` upstream-ontologist-0.3.6/readme_tests/python-rsa/description000064400000000000000000000004421046102023000227220ustar 00000000000000Python-RSA is a pure-Python RSA implementation. It supports encryption and decryption, signing and verifying signatures, and key generation according to PKCS#1 version 1.5. It can be used as a Python library as well as on the commandline. The code was mostly written by Sybren A. Stüvel. upstream-ontologist-0.3.6/readme_tests/ruby-columnize/README.md000064400000000000000000000051541046102023000226200ustar 00000000000000[![Build Status](https://travis-ci.org/rocky/columnize.png)](https://travis-ci.org/rocky/columnize) [![Gem Version](https://badge.fury.io/rb/columnize.svg)](http://badge.fury.io/rb/columnize) Columnize - Format an Array as a Column-aligned String ============================================================================ In showing a long lists, sometimes one would prefer to see the value arranged aligned in columns. Some examples include listing methods of an object, listing debugger commands, or showing a numeric array with data aligned. 
Setup ----- $ irb >> require 'columnize' => true With numeric data ----------------- >> a = (1..10).to_a => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] >> a.columnize => "1 2 3 4 5 6 7 8 9 10" >> puts a.columnize :arrange_array => true, :displaywidth => 10 [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] => nil >> puts a.columnize :arrange_array => true, :displaywidth => 20 [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] => nil With String data ---------------- >> g = %w(bibrons golden madascar leopard mourning suras tokay) => ["bibrons", "golden", "madascar", "leopard", "mourning", "suras", "tokay"] >> puts g.columnize :displaywidth => 15 bibrons suras golden tokay madascar leopard mourning => nil >> puts g.columnize :displaywidth => 19, :colsep => ' | ' bibrons | suras golden | tokay madascar leopard mourning => nil >> puts g.columnize :displaywidth => 18, :colsep => ' | ', :ljust => false bibrons | mourning golden | suras madascar | tokay leopard => nil Using Columnize.columnize ------------------------- >> Columnize.columnize(a) => "1 2 3 4 5 6 7 8 9 10" >> puts Columnize.columnize(a, :displaywidth => 10) 1 5 9 2 6 10 3 7 4 8 => nil >> Columnize.columnize(g) => "bibrons golden madascar leopard mourning suras tokay" >> puts Columnize.columnize(g, :displaywidth => 19, :colsep => ' | ') bibrons | mourning golden | suras madascar | tokay leopard => nil Credits ------- This is adapted from a method of the same name from Python's cmd module. 
Other stuff ----------- Authors: Rocky Bernstein [![endorse](https://api.coderwall.com/rocky/endorsecount.png)](https://coderwall.com/rocky) and [Martin Davis](https://github.com/waslogic) License: Copyright (c) 2011,2013 Rocky Bernstein Warranty -------- You can redistribute it and/or modify it under either the terms of the GPL version 2 or the conditions listed in COPYING upstream-ontologist-0.3.6/readme_tests/ruby-columnize/description000064400000000000000000000003371046102023000236050ustar 00000000000000In showing a long lists, sometimes one would prefer to see the value arranged aligned in columns. Some examples include listing methods of an object, listing debugger commands, or showing a numeric array with data aligned. upstream-ontologist-0.3.6/readme_tests/ruby-sha3/README.md000064400000000000000000000076751046102023000214630ustar 00000000000000# sha3 [![Gem Version](https://badge.fury.io/rb/sha3.svg)](https://badge.fury.io/rb/sha3) [![CI](https://secure.travis-ci.org/johanns/sha3.png)](https://secure.travis-ci.org/johanns/sha3) [![Dependencies](https://gemnasium.com/johanns/sha3.png)](https://gemnasium.com/johanns/sha3) [![CodeClimate](https://codeclimate.com/github/johanns/sha3.png)](https://codeclimate.com/github/johanns/sha3) **SHA3 for Ruby** is a native (C) binding to SHA3 (Keccak FIPS 202) cryptographic hashing algorithm. - Home :: [https://github.com/johanns/sha3#readme]() - Issues :: [https://github.com/johanns/sha3/issues]() - Documentation :: [http://rubydoc.info/gems/sha3/frames]() ## Warnings - Version 1.0+ breaks compatibility with previous versions of this gem. - Do NOT use SHA3 to hash passwords; use either ```bcrypt``` or ```scrypt``` instead! ## Module details **SHA3::Digest**: A standard *Digest* _subclass_. The interface, and operation of this class are parallel to digest classes bundled with MRI-based Rubies (e.g.: **Digest::SHA2**, and **OpenSSL::Digest**). 
See [documentation for Ruby's **Digest** class for additional details](http://www.ruby-doc.org/stdlib-2.2.3/libdoc/digest/rdoc/Digest.html). ## Installation ```shell gem install sha3 ``` ## Usage ```ruby require 'sha3' ``` Valid hash bit-lengths are: *224*, *256*, *384*, *512*. ```ruby :sha224 :sha256 :sha384 :sha512 # SHA3::Digest.new(224) is SHA3::Digest.new(:sha224) ``` Alternatively, you can instantiate using one of four sub-classes: ```ruby SHA3::Digest::SHA224.new() # 224 bits SHA3::Digest::SHA256.new() # 256 bits SHA3::Digest::SHA384.new() # 384 bits SHA3::Digest::SHA512.new() # 512 bits ``` ### Basics ```ruby # Instantiate a new SHA3::Digest class with 256 bit length s = SHA3::Digest.new(:sha256) # OR # s = SHA3::Digest::SHA256.new() # Update hash state, and compute new value s.update "Compute Me" # << is an .update() alias s << "Me too" # Returns digest value in bytes s.digest # => "\xBE\xDF\r\xD9\xA1..." # Returns digest value as hex string s.hexdigest # => "bedf0dd9a15b647..." ### Digest class-methods: ### SHA3::Digest.hexdigest(:sha224, "Hash me, please") # => "200e7bc18cd613..." SHA3::Digest::SHA384.digest("Hash me, please") # => "\xF5\xCEpC\xB0eV..." ``` ### Hashing a file ```ruby # Compute the hash value for given file, and return the result as hex s = SHA3::Digest::SHA224.file("my_fantastical_file.bin").hexdigest # Calling SHA3::Digest.file(...) defaults to SHA256 s = SHA3::Digest.file("tests.sh") # => # ``` ## Development * Native build tools (e.g., GCC, Minigw, etc.) * Gems: rubygems-tasks, rake, rspec, yard ### Testing + RSpec Call ```rake``` to run the included RSpec tests. Only a small subset of test vectors are included in the source repository; however, the complete test vectors suite is available for download. Simply run the ```tests.sh``` shell script (available in the root of source directory) to generate full byte-length RSpec test files. 
```sh tests.sh``` ### Rubies Tested with Rubies: - MRI Ruby-Head - MRI 2.1.0 - MRI 2.0.0 - MRI 1.9.3 - MRI 1.9.2 - MRI 1.8.7 - Rubinius 2 On: - Ubuntu 12.04, 12.10, 13.04, 14.04, 15.04 - Windows 7, 8, 8.1, 10 - Mac OS X 10.6 - 10.11 ## Releases - *1.0.1* :: FIPS 202 compliance (breaks compatibility with earlier releases) - *0.2.6* :: Fixed bug #4 - *0.2.5* :: Bug fixes. (See ChangeLog.rdoc) - *0.2.4* :: Bug fixes. (YANKED) - *0.2.3* :: Added documentation file (decoupled form C source); refactored C source. - *0.2.2* :: Added sub-class for each SHA3 supported bit-lengths (example: SHA3::Digest::SHA256). Minor bug fix. - *0.2.0* :: Production worthy, but breaks API compatibility with 0.1.x. Backward-compatibility will be maintained henceforth. - *0.1.x* :: Alpha code, and not suitable for production. ## TO DO - Add SHAKE128/256 support ## Copyright Copyright (c) 2012 - 2015 Johanns Gregorian (https://github.com/johanns) **See LICENSE.txt for details.** upstream-ontologist-0.3.6/readme_tests/ruby-sha3/description000064400000000000000000000001411046102023000224270ustar 00000000000000SHA3 for Ruby is a native (C) binding to SHA3 (Keccak FIPS 202) cryptographic hashing algorithm. upstream-ontologist-0.3.6/readme_tests/samba/README.md000064400000000000000000000114341046102023000207150ustar 00000000000000About Samba =========== Samba is the standard Windows interoperability suite of programs for Linux and Unix. Samba is Free Software licensed under the GNU General Public License and the Samba project is a member of the Software Freedom Conservancy. Since 1992, Samba has provided secure, stable and fast file and print services for all clients using the SMB/CIFS protocol, such as all versions of DOS and Windows, OS/2, Linux and many others. Samba is an important component to seamlessly integrate Linux/Unix Servers and Desktops into Active Directory environments. It can function both as a domain controller or as a regular domain member. 
For the AD DC implementation a full HOWTO is provided at: https://wiki.samba.org/index.php/Samba4/HOWTO Community guidelines can be read at: https://wiki.samba.org/index.php/How_to_do_Samba:_Nicely This software is freely distributable under the GNU public license, a copy of which you should have received with this software (in a file called COPYING). CONTRIBUTIONS ============= Please see https://wiki.samba.org/index.php/Contribute for detailed set-by-step instructions on how to submit a patch for Samba via GitLab. Samba's GitLab mirror is at https://gitlab.com/samba-team/samba OUR CONTRIBUTORS ================ See https://www.samba.org/samba/team/ for details of the Samba Team, as well as details of all those currently active in Samba development. If you like a particular feature then look through the git change-log (on the web at https://gitweb.samba.org/?p=samba.git;a=summary) and see who added it, then send them an email. Remember that free software of this kind lives or dies by the response we get. If no one tells us they like it then we'll probably move onto something else. MORE INFO ========= DOCUMENTATION ------------- There is quite a bit of documentation included with the package, including man pages and the wiki at https://wiki.samba.org If you would like to help with our documentation, please contribute that improved content to the wiki, we are moving as much content there as possible. MAILING LIST ------------ Please do NOT send subscription/unsubscription requests to the lists! There is a mailing list for discussion of Samba. For details go to or send mail to There is also an announcement mailing list where new versions are announced. To subscribe go to or send mail to . All announcements also go to the samba list, so you only need to be on one. For details of other Samba mailing lists and for access to archives, see MAILING LIST ETIQUETTE ---------------------- A few tips when submitting to this or any mailing list. 1. 
Make your subject short and descriptive. Avoid the words "help" or "Samba" in the subject. The readers of this list already know that a) you need help, and b) you are writing about samba (of course, you may need to distinguish between Samba PDC and other file sharing software). Avoid phrases such as "what is" and "how do i". Some good subject lines might look like "Slow response with Excel files" or "Migrating from Samba PDC to NT PDC". 2. If you include the original message in your reply, trim it so that only the relevant lines, enough to establish context, are included. Chances are (since this is a mailing list) we've already read the original message. 3. Trim irrelevant headers from the original message in your reply. All we need to see is a) From, b) Date, and c) Subject. We don't even really need the Subject, if you haven't changed it. Better yet is to just preface the original message with "On [date] [someone] wrote:". 4. Please don't reply to or argue about spam, spam filters or viruses on any Samba lists. We do have a spam filtering system that is working quite well thank you very much but occasionally unwanted messages slip through. Deal with it. 5. Never say "Me too." It doesn't help anyone solve the problem. Instead, if you ARE having the same problem, give more information. Have you seen something that the other writer hasn't mentioned, which may be helpful? 6. If you ask about a problem, then come up with the solution on your own or through another source, by all means post it. Someone else may have the same problem and is waiting for an answer, but never hears of it. 7. Give as much *relevant* information as possible such as Samba release number, OS, kernel version, etc... 8. RTFM. Google. WEBSITE ------- A Samba website has been setup with lots of useful info. Connect to: https://www.samba.org/ As well as general information and documentation, this also has searchable archives of the mailing list and links to other useful resources such as the wiki. 
upstream-ontologist-0.3.6/readme_tests/samba/description000064400000000000000000000014621046102023000217040ustar 00000000000000Samba is the standard Windows interoperability suite of programs for Linux and Unix. Samba is Free Software licensed under the GNU General Public License and the Samba project is a member of the Software Freedom Conservancy. Since 1992, Samba has provided secure, stable and fast file and print services for all clients using the SMB/CIFS protocol, such as all versions of DOS and Windows, OS/2, Linux and many others. Samba is an important component to seamlessly integrate Linux/Unix Servers and Desktops into Active Directory environments. It can function both as a domain controller or as a regular domain member. For the AD DC implementation a full HOWTO is provided at: https://wiki.samba.org/index.php/Samba4/HOWTO Community guidelines can be read at: https://wiki.samba.org/index.php/How_to_do_Samba:_Nicely upstream-ontologist-0.3.6/readme_tests/saneyaml/README.rst000064400000000000000000000026101046102023000216470ustar 00000000000000======== saneyaml ======== This micro library is a PyYaml wrapper with sane behaviour to read and write readable YAML safely, typically when used with configuration files. With saneyaml you can dump readable and clean YAML and load safely any YAML preserving ordering and avoiding surprises of type conversions by loading everything except booleans as strings. Optionally you can check for duplicated map keys when loading YAML. Works with Python 2 and 3. Requires PyYAML. License: apache-2.0 Homepage_url: https://github.com/nexB/saneyaml Usage:: pip install saneyaml >>> from saneyaml import load as l >>> from saneyaml import dump as d >>> a=l('''version: 3.0.0.dev6 ... ... description: | ... AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file ... provides a way to document a software component. ... 
''') >>> a OrderedDict([ (u'version', u'3.0.0.dev6'), (u'description', u'AboutCode Toolkit is a tool to process ABOUT files. ' 'An ABOUT file\nprovides a way to document a software component.\n')]) >>> pprint(a.items()) [(u'version', u'3.0.0.dev6'), (u'description', u'AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file\nprovides a way to document a software component.\n')] >>> print(d(a)) version: 3.0.0.dev6 description: | AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file provides a way to document a software component. upstream-ontologist-0.3.6/readme_tests/saneyaml/description000064400000000000000000000007001046102023000224240ustar 00000000000000This micro library is a PyYaml wrapper with sane behaviour to read and write readable YAML safely, typically when used with configuration files. With saneyaml you can dump readable and clean YAML and load safely any YAML preserving ordering and avoiding surprises of type conversions by loading everything except booleans as strings. Optionally you can check for duplicated map keys when loading YAML. Works with Python 2 and 3. Requires PyYAML. upstream-ontologist-0.3.6/readme_tests/sfcgal/README.md000064400000000000000000000004411046102023000210650ustar 00000000000000SFCGAL ====== SFCGAL is a C++ wrapper library around [CGAL](http://www.cgal.org) with the aim of supporting ISO 191007:2013 and OGC Simple Features for 3D operations. Please refer to the project page for an updated installation procedure. upstream-ontologist-0.3.6/readme_tests/sfcgal/description000064400000000000000000000002031046102023000220500ustar 00000000000000SFCGAL is a C++ wrapper library around CGAL with the aim of supporting ISO 191007:2013 and OGC Simple Features for 3D operations. 
upstream-ontologist-0.3.6/readme_tests/statuscake/README.md000064400000000000000000000006071046102023000220010ustar 00000000000000# statuscake [![Build Status](https://travis-ci.org/DreamItGetIT/statuscake.svg?branch=master)](https://travis-ci.org/DreamItGetIT/statuscake) `statuscake` is a Go pkg that implements a client for the [statuscake]("https://statuscake.com") API. More documentation and examples at [http://godoc.org/github.com/DreamItGetIT/statuscake](http://godoc.org/github.com/DreamItGetIT/statuscake). upstream-ontologist-0.3.6/readme_tests/statuscake/description000064400000000000000000000001101046102023000227550ustar 00000000000000statuscake is a Go pkg that implements a client for the statuscake API. upstream-ontologist-0.3.6/readme_tests/text-worddif/README.md000064400000000000000000000022451046102023000222520ustar 00000000000000Text/WordDiff version 0.09 ========================== This library's module, Text::WordDiff, is a variation on the lovely [Text::Diff](http://search.cpan.org/perldoc?Text::Diff) module. Rather than generating traditional line-oriented diffs, however, it generates word-oriented diffs. This can be useful for tracking changes in narrative documents or documents with very long lines. To diff source code, one is still best off using Text::Diff. But if you want to see how a short story changed from one version to the next, this module will do the job very nicely. INSTALLATION To install this module, type the following: perl Build.PL ./Build ./Build test ./Build install Or, if you don't have Module::Build installed, type the following: perl Makefile.PL make make test make install Dependencies ------------ Text::WordDiff requires the following modules: * Algorithm::Diff '1.19', * Term::ANSIColor '0', * HTML::Entities '0', Copyright and License --------------------- Copyright (c) 2005-2011 David E. Wheeler. Some Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
upstream-ontologist-0.3.6/readme_tests/text-worddif/description000064400000000000000000000007211046102023000232360ustar 00000000000000This library's module, Text::WordDiff, is a variation on the lovely Text::Diff module. Rather than generating traditional line-oriented diffs, however, it generates word-oriented diffs. This can be useful for tracking changes in narrative documents or documents with very long lines. To diff source code, one is still best off using Text::Diff. But if you want to see how a short story changed from one version to the next, this module will do the job very nicely. upstream-ontologist-0.3.6/readme_tests/wandio/README000064400000000000000000000024441046102023000205150ustar 00000000000000WANDIO 4.2.1 --------------------------------------------------------------------------- Copyright (c) 2007-2019 The University of Waikato, Hamilton, New Zealand. All rights reserved. This code has been developed by the University of Waikato WAND research group. For further information please see http://www.wand.net.nz/. --------------------------------------------------------------------------- See INSTALL for instructions on how to install WANDIO. This directory contains source code for WANDIO, a library for reading from, and writing to, files. Depending on libraries available at compile time, WANDIO provides transparent compression/decompression for the following formats: - zlib (gzip) - bzip2 - lzo (write-only) - lzma - zstd - lz4 - Intel QAT (write-only) - http (read-only) WANDIO also improves IO performance by performing compression/decompression in a separate thread (if pthreads are available). Documentation for WANDIO and its included tools can be found at https://github.com/wanduow/wandio/wiki Bugs should be reported by either emailing contact@wand.net.nz or filing an issue at https://github.com/wanduow/wandio It is licensed under the Lesser GNU General Public License (LGPL) version 3. 
Please see the included files COPYING and COPYING.LESSER for details of this license. upstream-ontologist-0.3.6/readme_tests/wandio/description000064400000000000000000000007261046102023000221040ustar 00000000000000This directory contains source code for WANDIO, a library for reading from, and writing to, files. Depending on libraries available at compile time, WANDIO provides transparent compression/decompression for the following formats: - zlib (gzip) - bzip2 - lzo (write-only) - lzma - zstd - lz4 - Intel QAT (write-only) - http (read-only) WANDIO also improves IO performance by performing compression/decompression in a separate thread (if pthreads are available). upstream-ontologist-0.3.6/src/bin/autocodemeta.rs000064400000000000000000000110751046102023000202540ustar 00000000000000use clap::Parser; use serde::Serialize; use std::collections::HashSet; use std::io::Write; use std::path::PathBuf; use upstream_ontologist::UpstreamDatum; #[derive(Serialize, Default)] struct SoftwareSourceCode { name: Option, version: Option, #[serde(rename = "codeRepository")] code_repository: Option, #[serde(rename = "issueTracker")] issue_tracker: Option, license: Option, description: Option, // TODO(jelmer): Support setting contIntegration // TODO(jelmer): Support keywords // TODO(jelmer): Support funder // TODO(jelmer): Support funding // TODO(jelmer): Support creation date // TODO(jelmer): Support first release date // TODO(jelmer): Support unique identifier // TODO(jelmer): Support runtime platform // TODO(jelmer): Support other software requirements // TODO(jelmer): Support operating system // TODO(jelmer): Support development status // TODO(jelmer): Support reference publication // TODO(jelmer): Support part of // TODO(jelmer): Support Author #[serde(rename = "downloadUrl")] download_url: Option, #[serde(rename = "relatedLink")] related_link: HashSet, } fn valid_spdx_identifier(name: &str) -> bool { name.chars() .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '+') } fn 
codemeta_file_from_upstream_info(data: Vec) -> SoftwareSourceCode { let mut result = SoftwareSourceCode { ..Default::default() }; for upstream_datum in data { match upstream_datum { UpstreamDatum::Name(n) => { result.name = Some(n); } UpstreamDatum::Homepage(h) => { result.related_link.insert(h); } UpstreamDatum::Description(d) => { result.description = Some(d); } UpstreamDatum::Download(d) => { result.download_url = Some(d); } UpstreamDatum::MailingList(ml) => { result.related_link.insert(ml); } UpstreamDatum::BugDatabase(bd) => { result.issue_tracker = Some(bd); } UpstreamDatum::Screenshots(us) => { for u in us { result.related_link.insert(u); } } UpstreamDatum::Wiki(r) => { result.related_link.insert(r); } UpstreamDatum::Repository(r) => { result.code_repository = Some(r); } UpstreamDatum::RepositoryBrowse(r) => { result.related_link.insert(r); } UpstreamDatum::License(l) => { if valid_spdx_identifier(&l) { result.license = Some(format!("https://spdx.org/licenses/{}", l)); } } UpstreamDatum::Version(v) => { result.version = Some(v); } UpstreamDatum::Documentation(a) => { result.related_link.insert(a); } _ => {} } } result } #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Do not probe external services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Consult external directory for metadata #[clap(long)] consult_external_directory: bool, } #[tokio::main] async fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); let path = args.path.canonicalize().unwrap(); let upstream_info = 
upstream_ontologist::get_upstream_info( path.as_path(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .await .unwrap(); let codemeta = codemeta_file_from_upstream_info(upstream_info.into()); std::io::stdout() .write_all(serde_json::to_string_pretty(&codemeta).unwrap().as_bytes()) .unwrap(); } upstream-ontologist-0.3.6/src/bin/autodoap.rs000064400000000000000000000223671046102023000174240ustar 00000000000000use clap::Parser; use maplit::hashmap; use std::io::Write; use std::path::PathBuf; use upstream_ontologist::UpstreamDatum; use xmltree::{Element, Namespace, XMLNode}; const DOAP_NS: &str = "http://usefulinc.com/ns/doap"; const RDF_NS: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns"; const FOAF_NS: &str = "http://xmlns.com/foaf/0.1/"; fn rdf_resource(namespace: &Namespace, url: String) -> XMLNode { XMLNode::Element(Element { prefix: Some("rdf".to_string()), namespaces: Some(namespace.clone()), namespace: Some(RDF_NS.to_string()), name: "resource".to_string(), attributes: hashmap! {"rdf:resource".to_string() => url}, children: vec![], }) } fn doap_file_from_upstream_info(data: Vec) -> Element { let mut namespace = Namespace::empty(); namespace.put("doap", DOAP_NS); namespace.put("rdf", RDF_NS); namespace.put("foaf", FOAF_NS); let mut repository = None; let mut repository_browse = None; let mut children = vec![]; for upstream_datum in data { match upstream_datum { UpstreamDatum::Name(n) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "name".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(n)], })); } UpstreamDatum::Homepage(h) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "homepage".to_string(), attributes: hashmap! 
{}, children: vec![rdf_resource(&namespace, h)], })); } UpstreamDatum::Summary(s) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "shortdesc".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(s)], })); } UpstreamDatum::Description(d) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "description".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(d)], })); } UpstreamDatum::Download(d) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "download-page".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, d)], })); } UpstreamDatum::MailingList(ml) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "mailing-list".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, ml)], })); } UpstreamDatum::BugDatabase(bd) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "bug-database".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, bd)], })); } UpstreamDatum::Screenshots(us) => { for u in us { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "screenshots".to_string(), attributes: hashmap! 
{}, children: vec![rdf_resource(&namespace, u)], })); } } UpstreamDatum::SecurityContact(sc) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "security-contact".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, sc)], })); } UpstreamDatum::Wiki(r) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "wiki".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, r)], })); } UpstreamDatum::Repository(r) => { repository = Some(r); } UpstreamDatum::RepositoryBrowse(r) => { repository_browse = Some(r); } _ => {} } } if repository.is_some() || repository_browse.is_some() { let mut git_repo_el = Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "GitRepository".to_string(), attributes: hashmap! {}, children: vec![], }; if let Some(r) = repository { git_repo_el.children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "location".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, r)], })); } if let Some(b) = repository_browse { git_repo_el.children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "browse".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, b)], })); } children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "repository".to_string(), attributes: hashmap! 
{}, children: vec![XMLNode::Element(git_repo_el)], })); } Element { prefix: Some("doap".to_string()), namespaces: Some(namespace), namespace: Some(DOAP_NS.to_string()), name: "Project".to_string(), attributes: hashmap! {}, children, } } #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Do not probe external services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Consult external directory #[clap(long)] consult_external_directory: bool, } #[tokio::main] async fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); let path = args.path.canonicalize().unwrap(); let upstream_info = upstream_ontologist::get_upstream_info( path.as_path(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .await .unwrap(); let el = doap_file_from_upstream_info(upstream_info.into()); use xmltree::EmitterConfig; let config = EmitterConfig::new() .perform_indent(true) .normalize_empty_elements(true); el.write_with_config(&mut std::io::stdout(), config) .unwrap(); } upstream-ontologist-0.3.6/src/bin/guess-upstream-metadata.rs000064400000000000000000000110711046102023000223400ustar 00000000000000use clap::Parser; use std::io::Write; use futures::stream::StreamExt; use std::path::PathBuf; #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Whether to enable trace logging #[clap(long)] trace: bool, /// Do not probe external 
services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Scan for metadata rather than printing results #[clap(long)] scan: bool, /// Scan specified homepage rather than current directory #[clap(long)] from_homepage: Option, /// Find data based on specified repology id #[clap(long)] from_repology: Option, /// Pull in external (not maintained by upstream) directory data #[clap(long)] consult_external_directory: bool, } #[tokio::main] async fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.trace { log::LevelFilter::Trace } else if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); if let Some(from_homepage) = args.from_homepage { for d in upstream_ontologist::homepage::guess_from_homepage(&from_homepage) .await .unwrap() { println!( "{}: {:?} - certainty {} (from {:?})", d.datum.field(), d.datum, d.certainty .map_or_else(|| "unknown".to_string(), |d| d.to_string()), d.origin ); } } else if let Some(id) = args.from_repology { for d in upstream_ontologist::repology::find_upstream_from_repology(&id) .await .unwrap() { println!( "{}: {:?} - certainty {} (from {:?})", d.datum.field(), d.datum, d.certainty .map_or_else(|| "unknown".to_string(), |d| d.to_string()), d.origin ); } } else if args.scan { let stream = upstream_ontologist::upstream_metadata_stream( &args.path.canonicalize().unwrap(), Some(args.trust), ); tokio::pin!(stream); while let Some(entry) = stream.next().await { let entry = entry.unwrap(); println!( "{}: {:?} - certainty {}{}", entry.datum.field(), entry.datum, entry .certainty .map_or("unknown".to_string(), |c| c.to_string()), entry .origin .map_or_else(|| "".to_string(), |o| format!(" (from {:?})", o)) ); } } else { let metadata = match upstream_ontologist::guess_upstream_metadata( 
&args.path.canonicalize().unwrap(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .await { Ok(m) => m, Err(upstream_ontologist::ProviderError::ParseError(e)) => { eprintln!("Error parsing metadata: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::IoError(e)) => { eprintln!("I/O Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::Other(e)) => { eprintln!("Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::HttpJsonError(e)) => { eprintln!("Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::ExtrapolationLimitExceeded(l)) => { eprintln!("Extraoplation limit exceeded: {}", l); std::process::exit(1); } }; let out = serde_yaml::to_value(&metadata).unwrap(); std::io::stdout() .write_all(serde_yaml::to_string(&out).unwrap().as_bytes()) .unwrap(); } } upstream-ontologist-0.3.6/src/extrapolate.rs000064400000000000000000000471161046102023000173670ustar 00000000000000use crate::{Certainty, UpstreamDatum, UpstreamDatumWithMetadata}; use crate::{ProviderError, UpstreamMetadata}; use log::warn; const DEFAULT_ITERATION_LIMIT: usize = 10; type ExtrapolationCallback = fn( UpstreamMetadata, bool, ) -> std::pin::Pin< Box< dyn std::future::Future, ProviderError>> + Send, >, >; struct Extrapolation { from_fields: &'static [&'static str], to_fields: &'static [&'static str], cb: ExtrapolationCallback, } async fn extrapolate_repository_from_homepage( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let homepage = upstream_metadata.get("Homepage").unwrap(); let url = match homepage.datum.to_url() { Some(url) => url, None => { return { warn!("Homepage field is not a URL"); Ok(vec![]) } } }; if let Some(repo) = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: 
Some( std::cmp::min(homepage.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: homepage.origin.clone(), }); } Ok(ret) } async fn extrapolate_homepage_from_repository_browse( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let browse_url = upstream_metadata.get("Repository-Browse").unwrap(); let url = match browse_url.datum.to_url() { Some(url) => url, None => { return { warn!("Repository-Browse field is not a URL"); Ok(vec![]) } } }; // Some hosting sites are commonly used as Homepage // TODO(jelmer): Maybe check that there is a README file that // can serve as index? let forge = crate::find_forge(&url, Some(net_access)).await; if forge.is_some() && forge.unwrap().repository_browse_can_be_homepage() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(browse_url.datum.as_str().unwrap().to_string()), certainty: Some( std::cmp::min(browse_url.certainty, Some(Certainty::Possible)) .unwrap_or(Certainty::Possible), ), origin: browse_url.origin.clone(), }); } Ok(ret) } async fn copy_bug_db_field( upstream_metadata: &UpstreamMetadata, _net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let old_bug_db = upstream_metadata.get("Bugs-Database").unwrap(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(old_bug_db.datum.as_str().unwrap().to_string()), certainty: old_bug_db.certainty, origin: old_bug_db.origin.clone(), }); Ok(ret) } async fn extrapolate_repository_from_bug_db( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Database").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Bug-Database field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), 
certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_repository_browse_from_repository( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; let browse_url = crate::vcs::browse_url_from_repo_url( &crate::vcs::VcsLocation { url, branch: None, subpath: None, }, Some(net_access), ) .await; Ok(if let Some(browse_url) = browse_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(browse_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_repository_from_repository_browse( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository-Browse").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository-Browse field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_bug_database_from_repository( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; Ok( if let Some(bug_db_url) = crate::guess_bug_database_url_from_repo_url(&url, Some(net_access)).await { vec![UpstreamDatumWithMetadata { datum: 
UpstreamDatum::BugDatabase(bug_db_url.to_string()), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }, ) } async fn extrapolate_bug_submit_from_bug_db( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Database").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Bug-Database field is not a URL"); Ok(vec![]) } } }; let bug_submit_url = crate::bug_submit_url_from_bug_database_url(&url, Some(net_access)).await; Ok(if let Some(bug_submit_url) = bug_submit_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::BugSubmit(bug_submit_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_bug_db_from_bug_submit( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Submit").unwrap(); let old_value_url = match old_value.datum.to_url() { Some(url) => url, None => return Ok(vec![]), }; let bug_db_url = crate::bug_database_url_from_bug_submit_url(&old_value_url, Some(net_access)).await; Ok(if let Some(bug_db_url) = bug_db_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_db_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_repository_from_download( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Download").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Download field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Repository(repo), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_name_from_repository( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; if let Some(repo) = repo { let parsed: url::Url = repo.parse().unwrap(); let name = parsed.path_segments().unwrap().next_back().unwrap(); let name = name.strip_suffix(".git").unwrap_or(name); if !name.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }); } } Ok(ret) } async fn extrapolate_security_contact_from_security_md( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let repository_url = upstream_metadata.get("Repository").unwrap(); let security_md_path = upstream_metadata.get("Security-MD").unwrap(); let url = match repository_url.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; let security_url = crate::vcs::browse_url_from_repo_url( &crate::vcs::VcsLocation { url, branch: None, subpath: security_md_path.datum.as_str().map(|x| x.to_string()), }, Some(net_access), ) .await; Ok(if let Some(security_url) = security_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::SecurityContact(security_url.to_string()), certainty: std::cmp::min(repository_url.certainty, security_md_path.certainty), origin: repository_url.origin.clone(), }] } else { vec![] }) } 
async fn extrapolate_contact_from_maintainer( upstream_metadata: &UpstreamMetadata, _net_access: bool, ) -> Result, ProviderError> { let maintainer = upstream_metadata.get("Maintainer").unwrap(); Ok(vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(maintainer.datum.as_person().unwrap().to_string()), certainty: maintainer.certainty, origin: maintainer.origin.clone(), }]) } async fn consult_homepage( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { if !net_access { return Ok(vec![]); } let homepage = upstream_metadata.get("Homepage").unwrap(); let url = match homepage.datum.to_url() { Some(url) => url, None => { return { warn!("Homepage field is not a URL"); Ok(vec![]) } } }; let mut ret = vec![]; for mut entry in crate::homepage::guess_from_homepage(&url).await? { entry.certainty = std::cmp::min(homepage.certainty, entry.certainty); ret.push(entry); } Ok(ret) } const EXTRAPOLATIONS: &[Extrapolation] = &[ Extrapolation { from_fields: &["Homepage"], to_fields: &["Repository"], cb: |us, na| Box::pin(async move { extrapolate_repository_from_homepage(&us, na).await }), }, Extrapolation { from_fields: &["Repository-Browse"], to_fields: &["Homepage"], cb: |us, na| { Box::pin(async move { extrapolate_homepage_from_repository_browse(&us, na).await }) }, }, Extrapolation { from_fields: &["Bugs-Database"], to_fields: &["Bug-Database"], cb: |us, na| Box::pin(async move { copy_bug_db_field(&us, na).await }), }, Extrapolation { from_fields: &["Bug-Database"], to_fields: &["Repository"], cb: |us, na| Box::pin(async move { extrapolate_repository_from_bug_db(&us, na).await }), }, Extrapolation { from_fields: &["Repository"], to_fields: &["Repository-Browse"], cb: |us, na| { Box::pin(async move { extrapolate_repository_browse_from_repository(&us, na).await }) }, }, Extrapolation { from_fields: &["Repository-Browse"], to_fields: &["Repository"], cb: |us, na| { Box::pin(async move { extrapolate_repository_from_repository_browse(&us, 
na).await }) }, }, Extrapolation { from_fields: &["Repository"], to_fields: &["Bug-Database"], cb: |us, na| { Box::pin(async move { extrapolate_bug_database_from_repository(&us, na).await }) }, }, Extrapolation { from_fields: &["Bug-Database"], to_fields: &["Bug-Submit"], cb: |us, na| Box::pin(async move { extrapolate_bug_submit_from_bug_db(&us, na).await }), }, Extrapolation { from_fields: &["Bug-Submit"], to_fields: &["Bug-Database"], cb: |us, na| Box::pin(async move { extrapolate_bug_db_from_bug_submit(&us, na).await }), }, Extrapolation { from_fields: &["Download"], to_fields: &["Repository"], cb: |us, na| Box::pin(async move { extrapolate_repository_from_download(&us, na).await }), }, Extrapolation { from_fields: &["Repository"], to_fields: &["Name"], cb: |us, na| Box::pin(async move { extrapolate_name_from_repository(&us, na).await }), }, Extrapolation { from_fields: &["Repository", "Security-MD"], to_fields: &["Security-Contact"], cb: |us, na| { Box::pin(async move { extrapolate_security_contact_from_security_md(&us, na).await }) }, }, Extrapolation { from_fields: &["Maintainer"], to_fields: &["Contact"], cb: |us, na| Box::pin(async move { extrapolate_contact_from_maintainer(&us, na).await }), }, Extrapolation { from_fields: &["Homepage"], to_fields: &["Bug-Database", "Repository"], cb: |us, na| Box::pin(async move { consult_homepage(&us, na).await }), }, ]; /// Extrapolates missing fields from existing upstream metadata pub async fn extrapolate_fields( upstream_metadata: &mut UpstreamMetadata, net_access: bool, iteration_limit: Option, ) -> Result<(), ProviderError> { let iteration_limit = iteration_limit.unwrap_or(DEFAULT_ITERATION_LIMIT); let mut changed = true; let mut iterations = 0; while changed { changed = false; iterations += 1; if iterations > iteration_limit { return Err(ProviderError::ExtrapolationLimitExceeded(iteration_limit)); } for extrapolation in EXTRAPOLATIONS { let from_fields = extrapolation.from_fields; let to_fields = 
extrapolation.to_fields; let cb = extrapolation.cb; let from_values = from_fields .iter() .map(|f| upstream_metadata.get(f)) .collect::>(); if !from_values.iter().all(|v| v.is_some()) { log::trace!( "Not enough values for extrapolation from {:?} to {:?}", from_fields, to_fields ); continue; } let from_values = from_values .iter() .map(|v| v.unwrap().clone()) .collect::>(); let from_certainties = from_fields .iter() .map(|f| upstream_metadata.get(f).unwrap().certainty) .collect::>(); let from_certainty = *from_certainties.iter().min().unwrap(); let old_to_values: std::collections::HashMap<_, _> = to_fields .iter() .filter_map(|f| upstream_metadata.get(f).map(|v| (f, v.clone()))) .collect(); assert!(old_to_values.values().all(|v| v.certainty.is_some())); // If any of the to_fields already exist in old_to_values with a better or same // certainty, then we don't need to extrapolate. if to_fields.iter().all(|f| { old_to_values .get(f) .map(|v| v.certainty >= from_certainty) .unwrap_or(false) }) { log::trace!( "Not extrapolating from {:?} to {:?} because of certainty ({:?} >= {:?})", from_fields, to_fields, old_to_values .values() .map(|v| v.certainty) .collect::>(), from_certainty ); continue; } let extra_upstream_metadata = cb(upstream_metadata.clone(), net_access).await?; let changes = upstream_metadata.update(extra_upstream_metadata.into_iter()); if !changes.is_empty() { log::debug!( "Extrapolating ({:?} ⇒ {:?}) from ({:?})", old_to_values .iter() .map(|(k, v)| format!("{}: {}", k, v.datum)) .collect::>(), changes .iter() .map(|d| format!("{}: {}", d.datum.field(), d.datum)) .collect::>(), from_values .iter() .map(|v| format!( "{}: {} ({})", v.datum.field(), v.datum, v.certainty .map_or_else(|| "unknown".to_string(), |c| c.to_string()) )) .collect::>() ); changed = true; } } } Ok(()) } upstream-ontologist-0.3.6/src/forges/mod.rs000064400000000000000000000000721046102023000170710ustar 00000000000000/// SourceForge forge implementation pub mod sourceforge; 
upstream-ontologist-0.3.6/src/forges/sourceforge.rs000064400000000000000000000212771046102023000206470ustar 00000000000000use crate::check_bug_database_canonical; use crate::UpstreamDatum; use crate::{load_json_url, HTTPJSONError}; use lazy_regex::regex; use log::{debug, error, warn}; use reqwest::Url; async fn get_sf_metadata(project: &str) -> Option { let url = format!("https://sourceforge.net/rest/p/{}", project); match load_json_url(&Url::parse(url.as_str()).unwrap(), None).await { Ok(data) => Some(data), Err(HTTPJSONError::Error { status, .. }) if status == reqwest::StatusCode::NOT_FOUND => { None } r => panic!("Unexpected result from {}: {:?}", url, r), } } async fn parse_sf_json( data: serde_json::Value, project: &str, subproject: Option<&str>, ) -> Vec { let mut results = Vec::new(); if let Some(name) = data.get("name").and_then(|name| name.as_str()) { results.push(UpstreamDatum::Name(name.to_string())); } if let Some(external_homepage) = data.get("external_homepage").and_then(|url| url.as_str()) { results.push(UpstreamDatum::Homepage(external_homepage.to_string())); } if let Some(preferred_support_url) = data .get("preferred_support_url") .and_then(|url| url.as_str()) .filter(|x| !x.is_empty()) { let preferred_support_url = Url::parse(preferred_support_url).expect("preferred_support_url is not a valid URL"); match check_bug_database_canonical(&preferred_support_url, Some(true)).await { Ok(canonical_url) => { results.push(UpstreamDatum::BugDatabase(canonical_url.to_string())); } Err(_) => { results.push(UpstreamDatum::BugDatabase( preferred_support_url.to_string(), )); } } } let vcs_names = ["hg", "git", "svn", "cvs", "bzr"]; let mut vcs_tools = data.get("tools").map_or_else(Vec::new, |tools| { tools .as_array() .unwrap() .iter() .filter(|tool| vcs_names.contains(&tool.get("name").unwrap().as_str().unwrap())) .map(|tool| { ( tool.get("name").map_or("", |n| n.as_str().unwrap()), tool.get("mount_label").map(|l| l.as_str().unwrap()), tool.clone(), ) }) 
.collect::, serde_json::Value)>>() }); if vcs_tools.len() > 1 { vcs_tools.retain(|tool| { if let Some(url) = tool .2 .get("url") .and_then(|x| x.as_str()) .and_then(|url| url.strip_suffix('/')) { !["www", "web", "homepage"].contains(&url.rsplit('/').next().unwrap_or("")) } else { true } }); } if vcs_tools.len() > 1 && subproject.is_some() { let new_vcs_tools = vcs_tools .iter() .filter(|tool| tool.1 == subproject) .cloned() .collect::>(); if !new_vcs_tools.is_empty() { vcs_tools = new_vcs_tools; } } if vcs_tools.iter().any(|tool| tool.0 == "cvs") { vcs_tools.retain(|tool| tool.0 != "cvs"); } match vcs_tools.len().cmp(&1) { std::cmp::Ordering::Equal => { let (kind, _, data) = &vcs_tools[0]; match *kind { "git" => { if let Some(url) = data.get("clone_url_https_anon").and_then(|x| x.as_str()) { results.push(UpstreamDatum::Repository(url.to_owned())); } } "svn" => { if let Some(url) = data.get("clone_url_https_anon").and_then(|x| x.as_str()) { results.push(UpstreamDatum::Repository(url.to_owned())); } } "hg" => { if let Some(url) = data.get("clone_url_ro").and_then(|x| x.as_str()) { results.push(UpstreamDatum::Repository(url.to_owned())); } } "cvs" => { let url = format!( "cvs+pserver://anonymous@{}.cvs.sourceforge.net/cvsroot/{}", project, data.get("url") .unwrap() .as_str() .unwrap() .strip_suffix('/') .unwrap_or("") .rsplit('/') .nth(1) .unwrap_or("") ); results.push(UpstreamDatum::Repository(url)); } "bzr" => { // TODO: Implement Bazaar (BZR) handling } _ => { error!("Unknown VCS kind: {}", kind); } } } std::cmp::Ordering::Greater => { warn!("Multiple possible VCS URLs found"); } _ => {} } results } /// Guesses upstream metadata from SourceForge project information pub async fn guess_from_sf(sf_project: &str, subproject: Option<&str>) -> Vec { let mut results = Vec::new(); match get_sf_metadata(sf_project).await { Some(data) => { results.extend(parse_sf_json(data, sf_project, subproject).await); } None => { debug!("No SourceForge metadata found for {}", 
sf_project); } } results } /// Extracts the SourceForge project name from a URL pub fn extract_sf_project_name(url: &str) -> Option { let projects_regex = regex!(r"https?://sourceforge\.net/(projects|p)/([^/]+)"); if let Some(captures) = projects_regex.captures(url) { return captures.get(2).map(|m| m.as_str().to_string()); } let sf_regex = regex!(r"https?://(.*).(sf|sourceforge).(net|io)/.*"); if let Some(captures) = sf_regex.captures(url) { return captures.get(1).map(|m| m.as_str().to_string()); } None } #[cfg(test)] mod tests { use super::*; #[tokio::test] async fn test_parse_sf_json_svn() { // From https://sourceforge.net/rest/p/gtab let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/gtab.json")).unwrap(); assert_eq!( parse_sf_json(data, "gtab", Some("gtab")).await, vec![ UpstreamDatum::Name("gtab".to_string()), UpstreamDatum::Homepage("http://gtab.sourceforge.net".to_string()), UpstreamDatum::Repository("https://svn.code.sf.net/p/gtab/svn/trunk".to_string()), ] ); } #[tokio::test] async fn test_parse_sf_json_git() { // From https://sourceforge.net/rest/p/zsh let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/zsh.json")).unwrap(); assert_eq!( parse_sf_json(data, "zsh", Some("zsh")).await, vec![ UpstreamDatum::Name("zsh".to_string()), UpstreamDatum::Homepage("http://zsh.sourceforge.net/".to_string()), UpstreamDatum::Repository("https://git.code.sf.net/p/zsh/code".to_string()), ] ); } #[tokio::test] async fn test_parse_sf_json_hg_diff() { // From https://sourceforge.net/rest/p/hg-diff let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/hg-diff.json")).unwrap(); assert_eq!( parse_sf_json(data, "hg-diff", Some("hg-diff")).await, vec![ UpstreamDatum::Name("hg-diff".to_string()), UpstreamDatum::Homepage("http://hg-diff.sourceforge.net/".to_string()), UpstreamDatum::Repository("http://hg.code.sf.net/p/hg-diff/code".to_string()) ] ); } #[tokio::test] async fn test_parse_sf_json_docdb_v() { // 
From https://sourceforge.net/rest/p/docdb-v let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/docdb-v.json")).unwrap(); assert_eq!( parse_sf_json(data, "docdb-v", Some("docdb-v")).await, vec![ UpstreamDatum::Name("DocDB".to_string()), UpstreamDatum::Homepage("http://docdb-v.sourceforge.net".to_string()), UpstreamDatum::BugDatabase( "http://sourceforge.net/tracker/?func=add&group_id=164024&atid=830064" .to_string() ), UpstreamDatum::Repository("https://git.code.sf.net/p/docdb-v/git".to_string()) ] ); } } upstream-ontologist-0.3.6/src/homepage.rs000064400000000000000000000073151046102023000166210ustar 00000000000000use crate::{Certainty, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use select::document::Document; use select::predicate::Name; /// Guesses upstream metadata by analyzing a project's homepage pub async fn guess_from_homepage( url: &url::Url, ) -> Result, ProviderError> { let client = crate::http::build_client().build().unwrap(); let response = client.get(url.as_str()).send().await?; let body = response.text().await?; Ok(guess_from_page(&body, url)) } fn guess_from_page(text: &str, basehref: &url::Url) -> Vec { let fragment = Document::from(text); let mut result = Vec::new(); let origin = Some(Origin::Url(basehref.clone())); for element in fragment.find(Name("a")) { if let Some(href) = element.attr("href") { let labels: Vec> = vec![ element.attr("aria-label").map(|s| s.to_string()), Some(element.text().trim().to_string()), ]; for label in labels.iter().filter_map(|x| x.as_ref()) { match label.to_lowercase().as_str() { "github" | "git" | "repository" | "github repository" => { result.push(UpstreamDatumWithMetadata { origin: origin.clone(), datum: UpstreamDatum::Repository( basehref.join(href).unwrap().to_string(), ), certainty: Some(Certainty::Possible), }); } "github bug tracking" | "bug tracker" => { result.push(UpstreamDatumWithMetadata { origin: origin.clone(), datum: UpstreamDatum::BugDatabase( 
basehref.join(href).unwrap().to_string(), ), certainty: Some(Certainty::Possible), }); } _ => {} } } } } result } #[cfg(test)] mod tests { use super::*; #[test] fn test_guess_from_page() { let basehref = url::Url::parse("https://example.com").unwrap(); let text = r#" GitHub repository And here is a link with an aria-label: Debian bug tracker "#; let result = guess_from_page(text, &basehref); assert_eq!( result, vec![ UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::Repository("https://github.com/owner/repo".to_string()), certainty: Some(Certainty::Possible), }, UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::Repository("https://git.samba.org/samba.org".to_string()), certainty: Some(Certainty::Possible), }, UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::BugDatabase("https://bugs.debian.org/123".to_string()), certainty: Some(Certainty::Possible), }, ] ); } } upstream-ontologist-0.3.6/src/http.rs000064400000000000000000000005151046102023000160060ustar 00000000000000// Too aggressive? 
const DEFAULT_URLLIB_TIMEOUT: u64 = 3; /// Builds an HTTP client with default settings for upstream metadata fetching pub fn build_client() -> reqwest::ClientBuilder { reqwest::Client::builder() .user_agent(crate::USER_AGENT) .timeout(std::time::Duration::from_secs(DEFAULT_URLLIB_TIMEOUT)) } upstream-ontologist-0.3.6/src/lib.rs000064400000000000000000004541331046102023000156060ustar 00000000000000// pyo3 macros use a gil-refs feature #![allow(unexpected_cfgs)] #![deny(missing_docs)] #![doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/README.md"))] use futures::stream::StreamExt; use futures::Stream; use lazy_regex::regex; use log::{debug, warn}; use percent_encoding::utf8_percent_encode; #[cfg(feature = "pyo3")] use pyo3::{ exceptions::{PyRuntimeError, PyTypeError, PyValueError}, prelude::*, types::PyDict, }; use reqwest::header::HeaderMap; use serde::ser::SerializeSeq; use std::cmp::Ordering; use std::fs::File; use std::io::Read; use std::pin::Pin; use std::str::FromStr; use std::path::{Path, PathBuf}; use url::Url; static USER_AGENT: &str = concat!("upstream-ontologist/", env!("CARGO_PKG_VERSION")); /// Functionality for extrapolating upstream metadata from various sources pub mod extrapolate; /// Support for various code forges (GitHub, GitLab, etc.) 
pub mod forges; /// Homepage URL detection and validation pub mod homepage; /// HTTP utilities for fetching remote resources pub mod http; /// Various metadata providers for different programming languages and ecosystems pub mod providers; /// README file parsing and metadata extraction pub mod readme; /// Integration with Repology package repository aggregator pub mod repology; /// Version control system utilities and URL handling pub mod vcs; /// Command-line interface for version control operations pub mod vcs_command; #[cfg(test)] mod upstream_tests { include!(concat!(env!("OUT_DIR"), "/upstream_tests.rs")); } #[cfg(test)] mod readme_tests { include!(concat!(env!("OUT_DIR"), "/readme_tests.rs")); } #[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)] /// Certainty levels for the data pub enum Certainty { /// This datum is possibly correct, but it is a guess Possible, /// This datum is likely to be correct, but we are not sure Likely, /// We're confident about this datum, but there is a chance it is wrong Confident, /// We're certain about this datum Certain, } #[derive(Clone, Debug, PartialEq, Eq)] /// Origin of the data pub enum Origin { /// Read from a file Path(PathBuf), /// Read from a URL Url(url::Url), /// Other origin; described by a string Other(String), } impl std::fmt::Display for Origin { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Origin::Path(path) => write!(f, "{}", path.display()), Origin::Url(url) => write!(f, "{}", url), Origin::Other(s) => write!(f, "{}", s), } } } impl From<&std::path::Path> for Origin { fn from(path: &std::path::Path) -> Self { Origin::Path(path.to_path_buf()) } } impl From for Origin { fn from(path: std::path::PathBuf) -> Self { Origin::Path(path) } } impl From for Origin { fn from(url: url::Url) -> Self { Origin::Url(url) } } #[cfg(feature = "pyo3")] impl<'py> IntoPyObject<'py> for &Origin { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn 
into_pyobject(self, py: Python<'py>) -> Result { match self { Origin::Path(path) => Ok(path.to_str().unwrap().into_pyobject(py)?.into_any()), Origin::Url(url) => Ok(url.to_string().into_pyobject(py)?.into_any()), Origin::Other(s) => Ok(s.into_pyobject(py)?.into_any()), } } } #[cfg(feature = "pyo3")] impl<'py> IntoPyObject<'py> for Origin { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { match self { Origin::Path(path) => Ok(path.to_str().unwrap().into_pyobject(py)?.into_any()), Origin::Url(url) => Ok(url.to_string().into_pyobject(py)?.into_any()), Origin::Other(s) => Ok(s.into_pyobject(py)?.into_any()), } } } #[cfg(feature = "pyo3")] impl<'py> FromPyObject<'_, 'py> for Origin { type Error = PyErr; fn extract(ob: pyo3::Borrowed<'_, 'py, PyAny>) -> PyResult { if let Ok(path) = ob.extract::() { Ok(Origin::Path(path)) } else if let Ok(s) = ob.extract::() { Ok(Origin::Other(s)) } else { Err(PyTypeError::new_err("expected str or Path")) } } } impl FromStr for Certainty { type Err = String; fn from_str(s: &str) -> Result { match s { "certain" => Ok(Certainty::Certain), "confident" => Ok(Certainty::Confident), "likely" => Ok(Certainty::Likely), "possible" => Ok(Certainty::Possible), _ => Err(format!("unknown certainty: {}", s)), } } } impl std::fmt::Display for Certainty { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Certainty::Certain => write!(f, "certain"), Certainty::Confident => write!(f, "confident"), Certainty::Likely => write!(f, "likely"), Certainty::Possible => write!(f, "possible"), } } } #[cfg(feature = "pyo3")] impl<'py> FromPyObject<'_, 'py> for Certainty { type Error = PyErr; fn extract(ob: pyo3::Borrowed<'_, 'py, PyAny>) -> PyResult { let o: String = ob.extract::()?; o.parse().map_err(PyValueError::new_err) } } /// Represents a person (author, maintainer, etc.) 
with optional contact information #[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct Person { /// The person's name pub name: Option, /// The person's email address pub email: Option, /// The person's URL (e.g., personal website, profile) pub url: Option, } impl serde::ser::Serialize for Person { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { let mut map = serde_yaml::Mapping::new(); if let Some(name) = &self.name { map.insert( serde_yaml::Value::String("name".to_string()), serde_yaml::Value::String(name.to_string()), ); } if let Some(email) = &self.email { map.insert( serde_yaml::Value::String("email".to_string()), serde_yaml::Value::String(email.to_string()), ); } if let Some(url) = &self.url { map.insert( serde_yaml::Value::String("url".to_string()), serde_yaml::Value::String(url.to_string()), ); } let tag = serde_yaml::value::TaggedValue { tag: serde_yaml::value::Tag::new("!Person"), value: serde_yaml::Value::Mapping(map), }; tag.serialize(serializer) } } impl<'a> serde::de::Deserialize<'a> for Person { fn deserialize(deserializer: D) -> Result where D: serde::de::Deserializer<'a>, { let value = serde_yaml::Value::deserialize(deserializer)?; if let serde_yaml::Value::Mapping(map) = value { let mut name = None; let mut email = None; let mut url = None; for (k, v) in map { match k { serde_yaml::Value::String(k) => match k.as_str() { "name" => { if let serde_yaml::Value::String(s) = v { name = Some(s); } } "email" => { if let serde_yaml::Value::String(s) = v { email = Some(s); } } "url" => { if let serde_yaml::Value::String(s) = v { url = Some(s); } } n => { return Err(serde::de::Error::custom(format!("unknown key: {}", n))); } }, n => { return Err(serde::de::Error::custom(format!( "expected string key, got {:?}", n ))); } } } Ok(Person { name, email, url }) } else { Err(serde::de::Error::custom("expected mapping")) } } } impl std::fmt::Display for Person { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result 
{ write!(f, "{}", self.name.as_ref().unwrap_or(&"".to_string()))?; if let Some(email) = &self.email { write!(f, " <{}>", email)?; } if let Some(url) = &self.url { write!(f, " ({})", url)?; } Ok(()) } } impl From<&str> for Person { fn from(text: &str) -> Self { let mut text = text.replace(" at ", "@"); text = text.replace(" -at- ", "@"); text = text.replace(" -dot- ", "."); text = text.replace("[AT]", "@"); if text.contains('(') && text.ends_with(')') { if let Some((p1, p2)) = text[..text.len() - 1].split_once('(') { if p2.starts_with("https://") || p2.starts_with("http://") { let url = p2.to_string(); if let Some((name, email)) = parseaddr(p1) { Person { name: Some(name), email: Some(email), url: Some(url), } } else { Person { name: Some(p1.to_string()), url: Some(url), ..Default::default() } } } else if p2.contains('@') { Person { name: Some(p1.to_string()), email: Some(p2.to_string()), ..Default::default() } } else { Person { name: Some(text.to_string()), ..Default::default() } } } else { Person { name: Some(text.to_string()), ..Default::default() } } } else if text.contains('<') { if let Some((name, email)) = parseaddr(text.as_str()) { return Person { name: Some(name), email: Some(email), ..Default::default() }; } else { Person { name: Some(text.to_string()), ..Default::default() } } } else if text.contains('@') && !text.contains(' ') { return Person { email: Some(text), ..Default::default() }; } else { Person { name: Some(text), ..Default::default() } } } } #[cfg(feature = "pyo3")] impl<'py> IntoPyObject<'py> for &Person { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let m = PyModule::import(py, "upstream_ontologist")?; let person_cls = m.getattr("Person")?; person_cls.call1((self.name.as_ref(), self.email.as_ref(), self.url.as_ref())) } } fn parseaddr(text: &str) -> Option<(String, String)> { let re = regex!(r"(.*?)\s*<([^<>]+)>"); if let Some(captures) = re.captures(text) 
{ let name = captures.get(1).map(|m| m.as_str().trim().to_string()); let email = captures.get(2).map(|m| m.as_str().trim().to_string()); if let (Some(name), Some(email)) = (name, email) { return Some((name, email)); } } None } #[cfg(feature = "pyo3")] impl<'py> FromPyObject<'_, 'py> for Person { type Error = PyErr; fn extract(ob: pyo3::Borrowed<'_, 'py, PyAny>) -> PyResult { let name = ob.getattr("name")?.extract::>()?; let email = ob.getattr("email")?.extract::>()?; let url = ob.getattr("url")?.extract::>()?; Ok(Person { name, email, url }) } } /// Represents various types of upstream metadata for a software project #[derive(Clone, Debug, PartialEq, Eq)] pub enum UpstreamDatum { /// Name of the project. /// /// This is a brief name of the project, as it would be used in a URL. /// Generally speaking it would be lowercase, and may contain dashes or underscores. /// It would commonly be the name of the repository. Name(String), /// URL to project homepage. /// /// This is the URL to the project's homepage, which may be a website or a /// repository. It is not a URL to a specific file or page, but rather the main /// entry point for the project. Homepage(String), /// URL to the project's source code repository. /// /// This is the URL to the project's source code repository, as it would be used /// in a command line tool to clone the repository. It may be a URL to a specific /// branch or tag, but it is generally the URL to the main repository. Repository(String), /// URL to browse the project's source code repository /// /// This is the URL to the project's source code repository, as it would be used /// in a web browser to browse the repository. It may be a URL to a specific /// branch or tag, but it is generally the URL to the main repository. RepositoryBrowse(String), /// Long description of the project /// /// This is a long description of the project, which may be several paragraphs /// long. 
It is generally a more detailed description of the project than the /// summary. Description(String), /// Short summary of the project (one line) /// /// This is a short summary of the project, which is generally one line long. /// It is generally a brief description of the project, and may be used in /// search results or in a list of projects. Summary(String), /// License name or SPDX identifier /// /// This is the name of the license under which the project is released. It may /// be a full license name, or it may be an SPDX identifier (preferred). /// /// See for a list of SPDX identifiers. License(String), /// List of authors /// /// This is a list of authors of the project, which may be a list of names, /// email addresses, or URLs. Author(Vec), /// List of maintainers /// /// This is a list of maintainers of the project, which may be a list of names, /// email addresses, or URLs. Maintainer(Person), /// URL of the project's issue tracker /// /// This is the URL to the project's issue tracker, which may be a bug tracker, /// feature tracker, or other type of issue tracker. It is not a URL to a /// specific issue, but rather the main entry point for the issue tracker. BugDatabase(String), /// URL to submit a new bug /// /// This is the URL to submit a new bug to the project's issue tracker. It /// may be a URL to a specific page or form. /// /// It can also be an email address (mailto:...), in which case it is the email address to send /// the bug report to. BugSubmit(String), /// URL to the project's contact page or email address /// /// This is the URL to the project's contact page, which may be a web page or /// an email address. It is not a URL to a specific file or page, but rather /// the main entry point for the contact page. Contact(String), /// Cargo crate name /// /// If the project is a Rust crate, this is the name of the crate on /// crates.io. It is not a URL to the crate, but rather the name of the /// crate. 
CargoCrate(String), /// Name of the security page name /// /// This would be the name of a markdown file in the source directory /// that contains security information about the project. It is not a URL to /// a specific file or page, but rather the name of the file. SecurityMD(String), /// URL to the security page or email address /// /// This is the URL to the project's security page, which may be a web page or /// an email address. It is not a URL to a specific file or page, but rather /// the main entry point for the security page. /// /// It can also be an email address (mailto:...), in which case it is the email address to send /// the security report to. SecurityContact(String), /// Last version of the project /// /// This is the last version of the project, which would generally be a version string /// /// There is no guarantee that this is the last version of the project. /// /// There is no guarantee about which versioning scheme is used, e.g. it may be /// a semantic version, a date-based version, or a commit hash. Version(String), /// List of keywords /// /// This is a list of keywords that describe the project. It may be a list of /// words, phrases, or tags. Keywords(Vec), /// Copyright notice /// /// This is the copyright notice for the project, which may be a list of /// copyright holders, years, or other information. Copyright(String), /// URL to the project's documentation /// /// This is the URL to the project's documentation, which may be a web page or /// a file. It is not a URL to a specific file or page, but rather the main /// entry point for the documentation. Documentation(String), /// URL to the project's API documentation /// /// This is the URL to the project's API documentation, which may be a web page or /// a file. It is not a URL to a specific file or page, but rather the main /// entry point for the API documentation. 
APIDocumentation(String), /// Go import path /// /// If this is a Go project, this is the import path for the project. It is not a URL /// to the project, but rather the import path. GoImportPath(String), /// URL to the project's download page /// /// This is the URL to the project's download page, which may be a web page or /// a file. It is not a URL to a specific file or page, but rather the main /// entry point for the download page. Download(String), /// URL to the project's wiki /// /// This is the URL to the project's wiki. Wiki(String), /// URL to the project's mailing list /// /// This is the URL to the project's mailing list, which may be a web page or /// an email address. It is not a URL to a specific file or page, but rather /// the main entry point for the mailing list. /// /// It can also be an email address (mailto:...), in which case it is the email address to send /// email to to subscribe to the mailing list. MailingList(String), /// SourceForge project name /// /// This is the name of the project on SourceForge. It is not a URL to the /// project, but rather the name of the project. SourceForgeProject(String), /// If this project is provided by a specific archive, this is the name of the archive. /// /// E.g. "CRAN", "CPAN", "PyPI", "RubyGems", "NPM", etc. Archive(String), /// URL to a demo instance /// /// This is the URL to a demo instance of the project. This instance will be loaded /// with sample data, and will be used to demonstrate the project. It is not /// a full instance of the project - the Webservice field should be used for that. Demo(String), /// PHP PECL package name /// /// If this is a PHP project, this is the name of the package on PECL. It is not a URL /// to the package, but rather the name of the package. PeclPackage(String), /// Description of funding sources /// /// This is a description of the funding sources for the project. 
It may be a /// URL to a page that describes the funding sources, or it may be a list of /// funding sources. /// /// Note that this is different from the Donation field, which is a URL to a /// donation page. Funding(String), /// URL to the changelog /// /// This is the URL to the project's changelog, which may be a web page or /// a file. No guarantee is made about the format of the changelog, but it is /// generally a file that contains a list of changes made to the project. Changelog(String), /// Haskell package name /// /// If this is a Haskell project, this is the name of the package on Hackage. It is not a URL /// to the package, but rather the name of the package. HaskellPackage(String), /// Debian ITP (Intent To Package) bug number /// /// This is the bug number of the ITP bug in the Debian bug tracker. It is not a URL /// to the bug, but rather the bug number. DebianITP(i32), /// List of URLs to screenshots /// /// This is a list of URLs to screenshots of the project. It will be a list of /// URLs, which may be web pages or images. Screenshots(Vec), /// Name of registry Registry(Vec<(String, String)>), /// Recommended way to cite the software /// /// This is the recommended way to cite the software, which may be a URL or a /// DOI. CiteAs(String), /// Link for donations (e.g. Paypal, Libera, etc) /// /// This is a URL to a donation page, which should be a web page. /// It is different from the Funding field, which describes /// the funding the project has received. Donation(String), /// Link to a life instance of the webservice /// /// This is the URL to the live instance of the project. This should generally /// be the canonical instance of the project. /// /// For demo instances, see the Demo field. Webservice(String), /// Name of the buildsystem used /// /// This is the name of the buildsystem used by the project. E.g. 
"make", "cmake", /// "meson", etc BuildSystem(String), /// FAQ /// /// This is the URL to the project's FAQ, which may be a web page or a file. FAQ(String), } /// Upstream datum with additional metadata about its origin and certainty #[derive(PartialEq, Eq, Debug, Clone)] pub struct UpstreamDatumWithMetadata { /// The upstream datum itself pub datum: UpstreamDatum, /// Where this datum was obtained from pub origin: Option, /// How certain we are about this datum pub certainty: Option, } fn known_bad_url(value: &str) -> bool { if value.contains("${") { return true; } false } impl UpstreamDatum { /// Returns the field name for this datum type pub fn field(&self) -> &'static str { match self { UpstreamDatum::Summary(..) => "Summary", UpstreamDatum::Description(..) => "Description", UpstreamDatum::Name(..) => "Name", UpstreamDatum::Homepage(..) => "Homepage", UpstreamDatum::Repository(..) => "Repository", UpstreamDatum::RepositoryBrowse(..) => "Repository-Browse", UpstreamDatum::License(..) => "License", UpstreamDatum::Author(..) => "Author", UpstreamDatum::BugDatabase(..) => "Bug-Database", UpstreamDatum::BugSubmit(..) => "Bug-Submit", UpstreamDatum::Contact(..) => "Contact", UpstreamDatum::CargoCrate(..) => "Cargo-Crate", UpstreamDatum::SecurityMD(..) => "Security-MD", UpstreamDatum::SecurityContact(..) => "Security-Contact", UpstreamDatum::Version(..) => "Version", UpstreamDatum::Keywords(..) => "Keywords", UpstreamDatum::Maintainer(..) => "Maintainer", UpstreamDatum::Copyright(..) => "Copyright", UpstreamDatum::Documentation(..) => "Documentation", UpstreamDatum::APIDocumentation(..) => "API-Documentation", UpstreamDatum::GoImportPath(..) => "Go-Import-Path", UpstreamDatum::Download(..) => "Download", UpstreamDatum::Wiki(..) => "Wiki", UpstreamDatum::MailingList(..) => "MailingList", UpstreamDatum::SourceForgeProject(..) => "SourceForge-Project", UpstreamDatum::Archive(..) => "Archive", UpstreamDatum::Demo(..) => "Demo", UpstreamDatum::PeclPackage(..) 
=> "Pecl-Package", UpstreamDatum::HaskellPackage(..) => "Haskell-Package", UpstreamDatum::Funding(..) => "Funding", UpstreamDatum::Changelog(..) => "Changelog", UpstreamDatum::DebianITP(..) => "Debian-ITP", UpstreamDatum::Screenshots(..) => "Screenshots", UpstreamDatum::Registry(..) => "Registry", UpstreamDatum::CiteAs(..) => "Cite-As", UpstreamDatum::Donation(..) => "Donation", UpstreamDatum::Webservice(..) => "Webservice", UpstreamDatum::BuildSystem(..) => "BuildSystem", UpstreamDatum::FAQ(..) => "FAQ", } } /// Returns the string value if this datum contains a simple string pub fn as_str(&self) -> Option<&str> { match self { UpstreamDatum::Name(s) => Some(s), UpstreamDatum::Homepage(s) => Some(s), UpstreamDatum::Repository(s) => Some(s), UpstreamDatum::RepositoryBrowse(s) => Some(s), UpstreamDatum::Description(s) => Some(s), UpstreamDatum::Summary(s) => Some(s), UpstreamDatum::License(s) => Some(s), UpstreamDatum::BugDatabase(s) => Some(s), UpstreamDatum::BugSubmit(s) => Some(s), UpstreamDatum::Contact(s) => Some(s), UpstreamDatum::CargoCrate(s) => Some(s), UpstreamDatum::SecurityMD(s) => Some(s), UpstreamDatum::SecurityContact(s) => Some(s), UpstreamDatum::Version(s) => Some(s), UpstreamDatum::Documentation(s) => Some(s), UpstreamDatum::APIDocumentation(s) => Some(s), UpstreamDatum::GoImportPath(s) => Some(s), UpstreamDatum::Download(s) => Some(s), UpstreamDatum::Wiki(s) => Some(s), UpstreamDatum::MailingList(s) => Some(s), UpstreamDatum::SourceForgeProject(s) => Some(s), UpstreamDatum::Archive(s) => Some(s), UpstreamDatum::Demo(s) => Some(s), UpstreamDatum::PeclPackage(s) => Some(s), UpstreamDatum::HaskellPackage(s) => Some(s), UpstreamDatum::Author(..) => None, UpstreamDatum::Maintainer(..) => None, UpstreamDatum::Keywords(..) => None, UpstreamDatum::Copyright(c) => Some(c), UpstreamDatum::Funding(f) => Some(f), UpstreamDatum::Changelog(c) => Some(c), UpstreamDatum::Screenshots(..) 
=> None, UpstreamDatum::DebianITP(_c) => None, UpstreamDatum::CiteAs(c) => Some(c), UpstreamDatum::Registry(_) => None, UpstreamDatum::Donation(d) => Some(d), UpstreamDatum::Webservice(w) => Some(w), UpstreamDatum::BuildSystem(b) => Some(b), UpstreamDatum::FAQ(f) => Some(f), } } /// Converts the datum to a URL if applicable pub fn to_url(&self) -> Option { match self { UpstreamDatum::Name(..) => None, UpstreamDatum::Homepage(s) => Some(s.parse().ok()?), UpstreamDatum::Repository(s) => Some(s.parse().ok()?), UpstreamDatum::RepositoryBrowse(s) => Some(s.parse().ok()?), UpstreamDatum::Description(..) => None, UpstreamDatum::Summary(..) => None, UpstreamDatum::License(..) => None, UpstreamDatum::BugDatabase(s) => Some(s.parse().ok()?), UpstreamDatum::BugSubmit(s) => Some(s.parse().ok()?), UpstreamDatum::Contact(..) => None, UpstreamDatum::CargoCrate(s) => Some(s.parse().ok()?), UpstreamDatum::SecurityMD(..) => None, UpstreamDatum::SecurityContact(..) => None, UpstreamDatum::Version(..) => None, UpstreamDatum::Documentation(s) => Some(s.parse().ok()?), UpstreamDatum::APIDocumentation(s) => Some(s.parse().ok()?), UpstreamDatum::GoImportPath(_s) => None, UpstreamDatum::Download(s) => Some(s.parse().ok()?), UpstreamDatum::Wiki(s) => Some(s.parse().ok()?), UpstreamDatum::MailingList(s) => Some(s.parse().ok()?), UpstreamDatum::SourceForgeProject(s) => Some(s.parse().ok()?), UpstreamDatum::Archive(s) => Some(s.parse().ok()?), UpstreamDatum::Demo(s) => Some(s.parse().ok()?), UpstreamDatum::PeclPackage(_s) => None, UpstreamDatum::HaskellPackage(_s) => None, UpstreamDatum::Author(..) => None, UpstreamDatum::Maintainer(..) => None, UpstreamDatum::Keywords(..) => None, UpstreamDatum::Copyright(..) => None, UpstreamDatum::Funding(s) => Some(s.parse().ok()?), UpstreamDatum::Changelog(s) => Some(s.parse().ok()?), UpstreamDatum::Screenshots(..) 
=> None, UpstreamDatum::DebianITP(_c) => None, UpstreamDatum::Registry(_r) => None, UpstreamDatum::CiteAs(_c) => None, UpstreamDatum::Donation(_d) => None, UpstreamDatum::Webservice(w) => Some(w.parse().ok()?), UpstreamDatum::BuildSystem(_) => None, UpstreamDatum::FAQ(f) => Some(f.parse().ok()?), } } /// Returns the person if this datum contains person information pub fn as_person(&self) -> Option<&Person> { match self { UpstreamDatum::Maintainer(p) => Some(p), _ => None, } } /// Checks if this datum is known to be a bad guess based on common patterns pub fn known_bad_guess(&self) -> bool { match self { UpstreamDatum::BugDatabase(s) | UpstreamDatum::BugSubmit(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("bugzilla.gnome.org") { return true; } if url.host_str() == Some("bugs.freedesktop.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Repository(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("anongit.kde.org") { return true; } if url.host_str() == Some("git.gitorious.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Homepage(s) => { let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("pypi.org") { return true; } if url.host_str() == Some("rubygems.org") { return true; } } UpstreamDatum::RepositoryBrowse(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("cgit.kde.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Author(authors) => { for a in authors { if let Some(name) = &a.name { let lc = name.to_lowercase(); if lc.contains("unknown") { return true; } if lc.contains("maintainer") { return true; } if 
lc.contains("contributor") { return true; } } } } UpstreamDatum::Name(s) => { let lc = s.to_lowercase(); if lc.contains("unknown") { return true; } if lc == "package" { return true; } } UpstreamDatum::Version(s) => { let lc = s.to_lowercase(); if ["devel", "unknown"].contains(&lc.as_str()) { return true; } } _ => (), } false } } impl std::fmt::Display for UpstreamDatum { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { UpstreamDatum::Name(s) => write!(f, "Name: {}", s), UpstreamDatum::Homepage(s) => write!(f, "Homepage: {}", s), UpstreamDatum::Repository(s) => write!(f, "Repository: {}", s), UpstreamDatum::RepositoryBrowse(s) => write!(f, "RepositoryBrowse: {}", s), UpstreamDatum::Description(s) => write!(f, "Description: {}", s), UpstreamDatum::Summary(s) => write!(f, "Summary: {}", s), UpstreamDatum::License(s) => write!(f, "License: {}", s), UpstreamDatum::BugDatabase(s) => write!(f, "BugDatabase: {}", s), UpstreamDatum::BugSubmit(s) => write!(f, "BugSubmit: {}", s), UpstreamDatum::Contact(s) => write!(f, "Contact: {}", s), UpstreamDatum::CargoCrate(s) => write!(f, "CargoCrate: {}", s), UpstreamDatum::SecurityMD(s) => write!(f, "SecurityMD: {}", s), UpstreamDatum::SecurityContact(s) => write!(f, "SecurityContact: {}", s), UpstreamDatum::Version(s) => write!(f, "Version: {}", s), UpstreamDatum::Documentation(s) => write!(f, "Documentation: {}", s), UpstreamDatum::APIDocumentation(s) => write!(f, "API-Documentation: {}", s), UpstreamDatum::GoImportPath(s) => write!(f, "GoImportPath: {}", s), UpstreamDatum::Download(s) => write!(f, "Download: {}", s), UpstreamDatum::Wiki(s) => write!(f, "Wiki: {}", s), UpstreamDatum::MailingList(s) => write!(f, "MailingList: {}", s), UpstreamDatum::SourceForgeProject(s) => write!(f, "SourceForgeProject: {}", s), UpstreamDatum::Archive(s) => write!(f, "Archive: {}", s), UpstreamDatum::Demo(s) => write!(f, "Demo: {}", s), UpstreamDatum::PeclPackage(s) => write!(f, "PeclPackage: {}", s), 
UpstreamDatum::Author(authors) => { write!( f, "Author: {}", authors .iter() .map(|a| a.to_string()) .collect::>() .join(", ") ) } UpstreamDatum::Maintainer(maintainer) => { write!(f, "Maintainer: {}", maintainer) } UpstreamDatum::Keywords(keywords) => { write!( f, "Keywords: {}", keywords .iter() .map(|a| a.to_string()) .collect::>() .join(", ") ) } UpstreamDatum::Copyright(s) => { write!(f, "Copyright: {}", s) } UpstreamDatum::Funding(s) => { write!(f, "Funding: {}", s) } UpstreamDatum::Changelog(s) => { write!(f, "Changelog: {}", s) } UpstreamDatum::DebianITP(s) => { write!(f, "DebianITP: {}", s) } UpstreamDatum::HaskellPackage(p) => { write!(f, "HaskellPackage: {}", p) } UpstreamDatum::Screenshots(s) => { write!(f, "Screenshots: {}", s.join(", ")) } UpstreamDatum::Registry(r) => { write!(f, "Registry:")?; for (k, v) in r { write!(f, " - Name: {}", k)?; write!(f, " Entry: {}", v)?; } Ok(()) } UpstreamDatum::CiteAs(c) => { write!(f, "Cite-As: {}", c) } UpstreamDatum::Donation(d) => { write!(f, "Donation: {}", d) } UpstreamDatum::Webservice(w) => { write!(f, "Webservice: {}", w) } UpstreamDatum::BuildSystem(bs) => { write!(f, "BuildSystem: {}", bs) } UpstreamDatum::FAQ(faq) => { write!(f, "FAQ: {}", faq) } } } } impl serde::ser::Serialize for UpstreamDatum { fn serialize(&self, serializer: S) -> Result { match self { UpstreamDatum::Name(s) => serializer.serialize_str(s), UpstreamDatum::Homepage(s) => serializer.serialize_str(s), UpstreamDatum::Repository(s) => serializer.serialize_str(s), UpstreamDatum::RepositoryBrowse(s) => serializer.serialize_str(s), UpstreamDatum::Description(s) => serializer.serialize_str(s), UpstreamDatum::Summary(s) => serializer.serialize_str(s), UpstreamDatum::License(s) => serializer.serialize_str(s), UpstreamDatum::BugDatabase(s) => serializer.serialize_str(s), UpstreamDatum::BugSubmit(s) => serializer.serialize_str(s), UpstreamDatum::Contact(s) => serializer.serialize_str(s), UpstreamDatum::CargoCrate(s) => 
serializer.serialize_str(s), UpstreamDatum::SecurityMD(s) => serializer.serialize_str(s), UpstreamDatum::SecurityContact(s) => serializer.serialize_str(s), UpstreamDatum::Version(s) => serializer.serialize_str(s), UpstreamDatum::Documentation(s) => serializer.serialize_str(s), UpstreamDatum::APIDocumentation(s) => serializer.serialize_str(s), UpstreamDatum::GoImportPath(s) => serializer.serialize_str(s), UpstreamDatum::Download(s) => serializer.serialize_str(s), UpstreamDatum::Wiki(s) => serializer.serialize_str(s), UpstreamDatum::MailingList(s) => serializer.serialize_str(s), UpstreamDatum::SourceForgeProject(s) => serializer.serialize_str(s), UpstreamDatum::Archive(s) => serializer.serialize_str(s), UpstreamDatum::Demo(s) => serializer.serialize_str(s), UpstreamDatum::PeclPackage(s) => serializer.serialize_str(s), UpstreamDatum::Author(authors) => { let mut seq = serializer.serialize_seq(Some(authors.len()))?; for a in authors { seq.serialize_element(a)?; } seq.end() } UpstreamDatum::Maintainer(maintainer) => maintainer.serialize(serializer), UpstreamDatum::Keywords(keywords) => { let mut seq = serializer.serialize_seq(Some(keywords.len()))?; for a in keywords { seq.serialize_element(a)?; } seq.end() } UpstreamDatum::Copyright(s) => serializer.serialize_str(s), UpstreamDatum::Funding(s) => serializer.serialize_str(s), UpstreamDatum::Changelog(s) => serializer.serialize_str(s), UpstreamDatum::DebianITP(s) => serializer.serialize_i32(*s), UpstreamDatum::HaskellPackage(p) => serializer.serialize_str(p), UpstreamDatum::Screenshots(s) => { let mut seq = serializer.serialize_seq(Some(s.len()))?; for s in s { seq.serialize_element(s)?; } seq.end() } UpstreamDatum::CiteAs(c) => serializer.serialize_str(c), UpstreamDatum::Registry(r) => { let mut l = serializer.serialize_seq(Some(r.len()))?; for (k, v) in r { let mut m = serde_yaml::Mapping::new(); m.insert( serde_yaml::Value::String("Name".to_string()), serde_yaml::to_value(k).unwrap(), ); m.insert( 
serde_yaml::Value::String("Entry".to_string()), serde_yaml::to_value(v).unwrap(), ); l.serialize_element(&m)?; } l.end() } UpstreamDatum::Donation(d) => serializer.serialize_str(d), UpstreamDatum::Webservice(w) => serializer.serialize_str(w), UpstreamDatum::BuildSystem(bs) => serializer.serialize_str(bs), UpstreamDatum::FAQ(faq) => serializer.serialize_str(faq), } } } /// Collection of upstream metadata with convenience methods for accessing specific fields #[derive(PartialEq, Eq, Debug, Clone)] pub struct UpstreamMetadata(Vec); impl UpstreamMetadata { /// Creates a new empty UpstreamMetadata instance pub fn new() -> Self { UpstreamMetadata(Vec::new()) } /// Returns true if the metadata collection is empty pub fn is_empty(&self) -> bool { self.0.is_empty() } /// Returns the number of metadata items pub fn len(&self) -> usize { self.0.len() } /// Sorts the metadata items by field name pub fn sort(&mut self) { self.0.sort_by(|a, b| a.datum.field().cmp(b.datum.field())); } /// Creates a new UpstreamMetadata from a vector of data pub fn from_data(data: Vec) -> Self { Self(data) } /// Returns a mutable reference to the underlying data vector pub fn mut_items(&mut self) -> &mut Vec { &mut self.0 } /// Returns an iterator over the metadata items pub fn iter(&self) -> impl Iterator { self.0.iter() } /// Returns a mutable iterator over the metadata items pub fn mut_iter(&mut self) -> impl Iterator { self.0.iter_mut() } /// Gets a metadata item by field name pub fn get(&self, field: &str) -> Option<&UpstreamDatumWithMetadata> { self.0.iter().find(|d| d.datum.field() == field) } /// Gets a mutable reference to a metadata item by field name pub fn get_mut(&mut self, field: &str) -> Option<&mut UpstreamDatumWithMetadata> { self.0.iter_mut().find(|d| d.datum.field() == field) } /// Inserts a new metadata item pub fn insert(&mut self, datum: UpstreamDatumWithMetadata) { self.0.push(datum); } /// Checks if a field exists in the metadata pub fn contains_key(&self, field: &str) -> 
bool { self.get(field).is_some() } /// Removes metadata items that are known to be bad guesses pub fn discard_known_bad(&mut self) { self.0.retain(|d| !d.datum.known_bad_guess()); } /// Updates the metadata with new items, returning the replaced items pub fn update( &mut self, new_items: impl Iterator, ) -> Vec { update_from_guesses(&mut self.0, new_items) } /// Removes and returns a metadata item by field name pub fn remove(&mut self, field: &str) -> Option { let index = self.0.iter().position(|d| d.datum.field() == field)?; Some(self.0.remove(index)) } /// Gets the project name pub fn name(&self) -> Option<&str> { self.get("Name").and_then(|d| d.datum.as_str()) } /// Gets the project homepage URL pub fn homepage(&self) -> Option<&str> { self.get("Homepage").and_then(|d| d.datum.as_str()) } /// Gets the repository URL pub fn repository(&self) -> Option<&str> { self.get("Repository").and_then(|d| d.datum.as_str()) } /// Gets the repository browse URL pub fn repository_browse(&self) -> Option<&str> { self.get("Repository-Browse").and_then(|d| d.datum.as_str()) } /// Gets the project description pub fn description(&self) -> Option<&str> { self.get("Description").and_then(|d| d.datum.as_str()) } /// Gets the project summary pub fn summary(&self) -> Option<&str> { self.get("Summary").and_then(|d| d.datum.as_str()) } /// Gets the project license pub fn license(&self) -> Option<&str> { self.get("License").and_then(|d| d.datum.as_str()) } /// Gets the list of authors pub fn author(&self) -> Option<&Vec> { self.get("Author").map(|d| match &d.datum { UpstreamDatum::Author(authors) => authors, _ => unreachable!(), }) } /// Gets the maintainer information pub fn maintainer(&self) -> Option<&Person> { self.get("Maintainer").map(|d| match &d.datum { UpstreamDatum::Maintainer(maintainer) => maintainer, _ => unreachable!(), }) } /// Gets the bug database URL pub fn bug_database(&self) -> Option<&str> { self.get("Bug-Database").and_then(|d| d.datum.as_str()) } /// Gets the bug 
submission URL or email pub fn bug_submit(&self) -> Option<&str> { self.get("Bug-Submit").and_then(|d| d.datum.as_str()) } /// Gets the contact information pub fn contact(&self) -> Option<&str> { self.get("Contact").and_then(|d| d.datum.as_str()) } /// Gets the Cargo crate name pub fn cargo_crate(&self) -> Option<&str> { self.get("Cargo-Crate").and_then(|d| d.datum.as_str()) } /// Gets the security markdown file name pub fn security_md(&self) -> Option<&str> { self.get("Security-MD").and_then(|d| d.datum.as_str()) } /// Gets the security contact information pub fn security_contact(&self) -> Option<&str> { self.get("Security-Contact").and_then(|d| d.datum.as_str()) } /// Gets the project version pub fn version(&self) -> Option<&str> { self.get("Version").and_then(|d| d.datum.as_str()) } /// Gets the list of keywords pub fn keywords(&self) -> Option<&Vec> { self.get("Keywords").map(|d| match &d.datum { UpstreamDatum::Keywords(keywords) => keywords, _ => unreachable!(), }) } /// Gets the documentation URL pub fn documentation(&self) -> Option<&str> { self.get("Documentation").and_then(|d| d.datum.as_str()) } /// Gets the API documentation URL pub fn api_documentation(&self) -> Option<&str> { self.get("API-Documentation").and_then(|d| d.datum.as_str()) } /// Gets the Go import path pub fn go_import_path(&self) -> Option<&str> { self.get("Go-Import-Path").and_then(|d| d.datum.as_str()) } /// Gets the download URL pub fn download(&self) -> Option<&str> { self.get("Download").and_then(|d| d.datum.as_str()) } /// Gets the wiki URL pub fn wiki(&self) -> Option<&str> { self.get("Wiki").and_then(|d| d.datum.as_str()) } /// Gets the mailing list URL or email pub fn mailing_list(&self) -> Option<&str> { self.get("MailingList").and_then(|d| d.datum.as_str()) } /// Gets the SourceForge project name pub fn sourceforge_project(&self) -> Option<&str> { self.get("SourceForge-Project") .and_then(|d| d.datum.as_str()) } /// Gets the archive name (e.g., CRAN, PyPI) pub fn archive(&self) 
-> Option<&str> { self.get("Archive").and_then(|d| d.datum.as_str()) } /// Gets the demo URL pub fn demo(&self) -> Option<&str> { self.get("Demo").and_then(|d| d.datum.as_str()) } /// Gets the PECL package name pub fn pecl_package(&self) -> Option<&str> { self.get("Pecl-Package").and_then(|d| d.datum.as_str()) } /// Gets the Haskell package name pub fn haskell_package(&self) -> Option<&str> { self.get("Haskell-Package").and_then(|d| d.datum.as_str()) } /// Gets funding information pub fn funding(&self) -> Option<&str> { self.get("Funding").and_then(|d| d.datum.as_str()) } /// Gets the changelog URL pub fn changelog(&self) -> Option<&str> { self.get("Changelog").and_then(|d| d.datum.as_str()) } /// Gets the Debian ITP bug number pub fn debian_itp(&self) -> Option { self.get("Debian-ITP").and_then(|d| match &d.datum { UpstreamDatum::DebianITP(itp) => Some(*itp), _ => unreachable!(), }) } /// Gets the list of screenshot URLs pub fn screenshots(&self) -> Option<&Vec> { self.get("Screenshots").map(|d| match &d.datum { UpstreamDatum::Screenshots(screenshots) => screenshots, _ => unreachable!(), }) } /// Gets the donation URL pub fn donation(&self) -> Option<&str> { self.get("Donation").and_then(|d| d.datum.as_str()) } /// Gets the citation information pub fn cite_as(&self) -> Option<&str> { self.get("Cite-As").and_then(|d| d.datum.as_str()) } /// Gets the registry entries pub fn registry(&self) -> Option<&Vec<(String, String)>> { self.get("Registry").map(|d| match &d.datum { UpstreamDatum::Registry(registry) => registry, _ => unreachable!(), }) } /// Gets the webservice URL pub fn webservice(&self) -> Option<&str> { self.get("Webservice").and_then(|d| d.datum.as_str()) } /// Gets the build system name pub fn buildsystem(&self) -> Option<&str> { self.get("BuildSystem").and_then(|d| d.datum.as_str()) } /// Gets the copyright information pub fn copyright(&self) -> Option<&str> { self.get("Copyright").and_then(|d| d.datum.as_str()) } /// Gets the FAQ URL pub fn faq(&self) -> 
Option<&str> { self.get("FAQ").and_then(|d| d.datum.as_str()) } } impl std::ops::Index<&str> for UpstreamMetadata { type Output = UpstreamDatumWithMetadata; fn index(&self, index: &str) -> &Self::Output { self.get(index).unwrap() } } impl Default for UpstreamMetadata { fn default() -> Self { UpstreamMetadata::new() } } impl Iterator for UpstreamMetadata { type Item = UpstreamDatumWithMetadata; fn next(&mut self) -> Option { self.0.pop() } } impl From for UpstreamDatumWithMetadata { fn from(d: UpstreamDatum) -> Self { UpstreamDatumWithMetadata { datum: d, certainty: None, origin: None, } } } impl From> for UpstreamMetadata { fn from(v: Vec) -> Self { UpstreamMetadata(v) } } impl From> for UpstreamMetadata { fn from(v: Vec) -> Self { UpstreamMetadata( v.into_iter() .map(|d| UpstreamDatumWithMetadata { datum: d, certainty: None, origin: None, }) .collect(), ) } } impl From for Vec { fn from(v: UpstreamMetadata) -> Self { v.0 } } impl From for Vec { fn from(v: UpstreamMetadata) -> Self { v.0.into_iter().map(|d| d.datum).collect() } } impl serde::ser::Serialize for UpstreamMetadata { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { let mut map = serde_yaml::Mapping::new(); for datum in &self.0 { map.insert( serde_yaml::Value::String(datum.datum.field().to_string()), serde_yaml::to_value(datum).unwrap(), ); } map.serialize(serializer) } } #[cfg(feature = "pyo3")] impl<'py> IntoPyObject<'py> for &UpstreamDatumWithMetadata { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let m = PyModule::import(py, "upstream_ontologist.guess")?; let cls = m.getattr("UpstreamDatum")?; let (field, py_datum) = self .datum .into_pyobject(py)? 
.extract::<(String, Bound)>()?; let kwargs = pyo3::types::PyDict::new(py); kwargs.set_item("certainty", self.certainty.map(|x| x.to_string()))?; kwargs.set_item("origin", self.origin.as_ref())?; cls.call((field, py_datum), Some(&kwargs)) } } impl serde::ser::Serialize for UpstreamDatumWithMetadata { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { UpstreamDatum::serialize(&self.datum, serializer) } } /// Trait for providing upstream metadata pub trait UpstreamDataProvider { /// Provides upstream metadata from a given path fn provide( path: &std::path::Path, trust_package: bool, ) -> dyn Iterator; } /// Errors that can occur when loading JSON from HTTP #[derive(Debug)] pub enum HTTPJSONError { /// HTTP request error HTTPError(reqwest::Error), /// Request timed out Timeout(tokio::time::Duration), /// HTTP error response Error { /// The URL that failed url: reqwest::Url, /// HTTP status code status: u16, /// The response object response: Box, }, } impl std::fmt::Display for HTTPJSONError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { HTTPJSONError::HTTPError(e) => write!(f, "{}", e), HTTPJSONError::Timeout(timeout) => write!(f, "Timeout after {:?}", timeout), HTTPJSONError::Error { url, status, response: _, } => write!(f, "HTTP error {} for {}:", status, url,), } } } /// Loads JSON data from a URL with optional timeout pub async fn load_json_url( http_url: &Url, timeout: Option, ) -> Result { let mut headers = HeaderMap::new(); headers.insert(reqwest::header::ACCEPT, "application/json".parse().unwrap()); if let Some(hostname) = http_url.host_str() { if hostname == "github.com" || hostname == "raw.githubusercontent.com" { if let Ok(token) = std::env::var("GITHUB_TOKEN") { headers.insert( reqwest::header::WWW_AUTHENTICATE, format!("Bearer {}", token).parse().unwrap(), ); } } } let client = crate::http::build_client() .default_headers(headers) .build() .map_err(HTTPJSONError::HTTPError)?; let http_url: 
// (continuation of `load_json_url`: the type annotation for `http_url` starts on the previous line)
reqwest::Url = Into::::into(http_url.clone()).parse().unwrap();
    // Build the request separately so construction failures map to HTTPError.
    let request = client
        .get(http_url)
        .build()
        .map_err(HTTPJSONError::HTTPError)?;
    // Default to a 30-second timeout when the caller does not supply one.
    let timeout = timeout.unwrap_or(std::time::Duration::from_secs(30));
    let response = tokio::time::timeout(timeout, client.execute(request))
        .await
        .map_err(|_| HTTPJSONError::Timeout(timeout))?
        .map_err(HTTPJSONError::HTTPError)?;
    // Non-2xx responses are surfaced with the final (post-redirect) URL, the
    // status code, and the response itself for further inspection by callers.
    if !response.status().is_success() {
        return Err(HTTPJSONError::Error {
            url: response.url().clone(),
            status: response.status().as_u16(),
            response: Box::new(response),
        });
    }
    let json_contents: serde_json::Value =
        response.json().await.map_err(HTTPJSONError::HTTPError)?;
    Ok(json_contents)
}

/// Parses the XML file at `path` and strips the given namespaces from
/// element names; returns `None` when the file cannot be read or parsed.
fn xmlparse_simplify_namespaces(path: &Path, namespaces: &[&str]) -> Option {
    // NOTE(review): each namespace is formatted twice here — `"{{{}{}}}"` with
    // (ns, ns) produces "{<ns><ns>}". ElementTree-style qualified names are
    // "{<ns>}local", which would call for `format!("{{{}}}", ns)` instead.
    // Confirm against the names xmltree actually produces before relying on this.
    let namespaces = namespaces
        .iter()
        .map(|ns| format!("{{{}{}}}", ns, ns))
        .collect::>();
    let mut f = std::fs::File::open(path).unwrap();
    let mut buf = Vec::new();
    f.read_to_end(&mut buf).ok()?;
    let mut tree = xmltree::Element::parse(std::io::Cursor::new(buf)).ok()?;
    simplify_namespaces(&mut tree, &namespaces);
    Some(tree)
}

/// Recursively clears the prefix and strips the first matching namespace
/// string from `element`'s name, then does the same for all element children.
fn simplify_namespaces(element: &mut xmltree::Element, namespaces: &[String]) {
    use xmltree::XMLNode;
    element.prefix = None;
    // Drop the leading "{namespace}" portion of the qualified name, if present.
    if let Some(namespace) = namespaces.iter().find(|&ns| element.name.starts_with(ns)) {
        element.name = element.name[namespace.len()..].to_string();
    }
    for child in &mut element.children {
        if let XMLNode::Element(ref mut child_element) = child {
            simplify_namespaces(child_element, namespaces);
        }
    }
}

/// Errors that can occur when canonicalizing URLs
pub enum CanonicalizeError {
    /// URL is invalid with reason
    InvalidUrl(Url, String),
    /// URL cannot be verified with reason
    Unverifiable(Url, String),
    /// Request was rate limited
    RateLimited(Url),
}

#[derive(Debug)]
/// Error when manipulating URL path segments
pub struct PathSegmentError;

/// Checks if a URL is canonical by following redirects.
///
/// On a 2xx response the post-redirect URL reported by the client is
/// returned; 429 maps to `RateLimited`, 404 to `InvalidUrl`, and server
/// errors or other statuses to `Unverifiable`.
pub async fn check_url_canonical(url: &Url) -> Result {
    // Only http(s) URLs can be checked over the network.
    if url.scheme() != "http" && url.scheme() != "https" {
        return Err(CanonicalizeError::Unverifiable(
            url.clone(),
            format!("Unsupported scheme {}", url.scheme()),
        ));
    }
    let client = crate::http::build_client()
        .build()
        .map_err(|e| CanonicalizeError::Unverifiable(url.clone(), format!("HTTP error {}", e)))?;
    let response = client.get(url.as_str()).send().await.map_err(|e| {
        CanonicalizeError::Unverifiable(url.clone(), format!("HTTP error {}", e))
    })?;
    match response.status() {
        // `response.url()` reflects any redirects followed by the client.
        status if status.is_success() => Ok(response.url().clone()),
        status if status == reqwest::StatusCode::TOO_MANY_REQUESTS => {
            Err(CanonicalizeError::RateLimited(url.clone()))
        }
        status if status == reqwest::StatusCode::NOT_FOUND => Err(CanonicalizeError::InvalidUrl(
            url.clone(),
            format!("Not found: {}", response.status()),
        )),
        status if status.is_server_error() => Err(CanonicalizeError::Unverifiable(
            url.clone(),
            format!("Server down: {}", response.status()),
        )),
        _ => Err(CanonicalizeError::Unverifiable(
            url.clone(),
            format!("Unknown HTTP error {}", response.status()),
        )),
    }
}

/// Creates a new URL with the specified path segments, replacing any
/// existing path on `url`. Fails with `PathSegmentError` for URLs that
/// cannot have a path (cannot-be-a-base URLs).
pub fn with_path_segments(url: &Url, path_segments: &[&str]) -> Result {
    let mut url = url.clone();
    url.path_segments_mut()
        .map_err(|_| PathSegmentError)?
        .clear()
        .extend(path_segments.iter().copied());
    Ok(url)
}

/// Trait for different code forges (GitHub, GitLab, etc.)
#[async_trait::async_trait] pub trait Forge: Send + Sync { /// Whether the repository browse URL can be used as homepage fn repository_browse_can_be_homepage(&self) -> bool; /// Returns the name of the forge fn name(&self) -> &'static str; /// Derives the bug database URL from a bug submission URL fn bug_database_url_from_bug_submit_url(&self, _url: &Url) -> Option { None } /// Derives the bug submission URL from a bug database URL fn bug_submit_url_from_bug_database_url(&self, _url: &Url) -> Option { None } /// Checks if a bug database URL is canonical async fn check_bug_database_canonical(&self, url: &Url) -> Result { Err(CanonicalizeError::Unverifiable( url.clone(), "Not implemented".to_string(), )) } /// Checks if a bug submission URL is canonical async fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { Err(CanonicalizeError::Unverifiable( url.clone(), "Not implemented".to_string(), )) } /// Gets the bug database URL from an issue URL fn bug_database_from_issue_url(&self, _url: &Url) -> Option { None } /// Gets the bug database URL from a repository URL fn bug_database_url_from_repo_url(&self, _url: &Url) -> Option { None } /// Gets the repository URL from a merge request URL fn repo_url_from_merge_request_url(&self, _url: &Url) -> Option { None } /// Extends metadata with forge-specific information async fn extend_metadata( &self, _metadata: &mut Vec, _project: &str, _max_certainty: Option, ) { } } /// GitHub forge implementation pub struct GitHub; impl Default for GitHub { fn default() -> Self { Self::new() } } impl GitHub { /// Creates a new GitHub forge instance pub fn new() -> Self { Self } } #[async_trait::async_trait] impl Forge for GitHub { fn name(&self) -> &'static str { "GitHub" } fn repository_browse_can_be_homepage(&self) -> bool { true } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if 
path_elements.len() != 3 && path_elements.len() != 4 { return None; } if path_elements[2] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); Some(with_path_segments(&url, &path_elements[0..3]).unwrap()) } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 { return None; } if path_elements[2] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); url.path_segments_mut().unwrap().push("new"); Some(url) } async fn check_bug_database_canonical(&self, url: &Url) -> Result { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitHub URL with missing path elements".to_string(), )); } if path_elements[2] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitHub URL with missing path elements".to_string(), )); } let api_url = Url::parse(&format!( "https://api.github.com/repos/{}/{}", path_elements[0], path_elements[1] )) .unwrap(); let response = match reqwest::get(api_url).await { Ok(response) => response, Err(e) if e.status() == Some(reqwest::StatusCode::NOT_FOUND) => { return Err(CanonicalizeError::InvalidUrl( url.clone(), format!("Project does not exist {}", e), )); } Err(e) if e.status() == Some(reqwest::StatusCode::FORBIDDEN) => { // Probably rate limited warn!("Unable to verify bug database URL {}: {}", url, e); return Err(CanonicalizeError::RateLimited(url.clone())); } Err(e) => { return Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {}", e), )); } }; let data = response.json::().await.map_err(|e| { CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug 
database URL: {}", e),
            )
        })?;
        // A repository that exists but has its issue tracker disabled cannot
        // be a canonical bug database.
        if data["has_issues"].as_bool() != Some(true) {
            return Err(CanonicalizeError::InvalidUrl(
                url.clone(),
                "Project does not have issues enabled".to_string(),
            ));
        }
        // Archived repositories no longer accept issues.
        if data.get("archived").unwrap_or(&serde_json::Value::Null) == &serde_json::Value::Bool(true)
        {
            return Err(CanonicalizeError::InvalidUrl(
                url.clone(),
                "Project is archived".to_string(),
            ));
        }
        // Rebuild the canonical URL from the API's html_url plus "/issues".
        let mut url = Url::parse(data["html_url"].as_str().ok_or_else(|| {
            CanonicalizeError::Unverifiable(
                url.clone(),
                "Unable to verify bug database URL: no html_url".to_string(),
            )
        })?)
        .map_err(|e| {
            CanonicalizeError::Unverifiable(
                url.clone(),
                format!("Unable to verify bug database URL: {}", e),
            )
        })?;
        url.set_scheme("https").expect("valid scheme");
        url.path_segments_mut()
            .expect("path segments")
            .push("issues");
        Ok(url)
    }

    /// Canonicalizes a bug submit URL (".../issues/new") by dropping the
    /// trailing segment, canonicalizing the database URL, and re-appending "new".
    async fn check_bug_submit_url_canonical(&self, url: &Url) -> Result {
        let mut path_segments = url.path_segments().unwrap().collect::>();
        path_segments.pop();
        let db_url = with_path_segments(url, &path_segments).unwrap();
        let mut canonical_db_url = self.check_bug_database_canonical(&db_url).await?;
        canonical_db_url.set_scheme("https").expect("valid scheme");
        canonical_db_url
            .path_segments_mut()
            .expect("path segments")
            .push("new");
        Ok(canonical_db_url)
    }

    fn bug_database_from_issue_url(&self, url: &Url) -> Option {
        let path_elements = url
            .path_segments()
            .expect("path segments")
            .collect::>();
        // NOTE(review): a GitHub issue URL is /<owner>/<repo>/issues/<n>, so
        // "issues" is segment [2], not [1] — the sibling method
        // bug_database_url_from_bug_submit_url checks [2]. As written this
        // returns None for normal issue URLs; also `[0..3]` would panic when
        // exactly 2 segments pass the guard. Confirm intended behavior.
        if path_elements.len() < 2 || path_elements[1] != "issues" {
            return None;
        }
        let mut url = url.clone();
        url.set_scheme("https").unwrap();
        Some(with_path_segments(&url, &path_elements[0..3]).unwrap())
    }

    fn bug_database_url_from_repo_url(&self, url: &Url) -> Option {
        // NOTE(review): `.into_iter().take(2)` applies to the Option returned
        // by path_segments(), not to the segments themselves, so `path` ends
        // up holding ALL segments (and `path[1]` panics on fewer than 2).
        // Presumably the intent was to take only <owner>/<repo> — verify.
        let mut path = url
            .path_segments()
            .into_iter()
            .take(2)
            .flatten()
            .collect::>();
        // Strip a trailing ".git" from the repository name before appending.
        path[1] = path[1].strip_suffix(".git").unwrap_or(path[1]);
        path.push("issues");
        let mut url = url.clone();
        url.set_scheme("https").unwrap();
        Some(with_path_segments(&url, path.as_slice()).unwrap())
    }

    fn
repo_url_from_merge_request_url(&self, url: &Url) -> Option {
        let path_elements = url
            .path_segments()
            .expect("path segments")
            .collect::>();
        // NOTE(review): GitHub merge requests live under /<owner>/<repo>/pull/<n>;
        // this checks segment [1] against "issues", which matches neither the
        // "pull" segment nor its position — confirm against intended URLs.
        if path_elements.len() < 2 || path_elements[1] != "issues" {
            return None;
        }
        let mut url = url.clone();
        url.set_scheme("https").expect("valid scheme");
        // Keep only <owner>/<repo> as the repository URL.
        Some(with_path_segments(&url, &path_elements[0..2]).unwrap())
    }
}

// Characters that must be percent-encoded when embedding a project path as a
// single URL segment (used for GitLab API project identifiers).
static DEFAULT_ASCII_SET: percent_encoding::AsciiSet = percent_encoding::CONTROLS
    .add(b'/')
    .add(b'?')
    .add(b'#')
    .add(b'%');

/// GitLab forge implementation
pub struct GitLab;

impl Default for GitLab {
    fn default() -> Self {
        Self::new()
    }
}

impl GitLab {
    /// Creates a new GitLab forge instance
    pub fn new() -> Self {
        Self
    }
}

#[async_trait::async_trait]
impl Forge for GitLab {
    fn name(&self) -> &'static str {
        "GitLab"
    }

    fn repository_browse_can_be_homepage(&self) -> bool {
        true
    }

    fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option {
        let mut path_elements = url
            .path_segments()
            .expect("path segments")
            .collect::>();
        if path_elements.len() < 2 {
            return None;
        }
        if path_elements[path_elements.len() - 2] != "issues" {
            return None;
        }
        // NOTE(review): this pops a segment when the last element is NOT
        // "new" (inverted from the usual ".../issues/new" shape) and then
        // slices to len - 3; both look suspicious together — confirm against
        // real GitLab submit URLs before relying on this.
        if path_elements[path_elements.len() - 1] != "new" {
            path_elements.pop();
        }
        Some(with_path_segments(url, &path_elements[0..path_elements.len() - 3]).unwrap())
    }

    fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option {
        let path_elements = url
            .path_segments()
            .expect("path segments")
            .collect::>();
        if path_elements.len() < 2 {
            return None;
        }
        // Only derive a submit URL from a ".../issues" database URL.
        if path_elements[path_elements.len() - 1] != "issues" {
            return None;
        }
        let mut url = url.clone();
        url.path_segments_mut().expect("path segments").push("new");
        Some(url)
    }

    /// Verifies a GitLab ".../issues" URL against the instance's v4 projects API.
    async fn check_bug_database_canonical(&self, url: &Url) -> Result {
        let host = url
            .host()
            .ok_or_else(|| CanonicalizeError::InvalidUrl(url.clone(), "no host".to_string()))?;
        let mut path_elements = url
            .path_segments()
            .expect("path segments")
            .collect::>();
        if path_elements.len() < 2 || path_elements[path_elements.len() - 1] != "issues" {
            return
Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } path_elements.pop(); let proj = path_elements.join("/"); let proj_segment = utf8_percent_encode(proj.as_str(), &DEFAULT_ASCII_SET); let api_url = Url::parse(&format!( "https://{}/api/v4/projects/{}", host, proj_segment )) .map_err(|_| { CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with invalid project path".to_string(), ) })?; match load_json_url(&api_url, None).await { Ok(data) => { // issues_enabled is only provided when the user is authenticated, // so if we're not then we just fall back to checking the canonical URL let issues_enabled = data .get("issues_enabled") .unwrap_or(&serde_json::Value::Null); if issues_enabled.as_bool() == Some(false) { return Err(CanonicalizeError::InvalidUrl( url.clone(), "Project does not have issues enabled".to_string(), )); } let mut canonical_url = Url::parse(data["web_url"].as_str().unwrap()).unwrap(); canonical_url .path_segments_mut() .unwrap() .extend(&["-", "issues"]); if issues_enabled.as_bool() == Some(true) { return Ok(canonical_url); } check_url_canonical(&canonical_url).await } Err(HTTPJSONError::Error { status, .. 
}) if status == reqwest::StatusCode::NOT_FOUND => { Err(CanonicalizeError::InvalidUrl( url.clone(), "Project not found".to_string(), )) } Err(e) => Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {:?}", e), )), } } async fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { let path_elements = url .path_segments() .expect("valid segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 2] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } if path_elements[path_elements.len() - 1] != "new" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } let db_url = with_path_segments(url, &path_elements[0..path_elements.len() - 1]).unwrap(); let mut canonical_db_url = self.check_bug_database_canonical(&db_url).await?; canonical_db_url .path_segments_mut() .expect("valid segments") .push("new"); Ok(canonical_db_url) } fn bug_database_from_issue_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("valid segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 2] != "issues" || path_elements[path_elements.len() - 1] .parse::() .is_err() { return None; } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 1]).unwrap()) } fn bug_database_url_from_repo_url(&self, url: &Url) -> Option { let mut url = url.clone(); let last = url .path_segments() .expect("valid segments") .next_back() .unwrap() .to_string(); url.path_segments_mut() .unwrap() .pop() .push(last.trim_end_matches(".git")) .push("issues"); Some(url) } fn repo_url_from_merge_request_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 3 || path_elements[path_elements.len() - 2] != "merge_requests" || 
path_elements[path_elements.len() - 1] .parse::() .is_err() { return None; } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 2]).unwrap()) } } /// Extracts upstream metadata from a Travis CI configuration file pub fn guess_from_travis_yml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_yaml::Value = serde_yaml::from_str(&contents).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut ret = Vec::new(); if let Some(go_import_path) = data.get("go_import_path") { if let Some(go_import_path) = go_import_path.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(go_import_path.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(ret) } /// Extracts upstream metadata from environment variables pub fn guess_from_environment() -> std::result::Result, ProviderError> { let mut results = Vec::new(); if let Ok(url) = std::env::var("UPSTREAM_BRANCH_URL") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Certain), origin: Some(Origin::Other("environment".to_string())), }); } Ok(results) } fn find_datum<'a>( metadata: &'a [UpstreamDatumWithMetadata], field: &str, ) -> Option<&'a UpstreamDatumWithMetadata> { metadata.iter().find(|d| d.datum.field() == field) } fn set_datum(metadata: &mut Vec, datum: UpstreamDatumWithMetadata) { if let Some(idx) = metadata .iter() .position(|d| d.datum.field() == datum.datum.field()) { metadata[idx] = datum; } else { metadata.push(datum); } } /// Updates metadata collection with new guesses based on certainty levels pub fn update_from_guesses( metadata: &mut Vec, new_items: impl Iterator, ) -> Vec { let mut changed = vec![]; for datum in new_items { let current_datum = find_datum(metadata, datum.datum.field()); if current_datum.is_none() || 
datum.certainty > current_datum.unwrap().certainty { changed.push(datum.clone()); set_datum(metadata, datum); } } changed } fn possible_fields_missing( upstream_metadata: &[UpstreamDatumWithMetadata], fields: &[&str], _field_certainty: Certainty, ) -> bool { for field in fields { match find_datum(upstream_metadata, field) { Some(datum) if datum.certainty != Some(Certainty::Certain) => return true, None => return true, _ => (), } } false } async fn extend_from_external_guesser< F: Fn() -> Fut, Fut: std::future::Future>, >( metadata: &mut Vec, max_certainty: Option, supported_fields: &[&str], new_items: F, ) { if max_certainty.is_some() && !possible_fields_missing(metadata, supported_fields, max_certainty.unwrap()) { return; } let new_items = new_items() .await .into_iter() .map(|item| UpstreamDatumWithMetadata { datum: item, certainty: max_certainty, origin: None, }); update_from_guesses(metadata, new_items); } /// SourceForge forge implementation pub struct SourceForge; impl Default for SourceForge { fn default() -> Self { Self::new() } } impl SourceForge { /// Creates a new SourceForge forge instance pub fn new() -> Self { Self } } #[async_trait::async_trait] impl Forge for SourceForge { fn name(&self) -> &'static str { "SourceForge" } fn repository_browse_can_be_homepage(&self) -> bool { false } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { let mut segments = url.path_segments()?; if segments.next() != Some("p") { return None; } let project = segments.next()?; if segments.next() != Some("bugs") { return None; } with_path_segments(url, &["p", project, "bugs"]).ok() } async fn extend_metadata( &self, metadata: &mut Vec, project: &str, max_certainty: Option, ) { let subproject = find_datum(metadata, "Name").and_then(|f| match f.datum { UpstreamDatum::Name(ref name) => Some(name.to_string()), _ => None, }); extend_from_external_guesser( metadata, max_certainty, &["Homepage", "Name", "Repository", "Bug-Database"], || async { 
crate::forges::sourceforge::guess_from_sf(project, subproject.as_deref()).await }, ) .await } } /// Launchpad forge implementation pub struct Launchpad; impl Default for Launchpad { fn default() -> Self { Self::new() } } impl Launchpad { /// Creates a new Launchpad forge instance pub fn new() -> Self { Self } } impl Forge for Launchpad { fn name(&self) -> &'static str { "launchpad" } fn repository_browse_can_be_homepage(&self) -> bool { false } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { if url.host_str()? != "bugs.launchpad.net" { return None; } let mut segments = url.path_segments()?; let project = segments.next()?; with_path_segments(url, &[project]).ok() } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { if url.host_str()? != "bugs.launchpad.net" { return None; } let mut segments = url.path_segments()?; let project = segments.next()?; with_path_segments(url, &[project, "+filebug"]).ok() } } /// Determines which forge a URL belongs to pub async fn find_forge(url: &Url, net_access: Option) -> Option> { if url.host_str()? == "sourceforge.net" { return Some(Box::new(SourceForge::new())); } if url.host_str()?.ends_with(".launchpad.net") { return Some(Box::new(Launchpad::new())); } if url.host_str()? 
== "github.com" {
        return Some(Box::new(GitHub::new()));
    }
    // Any known GitLab instance (gitlab.com or self-hosted) maps to GitLab.
    if vcs::is_gitlab_site(url.host_str()?, net_access).await {
        return Some(Box::new(GitLab::new()));
    }
    None
}

/// Checks if a bug database URL is canonical
///
/// NOTE(review): despite the name, this calls the forge's
/// `bug_database_url_from_bug_submit_url` (a pure URL derivation) rather
/// than the trait's async `check_bug_database_canonical` method — verify
/// whether delegation to the checker was intended here.
pub async fn check_bug_database_canonical(
    url: &Url,
    net_access: Option,
) -> Result {
    if let Some(forge) = find_forge(url, net_access).await {
        forge
            .bug_database_url_from_bug_submit_url(url)
            .ok_or(CanonicalizeError::Unverifiable(
                url.clone(),
                "no bug database URL found".to_string(),
            ))
    } else {
        Err(CanonicalizeError::Unverifiable(
            url.clone(),
            "unknown forge".to_string(),
        ))
    }
}

/// Derives a bug submission URL from a bug database URL
pub async fn bug_submit_url_from_bug_database_url(
    url: &Url,
    net_access: Option,
) -> Option {
    if let Some(forge) = find_forge(url, net_access).await {
        forge.bug_submit_url_from_bug_database_url(url)
    } else {
        None
    }
}

/// Derives a bug database URL from a bug submission URL
pub async fn bug_database_url_from_bug_submit_url(
    url: &Url,
    net_access: Option,
) -> Option {
    if let Some(forge) = find_forge(url, net_access).await {
        forge.bug_database_url_from_bug_submit_url(url)
    } else {
        None
    }
}

/// Guesses the bug database URL from a repository URL
pub async fn guess_bug_database_url_from_repo_url(
    url: &Url,
    net_access: Option,
) -> Option {
    if let Some(forge) = find_forge(url, net_access).await {
        forge.bug_database_url_from_repo_url(url)
    } else {
        None
    }
}

/// Extracts the repository URL from a merge request URL
pub async fn repo_url_from_merge_request_url(url: &Url, net_access: Option) -> Option {
    if let Some(forge) = find_forge(url, net_access).await {
        forge.repo_url_from_merge_request_url(url)
    } else {
        None
    }
}

/// Extracts the bug database URL from an issue URL
pub async fn bug_database_from_issue_url(url: &Url, net_access: Option) -> Option {
    if let Some(forge) = find_forge(url, net_access).await {
        forge.bug_database_from_issue_url(url)
    } else {
        None
    }
}

/// Checks if a bug submission URL is canonical
///
/// NOTE(review): like `check_bug_database_canonical` above, this delegates
/// to the URL-derivation method (`bug_submit_url_from_bug_database_url`)
/// rather than the trait's async `check_bug_submit_url_canonical` — confirm
/// which was intended.
pub async fn
check_bug_submit_url_canonical(
    url: &Url,
    net_access: Option,
) -> Result {
    if let Some(forge) = find_forge(url, net_access).await {
        forge
            .bug_submit_url_from_bug_database_url(url)
            .ok_or(CanonicalizeError::Unverifiable(
                url.clone(),
                "no bug submit URL found".to_string(),
            ))
    } else {
        Err(CanonicalizeError::Unverifiable(
            url.clone(),
            "unknown forge".to_string(),
        ))
    }
}

/// Extracts the PECL package name from a URL
/// (e.g. "https://pecl.php.net/package/foo" -> "foo").
pub fn extract_pecl_package_name(url: &str) -> Option {
    let pecl_regex = regex!(r"https?://pecl\.php\.net/package/(.*)");
    if let Some(captures) = pecl_regex.captures(url) {
        return captures.get(1).map(|m| m.as_str().to_string());
    }
    None
}

/// Extracts the Hackage package name from a URL
/// (e.g. "https://hackage.haskell.org/package/foo/..." -> "foo").
pub fn extract_hackage_package(url: &str) -> Option {
    let hackage_regex = regex!(r"https?://hackage\.haskell\.org/package/([^/]+)/.*");
    if let Some(captures) = hackage_regex.captures(url) {
        return captures.get(1).map(|m| m.as_str().to_string());
    }
    None
}

/// Obtain metadata from a URL related to the project
///
/// Recognized URL shapes (SourceForge project pages, PECL packages,
/// Hackage packages) each contribute both the package identifier and the
/// corresponding Archive datum, all with Certain certainty.
pub fn metadata_from_url(url: &str, origin: &Origin) -> Vec {
    let mut results = Vec::new();
    if let Some(sf_project) = crate::forges::sourceforge::extract_sf_project_name(url) {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::SourceForgeProject(sf_project),
            certainty: Some(Certainty::Certain),
            origin: Some(origin.clone()),
        });
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Archive("SourceForge".to_string()),
            certainty: Some(Certainty::Certain),
            origin: Some(origin.clone()),
        });
    }
    if let Some(pecl_package) = extract_pecl_package_name(url) {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::PeclPackage(pecl_package),
            certainty: Some(Certainty::Certain),
            origin: Some(origin.clone()),
        });
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Archive("Pecl".to_string()),
            certainty: Some(Certainty::Certain),
            origin: Some(origin.clone()),
        });
    }
    if let Some(haskell_package) = extract_hackage_package(url) {
results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::HaskellPackage(haskell_package), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("Hackage".to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } results } /// Fetches metadata from the Repology API for a given source package pub async fn get_repology_metadata(srcname: &str, repo: Option<&str>) -> Option { let repo = repo.unwrap_or("debian_unstable"); let url = format!( "https://repology.org/tools/project-by?repo={}&name_type=srcname' '&target_page=api_v1_project&name={}", repo, srcname ); match load_json_url(&Url::parse(url.as_str()).unwrap(), None).await { Ok(json) => Some(json), Err(HTTPJSONError::Error { status: 404, .. }) => None, Err(e) => { debug!("Failed to load repology metadata: {:?}", e); None } } } /// Guesses upstream metadata from a file or directory path pub fn guess_from_path( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let basename = path.file_name().and_then(|s| s.to_str()); let mut ret = Vec::new(); if let Some(basename_str) = basename { let re = regex!(r"(.*)-([0-9.]+)"); if let Some(captures) = re.captures(basename_str) { if let Some(name) = captures.get(1) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(version) = captures.get(2) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } else { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(basename_str.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } Ok(ret) } #[cfg(feature = "pyo3")] impl<'py> FromPyObject<'_, 'py> for UpstreamDatum { type Error = PyErr; fn extract(obj: 
pyo3::Borrowed<'_, 'py, PyAny>) -> PyResult<Self> {
        // Accept any of: a `(field, value)` tuple, an object with a `.datum`
        // attribute carrying `field`/`value`, or an object with `field`/`value`
        // attributes directly.
        let (field, val): (String, Bound<'py, PyAny>) =
            if let Ok((field, val)) = obj.extract::<(String, Bound<'py, PyAny>)>() {
                (field, val)
            } else if let Ok(datum) = obj.getattr("datum") {
                let field = datum.getattr("field")?.extract::<String>()?;
                let val = datum.getattr("value")?;
                (field, val)
            } else if obj.hasattr("field")? && obj.hasattr("value")? {
                let field = obj.getattr("field")?.extract::<String>()?;
                let val = obj.getattr("value")?;
                (field, val)
            } else {
                // Fixed typo: "attributesm" -> "attributes"
                return Err(PyTypeError::new_err((
                    format!("Expected a tuple of (field, value) or an object with field and value attributes, found {:?}", obj),
                )));
            };
        match field.as_str() {
            "Name" => Ok(UpstreamDatum::Name(val.extract::<String>()?)),
            "Version" => Ok(UpstreamDatum::Version(val.extract::<String>()?)),
            "Homepage" => Ok(UpstreamDatum::Homepage(val.extract::<String>()?)),
            "Bug-Database" => Ok(UpstreamDatum::BugDatabase(val.extract::<String>()?)),
            "Bug-Submit" => Ok(UpstreamDatum::BugSubmit(val.extract::<String>()?)),
            "Contact" => Ok(UpstreamDatum::Contact(val.extract::<String>()?)),
            "Repository" => Ok(UpstreamDatum::Repository(val.extract::<String>()?)),
            "Repository-Browse" => Ok(UpstreamDatum::RepositoryBrowse(val.extract::<String>()?)),
            "License" => Ok(UpstreamDatum::License(val.extract::<String>()?)),
            "Description" => Ok(UpstreamDatum::Description(val.extract::<String>()?)),
            "Summary" => Ok(UpstreamDatum::Summary(val.extract::<String>()?)),
            "Cargo-Crate" => Ok(UpstreamDatum::CargoCrate(val.extract::<String>()?)),
            "Security-MD" => Ok(UpstreamDatum::SecurityMD(val.extract::<String>()?)),
            "Security-Contact" => Ok(UpstreamDatum::SecurityContact(val.extract::<String>()?)),
            "Keywords" => Ok(UpstreamDatum::Keywords(val.extract::<Vec<String>>()?)),
            "Copyright" => Ok(UpstreamDatum::Copyright(val.extract::<String>()?)),
            "Documentation" => Ok(UpstreamDatum::Documentation(val.extract::<String>()?)),
            "API-Documentation" => Ok(UpstreamDatum::APIDocumentation(val.extract::<String>()?)),
            "Go-Import-Path" => Ok(UpstreamDatum::GoImportPath(val.extract::<String>()?)),
            "Download" => Ok(UpstreamDatum::Download(val.extract::<String>()?)),
            "Wiki" => Ok(UpstreamDatum::Wiki(val.extract::<String>()?)),
            "MailingList" => Ok(UpstreamDatum::MailingList(val.extract::<String>()?)),
            "Funding" => Ok(UpstreamDatum::Funding(val.extract::<String>()?)),
            "SourceForge-Project" => {
                Ok(UpstreamDatum::SourceForgeProject(val.extract::<String>()?))
            }
            "Archive" => Ok(UpstreamDatum::Archive(val.extract::<String>()?)),
            "Demo" => Ok(UpstreamDatum::Demo(val.extract::<String>()?)),
            "Pecl-Package" => Ok(UpstreamDatum::PeclPackage(val.extract::<String>()?)),
            "Haskell-Package" => Ok(UpstreamDatum::HaskellPackage(val.extract::<String>()?)),
            // NOTE(review): element types for Author/Maintainer reconstructed as the
            // crate's Person type — confirm against UpstreamDatum's declaration.
            "Author" => Ok(UpstreamDatum::Author(val.extract::<Vec<Person>>()?)),
            "Maintainer" => Ok(UpstreamDatum::Maintainer(val.extract::<Person>()?)),
            "Changelog" => Ok(UpstreamDatum::Changelog(val.extract::<String>()?)),
            "Screenshots" => Ok(UpstreamDatum::Screenshots(val.extract::<Vec<String>>()?)),
            "Cite-As" => Ok(UpstreamDatum::CiteAs(val.extract::<String>()?)),
            "Registry" => {
                // A registry entry arrives as a list of dicts with "Name"/"Entry" keys.
                let v = val.extract::<Vec<Bound<'py, PyDict>>>()?;
                let mut registry = Vec::new();
                for item in v {
                    let name = item.get_item("Name")?.extract::<String>()?;
                    let entry = item.get_item("Entry")?.extract::<String>()?;
                    registry.push((name, entry));
                }
                Ok(UpstreamDatum::Registry(registry))
            }
            "Donation" => Ok(UpstreamDatum::Donation(val.extract::<String>()?)),
            "Webservice" => Ok(UpstreamDatum::Webservice(val.extract::<String>()?)),
            "BuildSystem" => Ok(UpstreamDatum::BuildSystem(val.extract::<String>()?)),
            "FAQ" => Ok(UpstreamDatum::FAQ(val.extract::<String>()?)),
            _ => Err(PyRuntimeError::new_err(format!("Unknown field: {}", field))),
        }
    }
}

#[cfg(feature = "pyo3")]
impl<'py> IntoPyObject<'py> for &UpstreamDatum {
    type Target = PyAny;
    type Output = Bound<'py, Self::Target>;
    type Error = PyErr;

    /// Converts the datum into a Python `(field, value)` tuple.
    fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
        let field = self.field().to_string();
        let value: Bound<'py, PyAny> = match self {
            UpstreamDatum::Name(n) => n.into_pyobject(py)?.into_any(),
            UpstreamDatum::Version(v) => v.into_pyobject(py)?.into_any(),
            UpstreamDatum::Contact(c) => c.into_pyobject(py)?.into_any(),
            UpstreamDatum::Summary(s) => s.into_pyobject(py)?.into_any(),
            UpstreamDatum::License(l) => l.into_pyobject(py)?.into_any(),
            UpstreamDatum::Homepage(h) => h.into_pyobject(py)?.into_any(),
            UpstreamDatum::Description(d) => d.into_pyobject(py)?.into_any(),
            UpstreamDatum::BugDatabase(b) => b.into_pyobject(py)?.into_any(),
            UpstreamDatum::BugSubmit(b) => b.into_pyobject(py)?.into_any(),
            UpstreamDatum::Repository(r) => r.into_pyobject(py)?.into_any(),
            UpstreamDatum::RepositoryBrowse(r) => r.into_pyobject(py)?.into_any(),
            UpstreamDatum::SecurityMD(s) => s.into_pyobject(py)?.into_any(),
            UpstreamDatum::SecurityContact(s) => s.into_pyobject(py)?.into_any(),
            UpstreamDatum::CargoCrate(c) => c.into_pyobject(py)?.into_any(),
            // NOTE(review): the collection/Person arms below had no `.into_any()`
            // in the original text; presumably their IntoPyObject target is
            // already PyAny — confirm against the pyo3 version in use.
            UpstreamDatum::Keywords(ks) => ks.into_pyobject(py)?,
            UpstreamDatum::Copyright(c) => c.into_pyobject(py)?.into_any(),
            UpstreamDatum::Documentation(a) => a.into_pyobject(py)?.into_any(),
            UpstreamDatum::APIDocumentation(a) => a.into_pyobject(py)?.into_any(),
            UpstreamDatum::GoImportPath(ip) => ip.into_pyobject(py)?.into_any(),
            UpstreamDatum::Archive(a) => a.into_pyobject(py)?.into_any(),
            UpstreamDatum::Demo(d) => d.into_pyobject(py)?.into_any(),
            UpstreamDatum::Maintainer(m) => m.into_pyobject(py)?,
            UpstreamDatum::Author(a) => a.into_pyobject(py)?,
            UpstreamDatum::Wiki(w) => w.into_pyobject(py)?.into_any(),
            UpstreamDatum::Download(d) => d.into_pyobject(py)?.into_any(),
            UpstreamDatum::MailingList(m) => m.into_pyobject(py)?.into_any(),
            UpstreamDatum::SourceForgeProject(m) => m.into_pyobject(py)?.into_any(),
            UpstreamDatum::PeclPackage(p) => p.into_pyobject(py)?.into_any(),
            UpstreamDatum::Funding(p) => p.into_pyobject(py)?.into_any(),
            UpstreamDatum::Changelog(c) => c.into_pyobject(py)?.into_any(),
            UpstreamDatum::HaskellPackage(p) => p.into_pyobject(py)?.into_any(),
            UpstreamDatum::DebianITP(i) => i.into_pyobject(py)?.into_any(),
            UpstreamDatum::Screenshots(s) => s.into_pyobject(py)?,
            UpstreamDatum::CiteAs(s) => s.into_pyobject(py)?.into_any(),
            UpstreamDatum::Registry(r) => {
                // Serialize each (Name, Entry) pair as a Python dict.
                let list: Result<Vec<_>, _> = r
                    .iter()
                    .map(|(name, entry)| {
                        let dict = PyDict::new(py);
                        dict.set_item("Name", name)?;
                        dict.set_item("Entry", entry)?;
                        Ok::<Bound<'py, PyAny>, PyErr>(dict.into_any())
                    })
                    .collect();
                list?.into_pyobject(py)?
            }
            UpstreamDatum::Donation(d) => d.into_pyobject(py)?.into_any(),
            UpstreamDatum::Webservice(w) => w.into_pyobject(py)?.into_any(),
            UpstreamDatum::BuildSystem(b) => b.into_pyobject(py)?.into_any(),
            UpstreamDatum::FAQ(f) => f.into_pyobject(py)?.into_any(),
        };
        Ok((field, value).into_pyobject(py)?.into_any())
    }
}

#[cfg(feature = "pyo3")]
impl<'py> FromPyObject<'_, 'py> for UpstreamDatumWithMetadata {
    type Error = PyErr;

    /// Extracts a datum plus its `certainty`/`origin` attributes.
    fn extract(obj: pyo3::Borrowed<'_, 'py, PyAny>) -> PyResult<Self> {
        let certainty = obj.getattr("certainty")?.extract::<Option<String>>()?;
        let origin = obj.getattr("origin")?.extract::<Option<Origin>>()?;
        let datum = if obj.hasattr("datum")? {
            obj.getattr("datum")?.extract::<UpstreamDatum>()
        } else {
            obj.extract::<UpstreamDatum>()
        }?;
        Ok(UpstreamDatumWithMetadata {
            datum,
            // Certainty arrives as a string; parse into the enum.
            certainty: certainty.map(|s| s.parse().unwrap()),
            origin,
        })
    }
}

/// Errors that can occur when fetching metadata from providers
#[derive(Debug)]
pub enum ProviderError {
    /// Parse error with description
    ParseError(String),
    /// I/O error
    IoError(std::io::Error),
    /// Other error with description
    Other(String),
    /// HTTP JSON fetching error
    HttpJsonError(Box<HTTPJSONError>),
    /// Extrapolation limit exceeded with limit value
    ExtrapolationLimitExceeded(usize),
}

impl std::fmt::Display for ProviderError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            ProviderError::ParseError(e) => write!(f, "Parse error: {}", e),
            ProviderError::IoError(e) => write!(f, "IO error: {}", e),
            ProviderError::Other(e) => write!(f, "Other error: {}", e),
            ProviderError::HttpJsonError(e) => write!(f, "HTTP JSON error: {}", e),
            ProviderError::ExtrapolationLimitExceeded(e) => {
                write!(f, "Extrapolation limit exceeded: {}", e)
            }
        }
    }
}

impl std::error::Error for ProviderError {}

impl From<HTTPJSONError> for ProviderError {
    fn from(e: HTTPJSONError) -> Self {
        ProviderError::HttpJsonError(Box::new(e))
    }
}

impl From<std::io::Error> for ProviderError {
    fn from(e: std::io::Error) -> Self {
ProviderError::IoError(e) } } impl From for ProviderError { fn from(e: reqwest::Error) -> Self { ProviderError::Other(e.to_string()) } } #[cfg(feature = "pyo3")] mod py_exceptions { #![allow(missing_docs)] pyo3::create_exception!( upstream_ontologist, ParseError, pyo3::exceptions::PyException ); } #[cfg(feature = "pyo3")] pub use py_exceptions::ParseError; #[cfg(feature = "pyo3")] impl From for PyErr { fn from(e: ProviderError) -> PyErr { match e { ProviderError::IoError(e) => e.into(), ProviderError::ParseError(e) => ParseError::new_err((e,)), ProviderError::Other(e) => PyRuntimeError::new_err((e,)), ProviderError::HttpJsonError(e) => PyRuntimeError::new_err((e.to_string(),)), ProviderError::ExtrapolationLimitExceeded(e) => { PyRuntimeError::new_err((e.to_string(),)) } } } } /// Settings for upstream metadata guessers #[derive(Debug, Default, Clone)] pub struct GuesserSettings { /// Whether to trust the package contents and run executables pub trust_package: bool, } type GuesserFunction = Box Result, ProviderError>>; /// A guesser that can extract upstream metadata from a specific file pub struct UpstreamMetadataGuesser { /// Name/path of the guesser pub name: std::path::PathBuf, /// Function that performs the guessing pub guess: GuesserFunction, } impl std::fmt::Debug for UpstreamMetadataGuesser { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("UpstreamMetadataGuesser") .field("name", &self.name) .finish() } } type OldAsyncGuesser = fn( PathBuf, GuesserSettings, ) -> Pin< Box< dyn std::future::Future, ProviderError>> + Send, >, >; const OLD_STATIC_GUESSERS: &[(&str, OldAsyncGuesser)] = &[ #[cfg(feature = "debian")] ("debian/watch", |path, settings| { Box::pin(async move { crate::providers::debian::guess_from_debian_watch(&path, &settings).await }) }), #[cfg(feature = "debian")] ("debian/control", |path, settings| { Box::pin( async move { crate::providers::debian::guess_from_debian_control(&path, &settings) }, ) }), 
#[cfg(feature = "debian")] ("debian/changelog", |path, settings| { Box::pin(async move { crate::providers::debian::guess_from_debian_changelog(&path, &settings).await }) }), #[cfg(feature = "debian")] ("debian/rules", |path, settings| { Box::pin(async move { crate::providers::debian::guess_from_debian_rules(&path, &settings) }) }), #[cfg(feature = "python-pkginfo")] ("PKG-INFO", |path, settings| { Box::pin( async move { crate::providers::python::guess_from_pkg_info(&path, &settings).await }, ) }), ("package.json", |path, settings| { Box::pin(async move { crate::providers::package_json::guess_from_package_json(&path, &settings) }) }), ("composer.json", |path, settings| { Box::pin(async move { crate::providers::composer_json::guess_from_composer_json(&path, &settings) }) }), ("package.xml", |path, settings| { Box::pin( async move { crate::providers::package_xml::guess_from_package_xml(&path, &settings) }, ) }), ("package.yaml", |path, settings| { Box::pin(async move { crate::providers::package_yaml::guess_from_package_yaml(&path, &settings) }) }), #[cfg(feature = "dist-ini")] ("dist.ini", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_dist_ini(&path, &settings) }) }), #[cfg(feature = "debian")] ("debian/copyright", |path, settings| { Box::pin(async move { crate::providers::debian::guess_from_debian_copyright(&path, &settings).await }) }), ("META.json", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_json(&path, &settings) }) }), ("MYMETA.json", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_json(&path, &settings) }) }), ("META.yml", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_yml(&path, &settings) }) }), ("MYMETA.yml", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_yml(&path, &settings) }) }), ("configure", |path, settings| { Box::pin(async move { crate::providers::autoconf::guess_from_configure(&path, 
&settings) }) }), #[cfg(feature = "r-description")] ("DESCRIPTION", |path, settings| { Box::pin( async move { crate::providers::r::guess_from_r_description(&path, &settings).await }, ) }), #[cfg(feature = "cargo")] ("Cargo.toml", |path, settings| { Box::pin(async move { crate::providers::rust::guess_from_cargo(&path, &settings) }) }), ("pom.xml", |path, settings| { Box::pin(async move { crate::providers::maven::guess_from_pom_xml(&path, &settings) }) }), #[cfg(feature = "git-config")] (".git/config", |path, settings| { Box::pin(async move { crate::providers::git::guess_from_git_config(&path, &settings) }) }), ("debian/get-orig-source.sh", |path, settings| { Box::pin(async move { crate::vcs_command::guess_from_get_orig_source(&path, &settings) }) }), #[cfg(feature = "pyproject-toml")] ("pyproject.toml", |path, settings| { Box::pin( async move { crate::providers::python::guess_from_pyproject_toml(&path, &settings) }, ) }), #[cfg(feature = "setup-cfg")] ("setup.cfg", |path, settings| { Box::pin( async move { crate::providers::python::guess_from_setup_cfg(&path, &settings).await }, ) }), ("go.mod", |path, settings| { Box::pin(async move { crate::providers::go::guess_from_go_mod(&path, &settings) }) }), ("Makefile.PL", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_makefile_pl(&path, &settings) }) }), ("wscript", |path, settings| { Box::pin(async move { crate::providers::waf::guess_from_wscript(&path, &settings) }) }), ("AUTHORS", |path, settings| { Box::pin(async move { crate::providers::authors::guess_from_authors(&path, &settings) }) }), ("INSTALL", |path, settings| { Box::pin(async move { crate::providers::guess_from_install(&path, &settings).await }) }), ("pubspec.yaml", |path, settings| { Box::pin( async move { crate::providers::pubspec::guess_from_pubspec_yaml(&path, &settings) }, ) }), ("pubspec.yml", |path, settings| { Box::pin( async move { crate::providers::pubspec::guess_from_pubspec_yaml(&path, &settings) }, ) }), 
("meson.build", |path, settings| { Box::pin(async move { crate::providers::meson::guess_from_meson(&path, &settings) }) }), ("metadata.json", |path, settings| { Box::pin(async move { crate::providers::metadata_json::guess_from_metadata_json(&path, &settings) }) }), (".travis.yml", |path, settings| { Box::pin(async move { crate::guess_from_travis_yml(&path, &settings) }) }), ]; fn find_guessers(path: &std::path::Path) -> Vec> { let mut candidates: Vec> = Vec::new(); let path = path.canonicalize().unwrap(); for (name, cb) in OLD_STATIC_GUESSERS { let subpath = path.join(name); if subpath.exists() { candidates.push(Box::new(PathGuesser { name: name.to_string(), subpath: subpath.clone(), cb: Box::new(move |p, s| Box::pin(cb(p.to_path_buf(), s.clone()))), })); } } for name in ["SECURITY.md", ".github/SECURITY.md", "docs/SECURITY.md"].iter() { if path.join(name).exists() { let subpath = path.join(name); candidates.push(Box::new(PathGuesser { name: name.to_string(), subpath: subpath.clone(), cb: Box::new(|p, s| { let name = name.to_string(); Box::pin(async move { crate::providers::security_md::guess_from_security_md(&name, &p, &s) }) }), })); } } let mut found_pkg_info = path.join("PKG-INFO").exists(); #[cfg(feature = "python-pkginfo")] for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if filename.ends_with(".egg-info") { candidates.push(Box::new(PathGuesser { name: format!("{}/PKG-INFO", filename), subpath: entry.path().join("PKG-INFO"), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::python::guess_from_pkg_info(&p, &s).await }, ) }), })); found_pkg_info = true; } else if filename.ends_with(".dist-info") { candidates.push(Box::new(PathGuesser { name: format!("{}/METADATA", filename), subpath: entry.path().join("METADATA"), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::python::guess_from_pkg_info(&p, &s).await }, ) }), })); found_pkg_info = true; } } 
#[cfg(feature = "pyo3")] if !found_pkg_info && path.join("setup.py").exists() { candidates.push(Box::new(PathGuesser { name: "setup.py".to_string(), subpath: path.join("setup.py"), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::python::guess_from_setup_py(&path, s.trust_package).await }) }), })); } for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".gemspec") { candidates.push(Box::new(PathGuesser { name: entry.file_name().to_string_lossy().to_string(), subpath: entry.path(), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::ruby::guess_from_gemspec(&p, &s).await }, ) }), })); } } // TODO(jelmer): Perhaps scan all directories if no other primary project information file has been found? #[cfg(feature = "r-description")] for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); let path = entry.path(); if entry.file_type().unwrap().is_dir() { let description_name = format!("{}/DESCRIPTION", entry.file_name().to_string_lossy()); if path.join(&description_name).exists() { candidates.push(Box::new(PathGuesser { name: description_name, subpath: path.join("DESCRIPTION"), cb: Box::new(|p, s| { Box::pin(async move { crate::providers::r::guess_from_r_description(&p, &s).await }) }), })); } } } let mut doap_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if filename.ends_with(".doap") || (filename.ends_with(".xml") && filename.starts_with("doap_XML_")) { Some(entry.file_name()) } else { None } }) .collect::>(); if doap_filenames.len() == 1 { let doap_filename = doap_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: doap_filename.to_string_lossy().to_string(), subpath: path.join(&doap_filename), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::doap::guess_from_doap(&p, s.trust_package) }, ) }), })); } else if 
doap_filenames.len() > 1 { log::warn!( "Multiple DOAP files found: {:?}, ignoring all.", doap_filenames ); } let mut metainfo_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry .file_name() .to_string_lossy() .ends_with(".metainfo.xml") { Some(entry.file_name()) } else { None } }) .collect::>(); if metainfo_filenames.len() == 1 { let metainfo_filename = metainfo_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: metainfo_filename.to_string_lossy().to_string(), subpath: path.join(&metainfo_filename), cb: Box::new(|p, s| { Box::pin(async move { crate::providers::metainfo::guess_from_metainfo(&p, s.trust_package) }) }), })); } else if metainfo_filenames.len() > 1 { log::warn!( "Multiple metainfo files found: {:?}, ignoring all.", metainfo_filenames ); } let mut cabal_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".cabal") { Some(entry.file_name()) } else { None } }) .collect::>(); if cabal_filenames.len() == 1 { let cabal_filename = cabal_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: cabal_filename.to_string_lossy().to_string(), subpath: path.join(&cabal_filename), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::haskell::guess_from_cabal(&path, s.trust_package) }) }), })); } else if cabal_filenames.len() > 1 { log::warn!( "Multiple cabal files found: {:?}, ignoring all.", cabal_filenames ); } let readme_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if !(filename.to_lowercase().starts_with("readme") || filename.to_lowercase().starts_with("hacking") || filename.to_lowercase().starts_with("contributing")) { return None; } if filename.ends_with('~') { return None; } let extension = entry .path() .extension() .map(|s| s.to_string_lossy().to_string()); if 
extension.as_deref() == Some("html") || extension.as_deref() == Some("pdf") || extension.as_deref() == Some("xml") { return None; } Some(entry.file_name()) }) .collect::>(); for filename in readme_filenames { candidates.push(Box::new(PathGuesser { name: filename.to_string_lossy().to_string(), subpath: path.join(&filename), cb: Box::new(|path, s| { Box::pin( async move { crate::readme::guess_from_readme(&path, s.trust_package).await }, ) }), })); } let mut nuspec_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".nuspec") { Some(entry.file_name()) } else { None } }) .collect::>(); if nuspec_filenames.len() == 1 { let nuspec_filename = nuspec_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: nuspec_filename.to_string_lossy().to_string(), subpath: path.join(&nuspec_filename), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::nuspec::guess_from_nuspec(&path, s.trust_package).await }) }), })); } else if nuspec_filenames.len() > 1 { log::warn!( "Multiple nuspec files found: {:?}, ignoring all.", nuspec_filenames ); } #[cfg(feature = "opam")] let mut opam_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".opam") { Some(entry.file_name()) } else { None } }) .collect::>(); #[cfg(feature = "opam")] match opam_filenames.len().cmp(&1) { Ordering::Equal => { let opam_filename = opam_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: opam_filename.to_string_lossy().to_string(), subpath: path.join(&opam_filename), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::ocaml::guess_from_opam(&path, s.trust_package) }) }), })); } Ordering::Greater => { log::warn!( "Multiple opam files found: {:?}, ignoring all.", opam_filenames ); } Ordering::Less => {} } let debian_patches = match std::fs::read_dir(path.join("debian").join("patches")) 
{
        Ok(patches) => patches
            .filter_map(|entry| {
                let entry = entry.unwrap();
                if entry.file_name().to_string_lossy().ends_with(".patch") {
                    Some(format!(
                        "debian/patches/{}",
                        entry.file_name().to_string_lossy()
                    ))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>(),
        Err(_) => Vec::new(),
    };
    for filename in debian_patches {
        candidates.push(Box::new(PathGuesser {
            name: filename.clone(),
            subpath: path.join(&filename),
            cb: Box::new(|path, s| {
                Box::pin(async move {
                    crate::providers::debian::guess_from_debian_patch(&path, &s).await
                })
            }),
        }));
    }
    candidates.push(Box::new(EnvironmentGuesser::new()));
    // Fallback: guess from the directory name itself.
    candidates.push(Box::new(PathGuesser {
        name: ".".to_string(),
        subpath: path.clone(),
        cb: Box::new(|p, s| Box::pin(async move { crate::guess_from_path(&p, &s) })),
    }));
    candidates
}

/// Runs the given guessers concurrently and streams their results.
///
/// NOTE(review): the element type of `guessers` was garbled in the source
/// (`Vec<Box<dyn ...>>` with the trait name stripped); `Guesser` is
/// reconstructed from the `.name()`/`.guess()` calls below and must match
/// the trait returned by `find_guessers` — confirm against the full file.
pub(crate) fn stream(
    path: &Path,
    config: &GuesserSettings,
    guessers: Vec<Box<dyn Guesser>>,
) -> impl Stream<Item = Result<UpstreamDatumWithMetadata, ProviderError>> {
    // For each of the guessers, create concurrent tasks that run the guessers in parallel
    let abspath = std::env::current_dir().unwrap().join(path);
    let config = config.clone();
    // Run guessers concurrently using buffered (no tokio::spawn required)
    futures::stream::iter(guessers)
        .map(move |mut guesser| {
            let abspath = abspath.clone();
            let config = config.clone();
            let guesser_name = guesser.name().to_string();
            async move {
                let results = match guesser.guess(&config).await {
                    Ok(results) => results,
                    // A failing guesser yields a single-error sub-stream.
                    Err(e) => return futures::stream::iter(vec![Err(e)]).boxed(),
                };
                futures::stream::iter(results.into_iter().map(move |mut datum| {
                    rewrite_upstream_datum(&guesser_name, &mut datum, &abspath);
                    Ok(datum)
                }))
                .boxed()
            }
        })
        .buffered(10) // Run up to 10 guessers concurrently while preserving order
        .flatten()
}

/// Normalizes a datum's origin: defaults it to the guesser's name and
/// rewrites absolute paths to be relative to `abspath` (as `./...`).
fn rewrite_upstream_datum(
    guesser_name: &str,
    datum: &mut UpstreamDatumWithMetadata,
    abspath: &std::path::Path,
) {
    log::trace!("{}: {:?}", guesser_name, datum);
    datum.origin = datum
        .origin
        .clone()
        .or(Some(Origin::Other(guesser_name.to_string())));
    if let Some(Origin::Path(p)) = datum.origin.as_ref() {
        if let Ok(suffix) = p.strip_prefix(abspath) {
            if suffix.to_str().unwrap().is_empty() {
                datum.origin = Some(Origin::Path(PathBuf::from_str(".").unwrap()));
            } else {
                datum.origin = Some(Origin::Path(PathBuf::from_str(".").unwrap().join(suffix)));
            }
        }
    }
}

/// Creates a stream of upstream metadata by running all applicable guessers
pub fn upstream_metadata_stream(
    path: &std::path::Path,
    trust_package: Option<bool>,
) -> impl Stream<Item = Result<UpstreamDatumWithMetadata, ProviderError>> {
    let trust_package = trust_package.unwrap_or(false);
    let guessers = find_guessers(path);
    stream(path, &GuesserSettings { trust_package }, guessers)
}

/// Extends upstream metadata with additional information from external sources
pub async fn extend_upstream_metadata(
    upstream_metadata: &mut UpstreamMetadata,
    path: &std::path::Path,
    minimum_certainty: Option<Certainty>,
    net_access: Option<bool>,
    consult_external_directory: Option<bool>,
) -> Result<(), ProviderError> {
    let net_access = net_access.unwrap_or(false);
    let consult_external_directory = consult_external_directory.unwrap_or(false);
    let minimum_certainty = minimum_certainty.unwrap_or(Certainty::Confident);
    // TODO(jelmer): Use EXTRAPOLATE_FNS mechanism for this?
for field in [ "Homepage", "Bug-Database", "Bug-Submit", "Repository", "Repository-Browse", "Download", ] { let value = match upstream_metadata.get(field) { Some(value) => value, None => continue, }; if let Some(project) = crate::forges::sourceforge::extract_sf_project_name(value.datum.as_str().unwrap()) { let certainty = Some( std::cmp::min(Some(Certainty::Likely), value.certainty) .unwrap_or(Certainty::Likely), ); upstream_metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("SourceForge".to_string()), certainty, origin: Some(Origin::Other(format!("derived from {}", field))), }); upstream_metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::SourceForgeProject(project), certainty, origin: Some(Origin::Other(format!("derived from {}", field))), }); break; } } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "SourceForge" && upstream_metadata.contains_key("SourceForge-Project") && net_access { let sf_project = upstream_metadata .get("SourceForge-Project") .unwrap() .datum .as_str() .unwrap() .to_string(); let sf_certainty = archive.unwrap().certainty; SourceForge::new() .extend_metadata( upstream_metadata.mut_items(), sf_project.as_str(), sf_certainty, ) .await; } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "Hackage" && upstream_metadata.contains_key("Hackage-Package") && net_access { let hackage_package = upstream_metadata .get("Hackage-Package") .unwrap() .datum .as_str() .unwrap() .to_string(); let hackage_certainty = archive.unwrap().certainty; crate::providers::haskell::Hackage::new() .extend_metadata( upstream_metadata.mut_items(), hackage_package.as_str(), hackage_certainty, ) .await .unwrap(); } let archive = upstream_metadata.get("Archive"); #[cfg(feature = "cargo")] if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "crates.io" && 
upstream_metadata.contains_key("Cargo-Crate") && net_access { let cargo_crate = upstream_metadata .get("Cargo-Crate") .unwrap() .datum .as_str() .unwrap() .to_string(); let crates_io_certainty = upstream_metadata.get("Archive").unwrap().certainty; crate::providers::rust::CratesIo::new() .extend_metadata( upstream_metadata.mut_items(), cargo_crate.as_str(), crates_io_certainty, ) .await .unwrap(); } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "Pecl" && upstream_metadata.contains_key("Pecl-Package") && net_access { let pecl_package = upstream_metadata .get("Pecl-Package") .unwrap() .datum .as_str() .unwrap() .to_string(); let pecl_certainty = upstream_metadata.get("Archive").unwrap().certainty; crate::providers::php::Pecl::new() .extend_metadata( upstream_metadata.mut_items(), pecl_package.as_str(), pecl_certainty, ) .await .unwrap(); } #[cfg(feature = "debian")] if net_access && consult_external_directory { // TODO(jelmer): Don't assume debian/control exists let package = match debian_control::Control::from_file_relaxed(path.join("debian/control")) { Ok((control, _)) => control.source().and_then(|s| s.name()), Err(_) => None, }; if let Some(package) = package { #[cfg(feature = "launchpad")] extend_from_lp( upstream_metadata.mut_items(), minimum_certainty, package.as_str(), None, None, ) .await; crate::providers::arch::Aur::new() .extend_metadata( upstream_metadata.mut_items(), package.as_str(), Some(minimum_certainty), ) .await .unwrap(); crate::providers::gobo::Gobo::new() .extend_metadata( upstream_metadata.mut_items(), package.as_str(), Some(minimum_certainty), ) .await .unwrap(); extend_from_repology( upstream_metadata.mut_items(), minimum_certainty, package.as_str(), ) .await; } } crate::extrapolate::extrapolate_fields(upstream_metadata, net_access, None).await?; Ok(()) } /// Trait for third-party repositories that can provide upstream metadata #[async_trait::async_trait] pub trait 
ThirdPartyRepository { /// Returns the name of the repository fn name(&self) -> &'static str; /// Returns the list of fields this repository can provide fn supported_fields(&self) -> &'static [&'static str]; /// Returns the maximum certainty level this repository can provide fn max_supported_certainty(&self) -> Certainty; /// Extends metadata with information from this repository async fn extend_metadata( &self, metadata: &mut Vec, name: &str, min_certainty: Option, ) -> Result<(), ProviderError> { if min_certainty.is_some() && min_certainty.unwrap() > self.max_supported_certainty() { // Don't bother if we can't meet minimum certainty return Ok(()); } extend_from_external_guesser( metadata, Some(self.max_supported_certainty()), self.supported_fields(), || async { self.guess_metadata(name).await.unwrap() }, ) .await; Ok(()) } /// Guesses metadata for a given package name async fn guess_metadata(&self, name: &str) -> Result, ProviderError>; } #[cfg(feature = "launchpad")] async fn extend_from_lp( upstream_metadata: &mut Vec, minimum_certainty: Certainty, package: &str, distribution: Option<&str>, suite: Option<&str>, ) { // The set of fields that Launchpad can possibly provide: let lp_fields = &["Homepage", "Repository", "Name", "Download"][..]; let lp_certainty = Certainty::Possible; if lp_certainty < minimum_certainty { // Don't bother talking to launchpad if we're not // speculating. return; } extend_from_external_guesser(upstream_metadata, Some(lp_certainty), lp_fields, || async { crate::providers::launchpad::guess_from_launchpad(package, distribution, suite) .await .unwrap() }) .await } async fn extend_from_repology( upstream_metadata: &mut Vec, minimum_certainty: Certainty, source_package: &str, ) { // The set of fields that repology can possibly provide: let repology_fields = &["Homepage", "License", "Summary", "Download"][..]; let certainty = Certainty::Confident; if certainty < minimum_certainty { // Don't bother talking to repology if we're not speculating. 
return; } extend_from_external_guesser( upstream_metadata, Some(certainty), repology_fields, || async { crate::providers::repology::guess_from_repology(source_package) .await .unwrap() }, ) .await } /// Fix existing upstream metadata. pub async fn fix_upstream_metadata(upstream_metadata: &mut UpstreamMetadata) { if let Some(repository) = upstream_metadata.get_mut("Repository") { if let Some(repo_str) = repository.datum.as_str() { let url = crate::vcs::sanitize_url(repo_str).await; repository.datum = UpstreamDatum::Repository(url.to_string()); } } if let Some(summary) = upstream_metadata.get_mut("Summary") { if let Some(s) = summary.datum.as_str() { let s = s.split_once(". ").map_or(s, |(a, _)| a); let s = s.trim_end().trim_end_matches('.'); summary.datum = UpstreamDatum::Summary(s.to_string()); } } } /// Summarize the upstream metadata into a dictionary. /// /// # Arguments /// * `metadata_items`: Iterator over metadata items /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. run executables in it /// * `net_access`: Whether to allow net access /// * `consult_external_directory`: Whether to pull in data from external (user-maintained) directories. 
pub async fn summarize_upstream_metadata( metadata_items: impl Stream, path: &std::path::Path, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let check = check.unwrap_or(false); let mut upstream_metadata = UpstreamMetadata::new(); let metadata_items = metadata_items.filter_map(|item| async move { let bad: bool = item.datum.known_bad_guess(); if bad { log::debug!("Excluding known bad item {:?}", item); None } else { Some(item) } }); let metadata_items = metadata_items.collect::>().await; upstream_metadata.update(metadata_items.into_iter()); extend_upstream_metadata( &mut upstream_metadata, path, None, net_access, consult_external_directory, ) .await?; if check { check_upstream_metadata(&mut upstream_metadata, None).await; } fix_upstream_metadata(&mut upstream_metadata).await; // Sort by name upstream_metadata.sort(); Ok(upstream_metadata) } /// Guess upstream metadata items, in no particular order. /// /// # Arguments /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. 
run executables in it /// * `minimum_certainty`: Minimum certainty of guesses to return pub fn guess_upstream_metadata_items( path: &std::path::Path, trust_package: Option, minimum_certainty: Option, ) -> impl Stream> { let items = upstream_metadata_stream(path, trust_package); items.filter_map(move |e| async move { match e { Err(e) => Some(Err(e)), Ok(UpstreamDatumWithMetadata { datum, certainty, origin, }) => { if minimum_certainty.is_some() && certainty < minimum_certainty { None } else { Some(Ok(UpstreamDatumWithMetadata { datum, certainty, origin, })) } } } }) } /// Gets upstream information for a project pub async fn get_upstream_info( path: &std::path::Path, trust_package: Option, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let metadata_items = upstream_metadata_stream(path, trust_package); let metadata_items = metadata_items.filter_map(|x| async { match x { Ok(x) => Some(x), Err(e) => { log::error!("{}", e); None } } }); summarize_upstream_metadata( metadata_items, path, net_access, consult_external_directory, check, ) .await } /// Guess the upstream metadata dictionary. /// /// # Arguments /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. run executables in it /// * `net_access`: Whether to allow net access /// * `consult_external_directory`: Whether to pull in data from external (user-maintained) directories. 
/// Guess the upstream metadata dictionary for a package at `path`.
///
/// Errors from individual guessers are logged and skipped; the remaining
/// items are summarized via `summarize_upstream_metadata`.
pub async fn guess_upstream_metadata(
    path: &std::path::Path,
    trust_package: Option<bool>,
    net_access: Option<bool>,
    consult_external_directory: Option<bool>,
    check: Option<bool>,
) -> Result<UpstreamMetadata, ProviderError> {
    let metadata_items = guess_upstream_metadata_items(path, trust_package, None);
    // Drop failed guesses after logging them; only Ok items are summarized.
    let metadata_items = metadata_items.filter_map(|x| async {
        match x {
            Ok(x) => Some(x),
            Err(e) => {
                log::error!("{}", e);
                None
            }
        }
    });
    summarize_upstream_metadata(
        metadata_items,
        path,
        net_access,
        consult_external_directory,
        check,
    )
    .await
}

/// Verifies that screenshot URLs are accessible
///
/// Returns one `(url, status)` pair per input URL, in order:
/// * `Some(true)` — 2xx response
/// * `Some(false)` — 4xx response
/// * `None` — any other status, or a transport error
pub async fn verify_screenshots(urls: &[&str]) -> Vec<(String, Option<bool>)> {
    let mut ret = Vec::new();
    // Build the client once; constructing it per request defeats reqwest's
    // connection pooling and is needlessly expensive in the loop below.
    let client = reqwest::Client::new();
    for url in urls {
        // NOTE(review): parse failure panics here, as in the original — callers
        // are expected to pass well-formed URLs.
        let mut request = reqwest::Request::new(reqwest::Method::GET, url.parse().unwrap());
        request.headers_mut().insert(
            reqwest::header::USER_AGENT,
            reqwest::header::HeaderValue::from_static(USER_AGENT),
        );
        match client.execute(request).await {
            Ok(response) => {
                let status = response.status();
                if status.is_success() {
                    ret.push((url.to_string(), Some(true)));
                } else if status.is_client_error() {
                    ret.push((url.to_string(), Some(false)));
                } else {
                    ret.push((url.to_string(), None));
                }
            }
            Err(e) => {
                log::debug!("Error fetching {}: {}", url, e);
                ret.push((url.to_string(), None));
            }
        }
    }
    ret
}

/// Check upstream metadata.
///
/// This will make network connections, etc.
/// Check upstream metadata in place, canonicalizing URLs over the network.
///
/// For each URL-bearing field (Repository, Homepage, Repository-Browse,
/// Bug-Database, Bug-Submit) the URL is canonicalized; on success the field's
/// certainty may be upgraded to `Certain`, on `InvalidUrl` the field is
/// removed, and on `Unverifiable`/`RateLimited` it is left untouched.
/// Screenshot URLs with `Likely` certainty are fetched and filtered.
///
/// # Arguments
/// * `upstream_metadata`: metadata to verify and update in place
/// * `version`: optional version hint passed to repository URL checking
pub async fn check_upstream_metadata(
    upstream_metadata: &mut UpstreamMetadata,
    version: Option<&str>,
) {
    let repository = upstream_metadata.get_mut("Repository");
    if let Some(repository) = repository {
        if let Some(repo_url) = repository.datum.to_url() {
            match vcs::check_repository_url_canonical(repo_url, version).await {
                Ok(canonical_url) => {
                    repository.datum = UpstreamDatum::Repository(canonical_url.to_string());
                    if repository.certainty == Some(Certainty::Confident) {
                        repository.certainty = Some(Certainty::Certain);
                    }
                    // If the browse URL derivable from the canonical repo URL
                    // matches the recorded Repository-Browse, propagate the
                    // repository's certainty to it.
                    if let Some(url) = repository.datum.to_url() {
                        let derived_browse_url = vcs::browse_url_from_repo_url(
                            &vcs::VcsLocation {
                                url,
                                branch: None,
                                subpath: None,
                            },
                            Some(true),
                        )
                        .await;
                        let certainty = repository.certainty;
                        if let Some(browse_repo) = upstream_metadata.get_mut("Repository-Browse") {
                            if derived_browse_url == browse_repo.datum.to_url() {
                                browse_repo.certainty = certainty;
                            }
                        }
                    }
                }
                Err(CanonicalizeError::Unverifiable(u, _))
                | Err(CanonicalizeError::RateLimited(u)) => {
                    log::debug!("Unverifiable URL: {}", u);
                }
                Err(CanonicalizeError::InvalidUrl(u, e)) => {
                    log::debug!("Deleting invalid Repository URL {}: {}", u, e);
                    upstream_metadata.remove("Repository");
                }
            }
        } else {
            log::debug!("Repository field is not a valid URL, skipping check");
        }
    }
    let homepage = upstream_metadata.get_mut("Homepage");
    if let Some(homepage) = homepage {
        if let Some(homepage_url) = homepage.datum.to_url() {
            match check_url_canonical(&homepage_url).await {
                Ok(canonical_url) => {
                    homepage.datum = UpstreamDatum::Homepage(canonical_url.to_string());
                    if homepage.certainty >= Some(Certainty::Likely) {
                        homepage.certainty = Some(Certainty::Certain);
                    }
                }
                Err(CanonicalizeError::Unverifiable(u, _))
                | Err(CanonicalizeError::RateLimited(u)) => {
                    log::debug!("Unverifiable URL: {}", u);
                }
                Err(CanonicalizeError::InvalidUrl(u, e)) => {
                    log::debug!("Deleting invalid Homepage URL {}: {}", u, e);
                    upstream_metadata.remove("Homepage");
                }
            }
        } else {
            log::debug!("Homepage field is not a valid URL, skipping check");
        }
    }
    if let Some(repository_browse) = upstream_metadata.get_mut("Repository-Browse") {
        if let Some(browse_url) = repository_browse.datum.to_url() {
            match check_url_canonical(&browse_url).await {
                Ok(u) => {
                    repository_browse.datum = UpstreamDatum::RepositoryBrowse(u.to_string());
                    if repository_browse.certainty >= Some(Certainty::Likely) {
                        repository_browse.certainty = Some(Certainty::Certain);
                    }
                }
                Err(CanonicalizeError::InvalidUrl(u, e)) => {
                    log::debug!("Deleting invalid Repository-Browse URL {}: {}", u, e);
                    upstream_metadata.remove("Repository-Browse");
                }
                Err(CanonicalizeError::Unverifiable(u, _))
                | Err(CanonicalizeError::RateLimited(u)) => {
                    log::debug!("Unable to verify Repository-Browse URL {}", u);
                }
            }
        } else {
            log::debug!("Repository-Browse field is not a valid URL, skipping check");
        }
    }
    if let Some(bug_database) = upstream_metadata.get_mut("Bug-Database") {
        if let Some(bug_db_url) = bug_database.datum.to_url() {
            match check_bug_database_canonical(&bug_db_url, Some(true)).await {
                Ok(u) => {
                    bug_database.datum = UpstreamDatum::BugDatabase(u.to_string());
                    if bug_database.certainty >= Some(Certainty::Likely) {
                        bug_database.certainty = Some(Certainty::Certain);
                    }
                }
                Err(CanonicalizeError::InvalidUrl(u, e)) => {
                    log::debug!("Deleting invalid Bug-Database URL {}: {}", u, e);
                    upstream_metadata.remove("Bug-Database");
                }
                Err(CanonicalizeError::Unverifiable(u, _))
                | Err(CanonicalizeError::RateLimited(u)) => {
                    log::debug!("Unable to verify Bug-Database URL {}", u);
                }
            }
        } else {
            log::debug!("Bug-Database field is not a valid URL, skipping check");
        }
    }
    let bug_submit = upstream_metadata.get_mut("Bug-Submit");
    if let Some(bug_submit) = bug_submit {
        if let Some(bug_submit_url) = bug_submit.datum.to_url() {
            match check_bug_submit_url_canonical(&bug_submit_url, Some(true)).await {
                Ok(u) => {
                    bug_submit.datum = UpstreamDatum::BugSubmit(u.to_string());
                    if bug_submit.certainty >= Some(Certainty::Likely) {
                        bug_submit.certainty = Some(Certainty::Certain);
                    }
                }
                Err(CanonicalizeError::InvalidUrl(u, e)) => {
                    log::debug!("Deleting invalid Bug-Submit URL {}: {}", u, e);
                    upstream_metadata.remove("Bug-Submit");
                }
                Err(CanonicalizeError::Unverifiable(u, _))
                | Err(CanonicalizeError::RateLimited(u)) => {
                    log::debug!("Unable to verify Bug-Submit URL {}", u);
                }
            }
        } else {
            log::debug!("Bug-Submit field is not a valid URL, skipping check");
        }
    }
    // Bind the Option once instead of repeated `is_some()`/`unwrap()` calls.
    if let Some(screenshots) = upstream_metadata.get_mut("Screenshots") {
        if screenshots.certainty == Some(Certainty::Likely) {
            let mut newvalue = vec![];
            // Optimistically assume all screenshots verify; downgraded below
            // if any URL could not be checked.
            screenshots.certainty = Some(Certainty::Certain);
            let urls = match &screenshots.datum {
                UpstreamDatum::Screenshots(urls) => urls,
                _ => unreachable!(),
            };
            for (url, status) in verify_screenshots(
                urls.iter()
                    .map(|x| x.as_str())
                    .collect::<Vec<_>>()
                    .as_slice(),
            )
            .await
            {
                match status {
                    Some(true) => {
                        newvalue.push(url);
                    }
                    // Definitely broken: drop the URL.
                    Some(false) => {}
                    // Unverifiable: keep certainty at Likely.
                    None => {
                        screenshots.certainty = Some(Certainty::Likely);
                    }
                }
            }
            screenshots.datum = UpstreamDatum::Screenshots(newvalue);
        }
    }
}

#[async_trait::async_trait]
pub(crate) trait Guesser: Send {
    fn name(&self) -> &str;

    /// Guess metadata from a given path.
async fn guess( &mut self, settings: &GuesserSettings, ) -> Result, ProviderError>; } type AsyncGuesserFunction = Box< dyn FnMut( PathBuf, GuesserSettings, ) -> Pin< Box< dyn std::future::Future< Output = Result, ProviderError>, > + Send, >, > + Send, >; /// Guesser that extracts metadata from a specific file path pub struct PathGuesser { name: String, subpath: std::path::PathBuf, cb: AsyncGuesserFunction, } #[async_trait::async_trait] impl Guesser for PathGuesser { fn name(&self) -> &str { &self.name } async fn guess( &mut self, settings: &GuesserSettings, ) -> Result, ProviderError> { (self.cb)(self.subpath.clone(), settings.clone()).await } } /// Guesser that extracts metadata from environment variables pub struct EnvironmentGuesser; impl EnvironmentGuesser { /// Creates a new EnvironmentGuesser pub fn new() -> Self { Self } } impl Default for EnvironmentGuesser { fn default() -> Self { Self::new() } } #[async_trait::async_trait] impl Guesser for EnvironmentGuesser { fn name(&self) -> &str { "environment" } async fn guess( &mut self, _settings: &GuesserSettings, ) -> Result, ProviderError> { crate::guess_from_environment() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_upstream_metadata() { let mut data = UpstreamMetadata::new(); assert_eq!(data.len(), 0); data.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://example.com".to_string()), certainty: Some(Certainty::Certain), origin: None, }); assert_eq!(data.len(), 1); assert_eq!( data.get("Homepage").unwrap().datum.as_str().unwrap(), "https://example.com" ); assert_eq!(data.homepage(), Some("https://example.com")); } #[tokio::test] async fn test_bug_database_url_from_bug_submit_url() { let url = Url::parse("https://bugs.launchpad.net/bugs/+filebug").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None) .await .unwrap(), Url::parse("https://bugs.launchpad.net/bugs").unwrap() ); let url = Url::parse("https://github.com/dulwich/dulwich/issues/new").unwrap(); 
assert_eq!( bug_database_url_from_bug_submit_url(&url, None) .await .unwrap(), Url::parse("https://github.com/dulwich/dulwich/issues").unwrap() ); let url = Url::parse("https://sourceforge.net/p/dulwich/bugs/new").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None) .await .unwrap(), Url::parse("https://sourceforge.net/p/dulwich/bugs").unwrap() ); } #[test] fn test_person_from_str() { assert_eq!( Person::from("Foo Bar "), Person { name: Some("Foo Bar".to_string()), email: Some("foo@example.com".to_string()), url: None } ); assert_eq!( Person::from("Foo Bar"), Person { name: Some("Foo Bar".to_string()), email: None, url: None } ); assert_eq!( Person::from("foo@example.com"), Person { name: None, email: Some("foo@example.com".to_string()), url: None } ); // Test person with just email (no name) - parseaddr returns empty name assert_eq!( Person::from(""), Person { name: Some("".to_string()), email: Some("foo@example.com".to_string()), url: None } ); } #[test] fn test_upstream_metadata_accessors() { let mut metadata = UpstreamMetadata::default(); // Test empty metadata assert_eq!(metadata.version(), None); assert_eq!(metadata.description(), None); assert_eq!(metadata.wiki(), None); assert_eq!(metadata.download(), None); assert_eq!(metadata.security_contact(), None); assert_eq!(metadata.donation(), None); assert_eq!(metadata.cite_as(), None); assert_eq!(metadata.webservice(), None); assert_eq!(metadata.copyright(), None); assert_eq!(metadata.sourceforge_project(), None); assert_eq!(metadata.pecl_package(), None); // Add some data and test again metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version("1.0.0".to_string()), certainty: Some(Certainty::Certain), origin: None, }); assert_eq!(metadata.version(), Some("1.0.0")); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description("Test description".to_string()), certainty: Some(Certainty::Certain), origin: None, }); assert_eq!(metadata.description(), Some("Test 
description")); } #[test] fn test_upstream_metadata_iterators() { let mut metadata = UpstreamMetadata::default(); // Test empty iterator assert_eq!(metadata.iter().count(), 0); assert_eq!(metadata.mut_iter().count(), 0); // Add data and test again metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("test".to_string()), certainty: Some(Certainty::Certain), origin: None, }); assert_eq!(metadata.iter().count(), 1); assert_eq!(metadata.mut_iter().count(), 1); } #[test] fn test_extract_pecl_package_name() { use super::extract_pecl_package_name; assert_eq!( extract_pecl_package_name("https://pecl.php.net/package/redis"), Some("redis".to_string()) ); assert_eq!( extract_pecl_package_name("https://pecl.php.net/package/xdebug/2.9.0"), Some("xdebug/2.9.0".to_string()) ); assert_eq!( extract_pecl_package_name("https://example.com/something"), None ); } #[test] fn test_forge_names() { let github = GitHub; assert_eq!(github.name(), "GitHub"); let gitlab = GitLab; assert_eq!(gitlab.name(), "GitLab"); let sourceforge = SourceForge; assert_eq!(sourceforge.name(), "SourceForge"); let launchpad = Launchpad; assert_eq!(launchpad.name(), "launchpad"); } } upstream-ontologist-0.3.6/src/providers/arch.rs000064400000000000000000000143601046102023000177640ustar 00000000000000use crate::{vcs, UpstreamDatum, USER_AGENT}; use log::{debug, error}; use std::collections::HashMap; use std::io::BufRead; /// Parses variables from a PKGBUILD file pub fn parse_pkgbuild_variables(file: &str) -> HashMap> { let reader = std::io::Cursor::new(file); let mut variables = HashMap::new(); let mut keep: Option<(String, String)> = None; let mut existing: Option = None; for line in reader.lines() { let line = line.expect("Failed to read line"); if let Some(existing_line) = existing.take() { let line = [&existing_line[..existing_line.len() - 2], &line].concat(); existing = Some(line); continue; } if line.ends_with("\\\n") { existing = Some(line[..line.len() - 2].to_owned()); continue; } if 
line.starts_with('\t') || line.starts_with(' ') || line.starts_with('#') { continue; } if let Some((key, mut value)) = keep.take() { value.push_str(&line); if line.trim_end().ends_with(')') { let value_parts = match shlex::split(value.as_str()) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value.as_str()); continue; } }; variables.insert(key, value_parts); } else { keep = Some((key, value)); } continue; } if let Some((key, value)) = line.split_once('=') { if let Some(value) = value.strip_prefix('(') { if value.trim_end().ends_with(')') { let value = &value[0..value.len() - 1]; let value_parts = match shlex::split(value) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value); continue; } }; variables.insert(key.to_owned(), value_parts); } else { keep = Some((key.to_owned(), value.to_owned())); } } else { let value_parts = match shlex::split(value) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value); continue; } }; variables.insert(key.to_owned(), value_parts); } } } variables } /// Fetches upstream metadata from the Arch User Repository (AUR) pub async fn guess_from_aur(package: &str) -> Vec { let mut variables = HashMap::new(); for vcs in vcs::VCSES { let url = format!( "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h={}-{}", package, vcs ); let mut headers = reqwest::header::HeaderMap::new(); headers.insert(reqwest::header::USER_AGENT, USER_AGENT.parse().unwrap()); let client = reqwest::Client::builder() .default_headers(headers) .build() .unwrap(); debug!("Requesting {}", url); let response = client.get(&url).send().await; match response { Ok(response) => { if response.status().is_success() { let text = response.text().await.unwrap(); variables = parse_pkgbuild_variables(&text); break; } else if response.status().as_u16() != 404 { // If the response is not 404, raise an error // response.error_for_status(); error!("Error contacting AUR: {}", 
response.status()); return Vec::new(); } else { continue; } } Err(e) => { error!("Error contacting AUR: {}", e); return Vec::new(); } } } let mut results = Vec::new(); for (key, value) in variables.iter() { match key.as_str() { "url" => { results.push(UpstreamDatum::Homepage(value[0].to_owned())); } "source" => { if value.is_empty() { continue; } let mut value = value[0].to_owned(); if value.contains("${") { for (k, v) in variables.iter() { value = value.replace(format!("${{{}}}", k).as_str(), v.join(" ").as_str()); value = value.replace(format!("${}", k).as_str(), v.join(" ").as_str()); } } let url = match value.split_once("::") { Some((_unique_name, url)) => url, None => value.as_str(), }; let url = url.replace("#branch=", ",branch="); results.push(UpstreamDatum::Repository( vcs::strip_vcs_prefixes(url.as_str()).to_owned(), )); } "_gitroot" => { results.push(UpstreamDatum::Repository( vcs::strip_vcs_prefixes(value[0].as_str()).to_owned(), )); } _ => { debug!("Ignoring variable: {}", key); } } } results } /// Arch User Repository (AUR) metadata provider pub struct Aur; impl Default for Aur { fn default() -> Self { Self::new() } } impl Aur { /// Creates a new AUR metadata provider pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Aur { fn name(&self) -> &'static str { "AUR" } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository"] } fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Possible } async fn guess_metadata(&self, name: &str) -> Result, crate::ProviderError> { Ok(guess_from_aur(name).await) } } upstream-ontologist-0.3.6/src/providers/authors.rs000064400000000000000000000033011046102023000205250ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use std::fs::File; use std::io::BufRead; use std::path::Path; /// Extracts author information from AUTHORS file pub fn guess_from_authors( 
path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = std::io::BufReader::new(file); let mut authors: Vec = Vec::new(); for line in reader.lines().map_while(Result::ok) { let mut m = line.trim().to_string(); if m.is_empty() { continue; } if m.starts_with("arch-tag: ") { continue; } if m.ends_with(':') { continue; } if m.starts_with("$Id") { continue; } if m.starts_with('*') || m.starts_with('-') { m = m[1..].trim().to_string(); } if m.len() < 3 { continue; } if m.ends_with('.') { continue; } if m.contains(" for ") { let parts: Vec<&str> = m.split(" for ").collect(); m = parts[0].to_string(); } if !m.chars().next().unwrap().is_alphabetic() { continue; } if !m.contains('<') && line.as_bytes().starts_with(b"\t") { continue; } if m.contains('<') || m.matches(' ').count() < 5 { authors.push(Person::from(m.as_str())); } } Ok(vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Likely), origin: Some(path.into()), }]) } upstream-ontologist-0.3.6/src/providers/autoconf.rs000064400000000000000000000141731046102023000206670ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::debug; use std::fs::File; use std::io::{BufRead, BufReader}; use url::Url; fn is_email_address(email: &str) -> bool { if email.contains('@') { return true; } if email.contains(" (at) ") { return true; } false } /// Extracts upstream metadata from autoconf configure script pub fn guess_from_configure( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { if std::path::Path::new(path).is_dir() { return Ok(Vec::new()); } let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); for line in reader.split(b'\n').map_while(Result::ok) { let split = line.splitn(2, |&c| c == b'=').collect::>(); let (key, value) = if let [key, value] = 
split.as_slice() { (key, value) } else { continue; }; let key = String::from_utf8(key.to_vec()).expect("Failed to parse UTF-8"); let key = key.trim(); let value = String::from_utf8(value.to_vec()).expect("Failed to parse UTF-8"); let mut value = value.trim(); if key.contains(' ') { continue; } if value.contains('$') { continue; } if value.starts_with('\'') && value.ends_with('\'') { if value.len() >= 2 { value = &value[1..value.len() - 1]; if value.is_empty() { continue; } } else { // Single quote character, skip it continue; } } match key { "PACKAGE_NAME" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_TARNAME" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_VERSION" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_BUGREPORT" => { let certainty = if value == "BUG-REPORT-ADDRESS" { None } else if is_email_address(value) { // Downgrade the trustworthiness of this field for most // upstreams if it contains an e-mail address. Most // upstreams seem to just set this to some random address, // and then forget about it. Some(Certainty::Possible) } else if value.contains("mailing list") { // Downgrade the trustworthiness of this field if // it contains a mailing list Some(Certainty::Possible) } else { let parsed_url = Url::parse(value).expect("Failed to parse URL"); if !parsed_url.path().trim_end_matches('/').is_empty() { Some(Certainty::Certain) } else { // It seems unlikely that the bug submit URL lives at // the root. 
Some(Certainty::Possible) } }; if certainty.is_some() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugSubmit(value.to_string()), certainty, origin: Some(path.into()), }); } } "PACKAGE_URL" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } _ => { debug!("unknown key: {}", key); } } } Ok(results) } #[cfg(test)] mod tests { use super::*; use std::io::Write; use tempfile::NamedTempFile; #[test] fn test_single_quote_value() { // Test that a single quote character doesn't cause a panic let mut file = NamedTempFile::new().unwrap(); writeln!(file, "PACKAGE_NAME='").unwrap(); let settings = GuesserSettings::default(); let result = guess_from_configure(file.path(), &settings); assert!(result.is_ok()); let data = result.unwrap(); // Single quote should be skipped, so no results assert_eq!(data.len(), 0); } #[test] fn test_empty_quoted_value() { // Test that empty quoted strings are skipped let mut file = NamedTempFile::new().unwrap(); writeln!(file, "PACKAGE_NAME=''").unwrap(); let settings = GuesserSettings::default(); let result = guess_from_configure(file.path(), &settings); assert!(result.is_ok()); let data = result.unwrap(); // Empty quoted value should be skipped assert_eq!(data.len(), 0); } #[test] fn test_valid_quoted_value() { // Test that properly quoted values are extracted let mut file = NamedTempFile::new().unwrap(); writeln!(file, "PACKAGE_NAME='my-package'").unwrap(); let settings = GuesserSettings::default(); let result = guess_from_configure(file.path(), &settings); assert!(result.is_ok()); let data = result.unwrap(); assert_eq!(data.len(), 1); assert!(matches!(data[0].datum, UpstreamDatum::Name(ref name) if name == "my-package")); } } upstream-ontologist-0.3.6/src/providers/composer_json.rs000064400000000000000000000070651046102023000217330ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, 
UpstreamDatum, UpstreamDatumWithMetadata}; use log::error; use std::path::Path; /// Extracts upstream metadata from PHP composer.json file pub fn guess_from_composer_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // https://getcomposer.org/doc/04-schema.md let file = std::fs::File::open(path)?; let package: serde_json::Value = serde_json::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); let package = match package.as_object() { Some(package) => package, None => { return Err(ProviderError::Other( "Failed to parse composer.json".to_string(), )) } }; for (field, value) in package { match field.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "homepage" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "license" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "type" => { if value != "project" { error!("unexpected composer.json type: {:?}", value); } } "keywords" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords( value .as_array() .unwrap() .iter() .map(|v| v.as_str().unwrap().to_string()) .collect(), 
), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "require" | "require-dev" | "autoload" | "autoload-dev" | "scripts" | "extra" | "config" | "prefer-stable" | "minimum-stability" => { // Do nothing, skip these fields } _ => { error!("Unknown field {} ({:?}) in composer.json", field, value); } } } Ok(upstream_data) } upstream-ontologist-0.3.6/src/providers/debian.rs000064400000000000000000000723001046102023000202670ustar 00000000000000use crate::{ bug_database_from_issue_url, repo_url_from_merge_request_url, Certainty, GuesserSettings, Origin, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use lazy_regex::regex_captures; use log::debug; use std::fs::File; use std::io::BufRead; use std::io::Read; use std::path::Path; use url::Url; /// Extracts upstream metadata from Debian patch files pub async fn guess_from_debian_patch( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = std::io::BufReader::new(file); let net_access = None; let mut upstream_data: Vec = Vec::new(); for line in reader.lines().map_while(Result::ok) { if line.starts_with("Forwarded: ") { let forwarded = match line.split_once(':') { Some((_, url)) => url.trim(), None => { debug!("Malformed Forwarded line in patch {}", path.display()); continue; } }; let forwarded = match Url::parse(forwarded) { Ok(url) => url, Err(e) => { debug!( "Malformed URL in Forwarded line in patch {}: {}", path.display(), e ); continue; } }; if let Some(bug_db) = bug_database_from_issue_url(&forwarded, net_access).await { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_db.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(repo_url) = repo_url_from_merge_request_url(&forwarded, net_access).await { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: 
Some(Certainty::Possible), origin: Some(path.into()), }); } } } Ok(upstream_data) } /// Extracts metadata from Debian ITP bug body text pub fn metadata_from_itp_bug_body( body: &str, origin: Option, ) -> std::result::Result, ProviderError> { let mut results: Vec = Vec::new(); // Skip first few lines with bug metadata (severity, owner, etc) let mut line_iter = body.split_terminator('\n'); let mut next_line = line_iter.next(); while let Some(line) = next_line { if next_line.is_none() { return Err(ProviderError::ParseError( "ITP bug body ended before package name".to_string(), )); } next_line = line_iter.next(); if line.trim().is_empty() { break; } } while let Some(line) = next_line { if next_line.is_none() { return Err(ProviderError::ParseError( "ITP bug body ended before package name".to_string(), )); } if !line.is_empty() { break; } next_line = line_iter.next(); } while let Some(mut line) = next_line { line = line.trim_start_matches('*').trim_start(); if line.is_empty() { break; } match line.split_once(':') { Some((key, value)) => { let key = key.trim(); let value = value.trim(); match key { "Package name" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "Version" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Possible), origin: origin.clone(), }); } "Upstream Author" if !value.is_empty() => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from(value)]), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "URL" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "License" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.to_string()), certainty: Some(Certainty::Confident), origin: 
origin.clone(), }); } "Description" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } _ => { debug!("Unknown pseudo-header {} in ITP bug body", key); } } } _ => { debug!("Ignoring non-semi-field line {}", line); } } next_line = line_iter.next(); } let mut rest: Vec = Vec::new(); for line in line_iter { if line.trim() == "-- System Information:" { break; } rest.push(line.to_string()); } results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(rest.join("\n")), certainty: Some(Certainty::Likely), origin: origin.clone(), }); Ok(results) } #[test] fn test_metadata_from_itp_bug_body() { assert_eq!( vec![ UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("setuptools-gettext".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Version("0.0.1".to_string()), certainty: Some(Certainty::Possible), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from("Breezy Team ")]), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://github.com/jelmer/setuptools-gettext".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("GPL".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary("Compile .po files into .mo files".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Description("This extension for setuptools compiles gettext .po files\nfound in the source directory into .mo files and installs them.\n".to_string()), certainty: Some(Certainty::Likely), origin: None, }, ], metadata_from_itp_bug_body( r#"Package: wnpp Severity: wishlist Owner: Jelmer Vernooij 
Debbugs-Cc: debian-devel@lists.debian.org * Package name : setuptools-gettext Version : 0.0.1 Upstream Author : Breezy Team * URL : https://github.com/jelmer/setuptools-gettext * License : GPL Programming Lang: Python Description : Compile .po files into .mo files This extension for setuptools compiles gettext .po files found in the source directory into .mo files and installs them. "#, None ) .unwrap() ); } #[cfg(feature = "debian")] fn read_changelog_first_entry( path: &Path, ) -> Result<(String, Option, Vec), ProviderError> { let cl = debian_changelog::ChangeLog::read_path(path).map_err(|e| { ProviderError::ParseError(format!( "Failed to parse changelog {}: {}", path.display(), e )) })?; let entry = cl .iter() .next() .ok_or_else(|| ProviderError::ParseError("Empty changelog".to_string()))?; let package = entry.package().ok_or_else(|| { ProviderError::ParseError(format!("Changelog {} has no package name", path.display())) })?; let version = entry.version().map(|v| { v.to_string() .parse() .expect("debversion parse should not fail") }); let change_lines = entry.change_lines().collect::>(); Ok((package.to_string(), version, change_lines)) } #[cfg(feature = "debian")] /// Extracts upstream metadata from Debian changelog file pub async fn guess_from_debian_changelog( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let (package, version, change_lines) = read_changelog_first_entry(path)?; let mut ret = Vec::new(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(package.clone()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); if let Some(version) = version { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.upstream_version), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } #[cfg(feature = "debcargo")] if package.starts_with("rust-") { let debcargo_toml_path = path.parent().unwrap().join("debcargo.toml"); let debcargo_config = 
debcargo::config::Config::parse(debcargo_toml_path.as_path()) .map_err(|e| { ProviderError::ParseError(format!( "Failed to parse debcargo config {}: {}", path.display(), e )) })?; let semver_suffix = debcargo_config.semver_suffix; let (mut crate_name, _crate_semver_version) = parse_debcargo_source_name(&package, semver_suffix); if crate_name.contains('-') { crate_name = match crate::providers::rust::cargo_translate_dashes(crate_name.as_str()) .await .map_err(|e| { ProviderError::Other(format!( "Failed to translate dashes in crate name {}: {}", crate_name, e )) })? { Some(name) => name, None => { return Err(ProviderError::Other(format!( "Failed to translate dashes in crate name {}", crate_name ))) } }; } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("crates.io".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::CargoCrate(crate_name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(itp) = find_itp(&change_lines) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::DebianITP(itp), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.extend(guess_from_itp_bug(itp)?); } Ok(ret) } /// Finds ITP bug number from debian changelog entries pub fn find_itp(changes: &[String]) -> Option { for line in changes { if let Some((_, itp)) = regex_captures!(r"\* Initial release. 
\(?Closes: #(\d+)\)?", line) { return Some(itp.parse().unwrap()); } } None } /// Extracts upstream metadata from Debian ITP bug pub fn guess_from_itp_bug( bugno: i32, ) -> std::result::Result, ProviderError> { let debbugs = debbugs::blocking::Debbugs::default(); let log = debbugs.get_bug_log(bugno).map_err(|e| { ProviderError::ParseError(format!("Failed to get bug log for bug {}: {}", bugno, e)) })?; metadata_from_itp_bug_body( log[0].body.as_str(), Some(Origin::Other(format!("Debian bug #{}", bugno))), ) } /// Parse a debcargo source name and return crate. /// /// # Arguments /// * `source_name` - Source package name /// * `semver_suffix` - Whether semver_suffix is enabled /// /// # Returns /// tuple with crate name and optional semver pub fn parse_debcargo_source_name( source_name: &str, semver_suffix: bool, ) -> (String, Option) { let mut crate_name = source_name.strip_prefix("rust-").unwrap(); match crate_name.rsplitn(2, '-').collect::>().as_slice() { [semver, new_crate_name] if semver_suffix => { crate_name = new_crate_name; (crate_name.to_string(), Some(semver.to_string())) } _ => (crate_name.to_string(), None), } } #[cfg(feature = "debian")] /// Extracts upstream metadata from debian/rules file pub fn guess_from_debian_rules( path: &Path, _settings: &GuesserSettings, ) -> Result, ProviderError> { let f = std::fs::File::open(path)?; let mf = makefile_lossless::Makefile::read_relaxed(f) .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/rules: {}", e)))?; let mut ret = vec![]; if let Some(variable) = mf .variable_definitions() .find(|v| v.name().as_deref() == Some("DEB_UPSTREAM_GIT")) { let certainty = Some(Certainty::Likely); let datum = UpstreamDatum::Repository(variable.raw_value().unwrap()); ret.push(UpstreamDatumWithMetadata { datum, certainty, origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(deb_upstream_url) = mf .variable_definitions() .find(|v| v.name().as_deref() == Some("DEB_UPSTREAM_URL")) { let certainty = 
Some(Certainty::Likely); let datum = UpstreamDatum::Download(deb_upstream_url.raw_value().unwrap()); ret.push(UpstreamDatumWithMetadata { datum, certainty, origin: Some(Origin::Path(path.to_path_buf())), }); } Ok(ret) } #[cfg(feature = "debian")] /// Extracts upstream metadata from debian/control file pub fn guess_from_debian_control( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut ret = vec![]; use std::str::FromStr; let control = debian_control::Control::from_str(&std::fs::read_to_string(path)?) .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/control: {}", e)))?; let source = control.source().unwrap(); let is_native = debian_is_native(path.parent().unwrap()).map_err(|e| { ProviderError::ParseError(format!("Failed to parse debian/source/format: {}", e)) })?; if let Some(homepage) = source.homepage() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(go_import_path) = source.as_deb822().get("XS-Go-Import-Path") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(go_import_path.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://{}", go_import_path)), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } if is_native == Some(true) { if let Some(vcs_git) = source.vcs_git() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(vcs_git), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(vcs_browser) = source.vcs_browser() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(vcs_browser), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } let binaries = control.binaries().collect::>(); let certainty = if binaries.len() == 1 && is_native 
== Some(true) { // Debian native package with only one binary package Certainty::Certain } else if binaries.len() > 1 && is_native == Some(true) { Certainty::Possible } else if binaries.len() == 1 && is_native == Some(false) { // Debian non-native package with only one binary package, so description is likely to be // good but might be Debian-specific Certainty::Confident } else { Certainty::Likely }; for binary in binaries { if let Some(description) = binary.description() { let lines = description.split('\n').collect::>(); let mut summary = lines[0].to_string(); let mut description_lines = &lines[1..]; if !description_lines.is_empty() && description_lines .last() .unwrap() .starts_with("This package contains") { summary = summary .split(" - ") .next() .unwrap_or(summary.as_str()) .to_string(); description_lines = description_lines.split_last().unwrap().1; } if !summary.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary), certainty: Some(certainty), origin: Some(path.into()), }); } if !description_lines.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description_lines.join("\n")), certainty: Some(certainty), origin: Some(path.into()), }); } } } Ok(ret) } #[cfg(feature = "debian")] /// Extracts upstream metadata from debian/copyright file pub async fn guess_from_debian_copyright( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use debian_copyright::lossless::{Copyright, Error}; let mut ret = vec![]; let text = &std::fs::read_to_string(path)?; let mut urls = vec![]; match Copyright::from_str_relaxed(text) { Ok((c, _)) => { let header = c.header().unwrap(); if let Some(upstream_name) = header.upstream_name() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(upstream_name.to_string()), certainty: Some(if upstream_name.contains(' ') { Certainty::Confident } else { Certainty::Certain }), origin: Some(path.into()), }); } if let 
Some(upstream_contact) = header.upstream_contact() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(upstream_contact), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(source) = header.source() { if source.contains(' ') { urls.extend( source .split([' ', '\n', ',']) .filter(|s| !s.is_empty()) .map(|s| s.to_string()), ); } else { urls.push(source.clone()); } for captures in lazy_regex::regex!(r"(http|https)://([^ ,]+)").captures_iter(source.as_str()) { urls.push(captures[0].to_string()); } } if let Some(upstream_bugs) = header.as_deb822().get("X-Upstream-Bugs") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(upstream_bugs), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(source_downloaded_from) = header.as_deb822().get("X-Source-Downloaded-From") { if let Ok(url) = source_downloaded_from.parse::() { urls.push(url.to_string()); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(source_downloaded_from), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } let referenced_licenses = c .iter_licenses() .filter_map(|l| l.name()) .collect::>(); if referenced_licenses.len() == 1 { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(referenced_licenses.into_iter().next().unwrap()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Err(Error::IoError(e)) => { unreachable!("IO error: {}", e); } Err(Error::ParseError(e)) => { return Err(ProviderError::ParseError(e.to_string())); } Err(Error::NotMachineReadable) => { for line in text.lines() { if let Some(name) = line.strip_prefix("Upstream-Name: ") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Possible), origin: Some(Origin::Path(path.into())), }); } if let Some(url) = lazy_regex::regex_find!(r".* was downloaded from ([^\s]+)", line) { urls.push(url.to_string()); 
ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } Err(Error::InvalidValue(e)) => { return Err(ProviderError::ParseError(format!( "Invalid value in debian/copyright: {}", e ))); } } for url in urls.into_iter() { if let Ok(url) = url.parse() { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url, None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } ret.extend(crate::metadata_from_url( url.as_str(), &Origin::Path(path.into()), )); } Ok(ret) } #[cfg(feature = "debian")] fn read_entries(path: &Path) -> Result, ProviderError> { use debian_changelog::ChangeLog; let get_package_name = || -> String { let text = std::fs::read_to_string(path.parent().unwrap().join("changelog")).unwrap(); let cl: ChangeLog = text.parse().unwrap(); let first_entry = cl.iter().next().unwrap(); first_entry.package().unwrap() }; let w: debian_watch::WatchFile = std::fs::read_to_string(path)? 
.parse() .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/watch: {}", e)))?; let entries = w .entries() .map(|e| (e.format_url(get_package_name), e.mode().unwrap_or_default())) .collect::>(); Ok(entries) } #[cfg(feature = "debian")] /// Extracts upstream metadata from debian/watch file pub async fn guess_from_debian_watch( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut ret = vec![]; use debian_watch::Mode; let entries = read_entries(path)?; let origin = Origin::Path(path.into()); for (url, mode) in entries { match mode { Mode::Git => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } Mode::Svn => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } Mode::LWP => { if url.scheme() == "http" || url.scheme() == "https" { let url = url.clone(); if let Some(repo) = crate::vcs::guess_repo_from_url(&url, None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } } } }; ret.extend(crate::metadata_from_url(url.as_str(), &origin)); } Ok(ret) } #[cfg(feature = "debian")] /// Checks if a Debian package is native (no upstream) pub fn debian_is_native(path: &Path) -> std::io::Result> { let format_file_path = path.join("source/format"); match File::open(format_file_path) { Ok(mut file) => { let mut content = String::new(); file.read_to_string(&mut content)?; return Ok(Some(content.trim() == "3.0 (native)")); } Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} Err(e) => return Err(e), } let changelog_file = path.join("changelog"); match File::open(changelog_file) { Ok(mut file) => { let cl = debian_changelog::ChangeLog::read(&mut file).map_err(std::io::Error::other)?; let 
first_entry = cl.iter().next().unwrap(); let version = first_entry.version().unwrap(); return Ok(Some(version.debian_revision.is_none())); } Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} Err(e) => return Err(e), } Ok(None) } #[cfg(test)] mod watch_tests { use super::*; #[cfg(feature = "debian")] #[tokio::test] async fn test_empty() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("watch"); std::fs::write( &path, r#" # Blah "#, ) .unwrap(); assert!(guess_from_debian_watch(&path, &GuesserSettings::default()) .await .unwrap() .is_empty()); } #[cfg(feature = "debian")] #[tokio::test] async fn test_simple() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("watch"); std::fs::write( &path, r#"version=4 https://github.com/jelmer/dulwich/tags/dulwich-(.*).tar.gz "#, ) .unwrap(); assert_eq!( vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository("https://github.com/jelmer/dulwich".to_string()), certainty: Some(Certainty::Confident), origin: Some(path.clone().into()) }], guess_from_debian_watch(&path, &GuesserSettings::default()) .await .unwrap() ); } } upstream-ontologist-0.3.6/src/providers/doap.rs000064400000000000000000000245231046102023000177740ustar 00000000000000//! 
See use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::error; use std::fs::File; use std::path::Path; /// Extracts upstream metadata from DOAP (Description of a Project) files pub fn guess_from_doap( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path).expect("Failed to open file"); let doc = Element::parse(file).expect("Failed to parse XML"); let mut root = &doc; let mut results: Vec = Vec::new(); const DOAP_NAMESPACE: &str = "http://usefulinc.com/ns/doap#"; const RDF_NAMESPACE: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; const SCHEMA_NAMESPACE: &str = "https://schema.org/"; if root.name == "RDF" && root.namespace.as_deref() == Some(RDF_NAMESPACE) { for child in root.children.iter() { if let Some(element) = child.as_element() { root = element; break; } } } if root.name != "Project" || root.namespace.as_deref() != Some(DOAP_NAMESPACE) { return Err(ProviderError::ParseError(format!( "Doap file does not have DOAP project as root, but {}", root.name ))); } fn extract_url(el: &Element) -> Option<&str> { el.attributes.get("resource").map(|url| url.as_str()) } fn extract_lang(el: &Element) -> Option<&str> { el.attributes.get("lang").map(|lang| lang.as_str()) } let mut screenshots: Vec = Vec::new(); let mut maintainers: Vec = Vec::new(); for child in &root.children { let child = if let Some(element) = child.as_element() { element } else { continue; }; match (child.namespace.as_deref(), child.name.as_str()) { (Some(DOAP_NAMESPACE), "name") => { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "shortname") | (Some(DOAP_NAMESPACE), "short-name") => { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(text.to_string()), 
certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "bug-database") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "homepage") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "download-page") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "shortdesc") => { if let Some(lang) = extract_lang(child) { if lang == "en" { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } (Some(DOAP_NAMESPACE), "description") => { if let Some(lang) = extract_lang(child) { if lang == "en" { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } (Some(DOAP_NAMESPACE), "license") => { // TODO: Handle license } (Some(DOAP_NAMESPACE), "repository") => { for repo in &child.children { let repo = if let Some(element) = repo.as_element() { element } else { continue; }; match repo.name.as_str() { "SVNRepository" | "GitRepository" => { if let Some(repo_location) = repo.get_child("location") { if let Some(repo_url) = extract_url(repo_location) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if 
let Some(web_location) = repo.get_child("browse") { if let Some(web_url) = extract_url(web_location) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(web_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } _ => (), } } } (Some(DOAP_NAMESPACE), "category") | (Some(DOAP_NAMESPACE), "programming-language") | (Some(DOAP_NAMESPACE), "os") | (Some(DOAP_NAMESPACE), "implements") | (Some(SCHEMA_NAMESPACE), "logo") | (Some(DOAP_NAMESPACE), "platform") => { // TODO: Handle other tags } (Some(SCHEMA_NAMESPACE), "screenshot") | (Some(DOAP_NAMESPACE), "screenshots") => { if let Some(url) = extract_url(child) { screenshots.push(url.to_string()); } } (Some(DOAP_NAMESPACE), "wiki") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Wiki(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "maintainer") => { for person in &child.children { let person = if let Some(element) = person.as_element() { element } else { continue; }; if person.name != "Person" { continue; } let name = if let Some(name_tag) = person.get_child("name") { name_tag.get_text().clone() } else { None }; let email = if let Some(email_tag) = person.get_child("mbox") { email_tag.get_text().as_ref().cloned() } else { None }; let url = if let Some(email_tag) = person.get_child("mbox") { extract_url(email_tag).map(|url| url.to_string()) } else { None }; maintainers.push(Person { name: name.map(|n| n.to_string()), email: email.map(|n| n.to_string()), url, }); } } (Some(DOAP_NAMESPACE), "mailing-list") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::MailingList(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "release") => {} _ => { error!("Unknown tag {} in DOAP file", child.name); } } } if maintainers.len() == 1 
{ let maintainer = maintainers.remove(0); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { for maintainer in maintainers { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.3.6/src/providers/git.rs000064400000000000000000000031131046102023000176240ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::path::Path; #[cfg(feature = "git-config")] /// Extracts upstream metadata from .git/config file pub fn guess_from_git_config( path: &Path, settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let config_file = gix_config::File::from_path_no_includes(path.to_path_buf(), gix_config::Source::Local) .map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut results = Vec::new(); // Check if there's a remote named "upstream" if let Some(remote_upstream) = config_file.string_by("remote", Some("upstream".into()), "url") { let url = remote_upstream.to_string(); if !url.starts_with("../") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } // Check if there's a remote named "origin" if !settings.trust_package { if let Some(remote_origin) = config_file.string_by("remote", Some("origin".into()), "url") { let url = remote_origin.to_string(); if !url.starts_with("../") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } Ok(results) } upstream-ontologist-0.3.6/src/providers/go.rs000064400000000000000000000037111046102023000174520ustar 00000000000000//! 
See use crate::{ Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use log::debug; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; /// Extracts upstream metadata from go.mod file pub fn guess_from_go_mod( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path).expect("Failed to open file"); let reader = BufReader::new(file); let mut results = Vec::new(); for line in reader.lines().map_while(Result::ok) { if line.starts_with("module ") { let modname = match line.trim().split_once(' ') { Some((_, modname)) => modname, None => { debug!("Failed to parse module name from line: {}", line); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(modname.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(results) } /// Fetches upstream metadata for a Go package from pkg.go.dev pub fn remote_go_metadata(package: &str) -> Result { let mut ret = UpstreamMetadata::default(); if package.starts_with("github.com/") { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(package.to_string()), certainty: Some(Certainty::Certain), origin: None, }); let parts: Vec<&str> = package.split('/').collect(); ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://{}", parts[..3].join("/"))), certainty: Some(Certainty::Certain), origin: None, }); } Ok(ret) } upstream-ontologist-0.3.6/src/providers/gobo.rs000064400000000000000000000115561046102023000200010ustar 00000000000000use crate::UpstreamDatum; #[allow(dead_code)] #[derive(serde::Deserialize)] struct Contents { name: String, path: String, sha: String, size: u64, url: url::Url, html_url: url::Url, git_url: url::Url, download_url: Option, r#type: String, content: Option, encoding: Option, _links: Links, } #[allow(dead_code)] #[derive(serde::Deserialize)] struct Links { #[serde(rename = 
"self")] self_: String, git: url::Url, html: url::Url, } /// Fetches upstream metadata from GoboLinux repository pub async fn guess_from_gobo(package: &str) -> Result, crate::ProviderError> { let packages_url = "https://api.github.com/repos/gobolinux/Recipes/contents" .parse() .unwrap(); let contents: Vec = serde_json::from_value(crate::load_json_url(&packages_url, None).await?).unwrap(); let package = match contents .iter() .find(|p| p.name.eq_ignore_ascii_case(package)) { Some(p) => p, None => { log::debug!("No gobo package named {}", package); return Ok(Vec::new()); } }; let versions: Vec = serde_json::from_value(crate::load_json_url(&package.url, None).await?).unwrap(); let last_version = if let Some(last_version) = versions.last() { &last_version.name } else { log::debug!("No versions for gobo package {}", package.name); return Ok(Vec::new()); }; let base_url: url::Url = format!( "https://raw.githubusercontent.com/gobolinux/Recipes/master/{}/{}/", package.name, last_version ) .parse() .unwrap(); let client = reqwest::Client::builder() .user_agent(crate::USER_AGENT) .build() .unwrap(); let mut result = Vec::new(); let recipe_url = base_url.join("Recipe").unwrap(); match client.get(recipe_url.as_ref()).send().await { Ok(response) => { let text = response.text().await.unwrap(); for line in text.lines() { if let Some(url) = line.strip_prefix("url=") { result.push(UpstreamDatum::Homepage(url.to_string())); } } } Err(e) => { if e.status() == Some(reqwest::StatusCode::NOT_FOUND) { log::error!("No recipe for existing gobo package {}", package.name); } else if e.status() == Some(reqwest::StatusCode::FORBIDDEN) { log::debug!("error loading {}: {}. 
rate limiting?", recipe_url, e); } else { return Err(crate::ProviderError::Other(e.to_string())); } } } let description_url = base_url.join("Resources/Description").unwrap(); match client.get(description_url.as_ref()).send().await { Ok(response) => { for line in response.text().await.unwrap().lines() { if let Some((_, key, value)) = lazy_regex::regex_captures!("\\[(.*)\\] (.*)", line) { match key { "Name" => result.push(UpstreamDatum::Name(value.to_string())), "Summary" => result.push(UpstreamDatum::Summary(value.to_string())), "License" => result.push(UpstreamDatum::License(value.to_string())), "Description" => result.push(UpstreamDatum::Description(value.to_string())), "Homepage" => result.push(UpstreamDatum::Homepage(value.to_string())), _ => log::warn!("Unknown field {} in gobo Description", key), } } } } Err(e) => { if e.status() == Some(reqwest::StatusCode::NOT_FOUND) { log::error!("No description for existing gobo package {}", package.name); } else if e.status() == Some(reqwest::StatusCode::FORBIDDEN) { log::debug!("error loading {}: {}. rate limiting?", description_url, e); return Ok(Vec::new()); } else { return Err(crate::ProviderError::Other(e.to_string())); } } } Ok(result) } /// GoboLinux metadata provider pub struct Gobo; impl Default for Gobo { fn default() -> Self { Self::new() } } impl Gobo { /// Creates a new Gobo metadata provider pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Gobo { fn name(&self) -> &'static str { "gobo" } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository"][..] 
} fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Possible } async fn guess_metadata(&self, name: &str) -> Result, crate::ProviderError> { guess_from_gobo(name).await } } upstream-ontologist-0.3.6/src/providers/haskell.rs000064400000000000000000000223061046102023000204710ustar 00000000000000use crate::{ Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; /// Parses lines from a Cabal file pub fn parse_cabal_lines( lines: impl Iterator, ) -> Vec<(Option, String, String)> { let mut ret = Vec::new(); let mut section = None; for line in lines { if line.trim_start().starts_with("--") { // Comment continue; } // Empty line if line.trim().is_empty() { section = None; continue; } let (field, value) = match line.split_once(':') { Some((field, value)) => (field.to_lowercase(), value.trim()), None => { if !line.starts_with(' ') { section = Some(line.trim().to_lowercase()); } else { log::debug!("Failed to parse line: {}", line); } continue; } }; if section.is_none() && !field.starts_with(' ') { ret.push((None, field.trim().to_string(), value.to_owned())); } else if field.starts_with(' ') { ret.push(( section.clone(), field.trim().to_lowercase(), value.to_owned(), )); } else { log::debug!("Invalid field {}", field); } } ret } /// Extracts upstream metadata from parsed Cabal file lines pub fn guess_from_cabal_lines( lines: impl Iterator, ) -> std::result::Result, ProviderError> { let mut repo_url = None; let mut repo_branch = None; let mut repo_subpath = None; let mut results = Vec::new(); for (section, key, value) in parse_cabal_lines(lines) { match (section.as_deref(), key.as_str()) { (None, "homepage") => results.push(( UpstreamDatum::Homepage(value.to_owned()), Certainty::Certain, )), (None, "bug-reports") => results.push(( UpstreamDatum::BugDatabase(value.to_owned()), Certainty::Certain, )), (None, "name") => { 
results.push((UpstreamDatum::Name(value.to_owned()), Certainty::Certain)) } (None, "maintainer") => results.push(( UpstreamDatum::Maintainer(Person::from(value.as_str())), Certainty::Certain, )), (None, "copyright") => results.push(( UpstreamDatum::Copyright(value.to_owned()), Certainty::Certain, )), (None, "license") => { results.push((UpstreamDatum::License(value.to_owned()), Certainty::Certain)) } (None, "author") => results.push(( UpstreamDatum::Author(vec![Person::from(value.as_str())]), Certainty::Certain, )), (None, "synopsis") => { results.push((UpstreamDatum::Summary(value.to_owned()), Certainty::Certain)) } (None, "cabal-version") => {} (None, "build-depends") => {} (None, "build-type") => {} (Some("source-repository head"), "location") => repo_url = Some(value.to_owned()), (Some("source-repository head"), "branch") => repo_branch = Some(value.to_owned()), (Some("source-repository head"), "subdir") => repo_subpath = Some(value.to_owned()), (s, _) if s.is_some() && s.unwrap().starts_with("executable ") => {} _ => { log::debug!("Unknown field {:?} in section {:?}", key, section); } } } if let Some(repo_url) = repo_url { results.push(( UpstreamDatum::Repository(crate::vcs::unsplit_vcs_url(&crate::vcs::VcsLocation { url: repo_url.parse().unwrap(), branch: repo_branch, subpath: repo_subpath, })), Certainty::Certain, )); } Ok(results .into_iter() .map(|(datum, certainty)| UpstreamDatumWithMetadata { datum, certainty: Some(certainty), origin: None, }) .collect()) } /// Extracts upstream metadata from a .cabal file pub fn guess_from_cabal( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); guess_from_cabal_lines( reader .lines() .map(|line| line.expect("Failed to read line")), ) } /// Fetches upstream metadata for a package from Hackage pub async fn remote_hackage_data(package: &str) -> Result { let mut ret = UpstreamMetadata::new(); for datum in 
guess_from_hackage(package).await? { ret.insert(datum); } Ok(ret) } /// Extracts upstream metadata from Hackage for a specific package pub async fn guess_from_hackage( package: &str, ) -> std::result::Result, ProviderError> { let client = reqwest::Client::builder() .user_agent(crate::USER_AGENT) .build() .unwrap(); let url: url::Url = format!( "https://hackage.haskell.org/package/{}/{}.cabal", package, package ) .parse() .unwrap(); match client.get(url).send().await { Ok(response) => { let bytes = response.bytes().await?; let reader = BufReader::new(&bytes[..]); guess_from_cabal_lines( reader .lines() .map(|line| line.expect("Failed to read line")), ) } Err(e) => match e.status() { Some(reqwest::StatusCode::NOT_FOUND) => { log::warn!("Package {} not found on Hackage", package); Ok(Vec::new()) } _ => { log::warn!("Failed to fetch package {} from Hackage: {}", package, e); Err(ProviderError::Other(format!( "Failed to fetch package {} from Hackage: {}", package, e ))) } }, } } /// Hackage (Haskell package repository) metadata provider pub struct Hackage; impl Default for Hackage { fn default() -> Self { Self::new() } } impl Hackage { /// Creates a new Hackage metadata provider pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Hackage { fn name(&self) -> &'static str { "Hackage" } fn max_supported_certainty(&self) -> Certainty { Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &[ "Homepage", "Name", "Repository", "Maintainer", "Copyright", "License", "Bug-Database", ][..] } async fn guess_metadata(&self, name: &str) -> Result, ProviderError> { Ok(guess_from_hackage(name) .await? 
.into_iter() .map(|v| v.datum) .collect()) } } #[cfg(test)] mod parse_tests { use super::*; #[test] fn test_parse_cabal_lines() { let lines = r#"Name: foo Version: 0.0 License: BSD3 Author: John Doe Maintainer: John Doe Cabal-Version: >= 1.10 Homepage: https://example.com Executable program1 Build-Depends: HUnit Main-Is: Main.hs source-repository head type: git location: https://github.com/example/blah "#; let parsed = parse_cabal_lines(lines.lines().map(|s| s.to_owned())); assert_eq!( parsed, vec![ (None, "name".to_owned(), "foo".to_owned()), (None, "version".to_owned(), "0.0".to_owned()), (None, "license".to_owned(), "BSD3".to_owned()), (None, "author".to_owned(), "John Doe".to_owned()), ( None, "maintainer".to_owned(), "John Doe ".to_owned() ), (None, "cabal-version".to_owned(), ">= 1.10".to_owned()), ( None, "homepage".to_owned(), "https://example.com".to_owned() ), ( Some("executable program1".to_owned()), "build-depends".to_owned(), "HUnit".to_owned() ), ( Some("executable program1".to_owned()), "main-is".to_owned(), "Main.hs".to_owned() ), ( Some("source-repository head".to_owned()), "type".to_owned(), "git".to_owned() ), ( Some("source-repository head".to_owned()), "location".to_owned(), "https://github.com/example/blah".to_owned() ) ] ); } } upstream-ontologist-0.3.6/src/providers/launchpad.rs000064400000000000000000000163361046102023000210130ustar 00000000000000use crate::{load_json_url, UpstreamDatum}; use log::error; /// Fetches upstream metadata from Launchpad #[cfg(feature = "launchpad")] pub async fn guess_from_launchpad( package: &str, distribution: Option<&str>, suite: Option<&str>, ) -> Option> { use distro_info::DistroInfo; use distro_info::UbuntuDistroInfo; let distribution = distribution.unwrap_or("ubuntu"); let suite = suite.map_or_else( || { if distribution == "ubuntu" { let ubuntu = UbuntuDistroInfo::new().unwrap(); Some( ubuntu .ubuntu_devel(chrono::Utc::now().date_naive()) .last()? 
.codename() .clone(), ) } else if distribution == "debian" { Some("sid".to_string()) } else { None } }, |x| Some(x.to_string()), ); let suite = suite?; let sourcepackage_url = format!( "https://api.launchpad.net/devel/{}/{}/+source/{}", distribution, suite, package ); let sourcepackage_data = load_json_url(&url::Url::parse(sourcepackage_url.as_str()).unwrap(), None) .await .unwrap(); if let Some(productseries_url) = sourcepackage_data.get("productseries_link") { let productseries_data = load_json_url( &url::Url::parse(productseries_url.as_str().unwrap()).unwrap(), None, ) .await .unwrap(); let project_link = productseries_data.get("project_link").cloned(); if let Some(project_link) = project_link { let project_data = load_json_url( &url::Url::parse(project_link.as_str().unwrap()).unwrap(), None, ) .await .unwrap(); let mut results = Vec::new(); if let Some(homepage_url) = project_data.get("homepage_url") { results.push(UpstreamDatum::Homepage( homepage_url.as_str().unwrap().to_string(), )); } if let Some(display_name) = project_data.get("display_name") { results.push(UpstreamDatum::Name( display_name.as_str().unwrap().to_string(), )); } if let Some(sourceforge_project) = project_data.get("sourceforge_project") { results.push(UpstreamDatum::SourceForgeProject( sourceforge_project.as_str().unwrap().to_string(), )); } if let Some(wiki_url) = project_data.get("wiki_url") { results.push(UpstreamDatum::Wiki(wiki_url.as_str().unwrap().to_string())); } if let Some(summary) = project_data.get("summary") { results.push(UpstreamDatum::Summary( summary.as_str().unwrap().to_string(), )); } if let Some(download_url) = project_data.get("download_url") { results.push(UpstreamDatum::Download( download_url.as_str().unwrap().to_string(), )); } if let Some(vcs) = project_data.get("vcs") { if vcs == "Bazaar" { if let Some(branch_link) = productseries_data.get("branch_link") { let code_import_data = load_json_url( &url::Url::parse( format!("{}/+code-import", 
branch_link.as_str().unwrap()).as_str(), ) .unwrap(), None, ) .await .unwrap(); if let Some(url) = code_import_data.get("url") { results .push(UpstreamDatum::Repository(url.as_str().unwrap().to_string())); } } else if let Some(official_codehosting) = project_data.get("official_codehosting") { if official_codehosting == "true" { let branch_data = load_json_url( &url::Url::parse( productseries_data.as_object().unwrap()["branch_link"] .as_str() .unwrap(), ) .unwrap(), None, ) .await .unwrap(); results.push(UpstreamDatum::Repository( branch_data.as_object().unwrap()["bzr_identity"] .as_str() .unwrap() .to_owned(), )); results.push(UpstreamDatum::RepositoryBrowse( branch_data.as_object().unwrap()["web_link"] .as_str() .unwrap() .to_owned(), )); } } } else if vcs == "Git" { let repo_link = format!( "https://api.launchpad.net/devel/+git?ws.op=getByPath&path={}", project_data["name"] ); let repo_data = load_json_url(&url::Url::parse(repo_link.as_str()).unwrap(), None) .await .unwrap(); if let Some(code_import_link) = repo_data.get("code_import_link") { let code_import_data = load_json_url( &url::Url::parse(code_import_link.as_str().unwrap()).unwrap(), None, ) .await .unwrap(); if let Some(url) = code_import_data.get("url") { results .push(UpstreamDatum::Repository(url.as_str().unwrap().to_owned())); } } else if let Some(official_codehosting) = project_data.get("official_codehosting") { if official_codehosting == "true" { results.push(UpstreamDatum::Repository( repo_data["git_https_url"].as_str().unwrap().to_owned(), )); results.push(UpstreamDatum::RepositoryBrowse( repo_data["web_link"].as_str().unwrap().to_owned(), )); } } } else { error!("unknown vcs: {:?}", vcs); } } return Some(results); } } None } upstream-ontologist-0.3.6/src/providers/maven.rs000064400000000000000000000154451046102023000201620ustar 00000000000000//! 
Documentation: use crate::{ vcs, Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::warn; use std::fs::File; use std::path::Path; /// Extracts upstream metadata from Maven pom.xml file pub fn guess_from_pom_xml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path).expect("Failed to open file"); let file = std::io::BufReader::new(file); let root = Element::parse(file) .map_err(|e| ProviderError::ParseError(format!("Unable to parse package.xml: {}", e)))?; let mut result = Vec::new(); if root.name == "project" { if let Some(name_tag) = root.get_child("name") { if let Some(name) = name_tag.get_text() { if !name.contains('$') { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } else if let Some(artifact_id_tag) = root.get_child("artifactId") { if let Some(artifact_id) = artifact_id_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(artifact_id.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } if let Some(description_tag) = root.get_child("description") { if let Some(description) = description_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version_tag) = root.get_child("version") { if let Some(version) = version_tag.get_text() { if !version.contains('$') { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(licenses_tag) = root.get_child("licenses") { for license_tag in licenses_tag .children .iter() .filter(|c| c.as_element().is_some_and(|e| e.name == "license")) { if let Some(license_tag) = 
license_tag.as_element() { if let Some(name_tag) = license_tag.get_child("name") { if let Some(license_name) = name_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license_name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } } for scm_tag in root .children .iter() .filter(|c| c.as_element().is_some_and(|e| e.name == "scm")) { if let Some(scm_tag) = scm_tag.as_element() { if let Some(url_tag) = scm_tag.get_child("url") { if let Some(url) = url_tag.get_text() { if url.starts_with("scm:") && url.matches(':').count() >= 3 { let url_parts: Vec<&str> = url.splitn(3, ':').collect(); let browse_url = url_parts[2]; if vcs::plausible_browse_url(browse_url) { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(browse_url.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } else { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(connection_tag) = scm_tag.get_child("connection") { if let Some(connection) = connection_tag.get_text() { let connection_parts: Vec<&str> = connection.splitn(3, ':').collect(); if connection_parts.len() == 3 && connection_parts[0] == "scm" { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(connection_parts[2].to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { warn!("Invalid format for SCM connection: {}", connection); } } } } } for issue_mgmt_tag in root .children .iter() .filter(|c| c.as_element().is_some_and(|e| e.name == "issueManagement")) { if let Some(issue_mgmt_tag) = issue_mgmt_tag.as_element() { if let Some(url_tag) = issue_mgmt_tag.get_child("url") { if let Some(url) = url_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: 
Some(Certainty::Certain), origin: Some(path.into()), }); } } } } if let Some(url_tag) = root.get_child("url") { if let Some(url) = url_tag.get_text() { if !url.starts_with("scm:") { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } Ok(result) } upstream-ontologist-0.3.6/src/providers/meson.rs000064400000000000000000000037311046102023000201700ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::process::Command; /// Extracts upstream metadata from meson.build file using meson introspect pub fn guess_from_meson( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // TODO(jelmer): consider looking for a meson build directory to call "meson // introspect" on // TODO(jelmer): mesonbuild is python; consider using its internal functions to parse // meson.build? let mut command = Command::new("meson"); command.arg("introspect").arg("--projectinfo").arg(path); let output = command.output().map_err(|_| { ProviderError::Other("meson not installed; skipping meson.build introspection".to_string()) })?; if !output.status.success() { return Err(ProviderError::Other(format!( "meson failed to run; exited with code {}", output.status.code().unwrap() ))); } let project_info: serde_json::Value = serde_json::from_slice(&output.stdout) .map_err(|e| ProviderError::Other(format!("Failed to parse meson project info: {}", e)))?; let mut results = Vec::new(); if let Some(descriptive_name) = project_info.get("descriptive_name") { if let Some(name) = descriptive_name.as_str() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = project_info.get("version") { if let Some(version_str) = version.as_str() { 
results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version_str.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.3.6/src/providers/metadata_json.rs000064400000000000000000000206571046102023000216660ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::warn; use std::fs::File; use std::io::Read; use std::path::Path; /// Extracts upstream metadata from metadata.json file pub fn guess_from_metadata_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_json::Map = match serde_json::from_str(&contents) { Ok(data) => data, Err(e) => { return Err(ProviderError::ParseError(e.to_string())); } }; let mut upstream_data: Vec = Vec::new(); for (field, value) in data.iter() { match field.as_str() { "description" => { if let Some(description) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "name" => { if let Some(name) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "version" => { if let Some(version) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "url" => { if let Some(url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "license" => { if let Some(license) = value.as_str() { 
upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "source" => { if let Some(repository) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(array) = value.as_array() { // Handle Repository as array (e.g., CVS format: ["cvs_root", "module"]) let strings: Vec<&str> = array.iter().filter_map(|v| v.as_str()).collect(); if strings.len() >= 2 { // Try to convert CVS array format if let Some(repo_url) = crate::vcs::convert_cvs_list_to_str(&strings) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { warn!("Repository array format not recognized: {:?}", strings); } } else { warn!("Repository array has insufficient elements: {:?}", array); } } } "summary" => { if let Some(summary) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "issues_url" => { if let Some(issues_url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(issues_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "project_page" => { if let Some(project_page) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(project_page.to_string()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } "author" => { if let Some(author_value) = value.as_str() { let author = Person::from(author_value); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![author]), certainty: Some(Certainty::Likely), origin: 
Some(path.into()), }); } else if let Some(author_values) = value.as_array() { let authors: Vec = match author_values .iter() .map(|v| { Ok::(Person::from( v.as_str().ok_or("Author value is not a string")?, )) }) .collect::, _>>() { Ok(authors) => authors, Err(e) => { warn!("Error parsing author array: {}", e); continue; } }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } "operatingsystem_support" | "requirements" | "dependencies" => { // Skip these fields } _ => { warn!("Unknown field {} ({:?}) in metadata.json", field, value); } } } Ok(upstream_data) } #[cfg(test)] mod tests { use super::*; use std::io::Write; use tempfile::NamedTempFile; #[test] fn test_cvs_repository_array() { let json_content = r#"{ "name": "yep", "source": [ ":extssh:_anoncvs@anoncvs.example.org:/cvs", "yep" ] }"#; let mut temp_file = NamedTempFile::new().unwrap(); temp_file.write_all(json_content.as_bytes()).unwrap(); temp_file.flush().unwrap(); let settings = GuesserSettings { trust_package: false, }; let result = guess_from_metadata_json(temp_file.path(), &settings).unwrap(); // Find the Repository datum let repo = result .iter() .find(|d| matches!(d.datum, UpstreamDatum::Repository(_))) .expect("Should have Repository datum"); if let UpstreamDatum::Repository(url) = &repo.datum { println!("Converted CVS URL: {}", url); // The URL should be converted from CVS array format assert!( url.contains("anoncvs.example.org"), "URL should contain the host" ); assert!(url.contains("yep"), "URL should contain the module name"); } else { panic!("Expected Repository datum"); } assert_eq!(repo.certainty, Some(Certainty::Certain)); } } upstream-ontologist-0.3.6/src/providers/metainfo.rs000064400000000000000000000061051046102023000206470ustar 00000000000000//! 
See use crate::{Certainty, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::fs::File; use std::path::Path; /// Extracts upstream metadata from AppStream metainfo XML files pub fn guess_from_metainfo( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path)?; let root = Element::parse(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut results: Vec = Vec::new(); for child in root.children { let child = if let Some(element) = child.as_element() { element } else { continue; }; if child.name == "id" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "project_license" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "url" { if let Some(urltype) = child.attributes.get("type") { if urltype == "homepage" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if urltype == "bugtracker" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if child.name == "description" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "summary" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "name" { 
results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.3.6/src/providers/mod.rs000064400000000000000000000123171046102023000176260ustar 00000000000000/// Arch Linux package metadata provider pub mod arch; /// Authors file metadata provider pub mod authors; /// Autoconf configure script metadata provider pub mod autoconf; /// PHP Composer metadata provider pub mod composer_json; /// Debian package metadata provider pub mod debian; /// DOAP (Description of a Project) metadata provider pub mod doap; /// Git configuration metadata provider pub mod git; /// Go module metadata provider pub mod go; /// GoboLinux metadata provider pub mod gobo; /// Haskell package metadata provider pub mod haskell; /// Launchpad metadata provider pub mod launchpad; /// Maven POM metadata provider pub mod maven; /// Meson build system metadata provider pub mod meson; /// Generic metadata.json provider pub mod metadata_json; /// AppStream metainfo metadata provider pub mod metainfo; /// Node.js metadata provider pub mod node; /// NuGet package specification metadata provider pub mod nuspec; /// OCaml OPAM metadata provider #[cfg(feature = "opam")] pub mod ocaml; /// NPM package.json metadata provider pub mod package_json; /// PEAR package.xml metadata provider pub mod package_xml; /// Haskell package.yaml metadata provider pub mod package_yaml; /// Perl module metadata provider pub mod perl; /// PHP package metadata provider pub mod php; /// Dart/Flutter pubspec metadata provider pub mod pubspec; /// Python package metadata provider pub mod python; /// R package metadata provider pub mod r; /// Repology metadata provider pub mod repology; /// Ruby gem metadata provider pub mod ruby; /// Rust crate metadata provider pub mod rust; /// Security.md file metadata provider pub mod security_md; /// Waf build system metadata provider pub mod 
waf; use crate::{Certainty, GuesserSettings, UpstreamDatum, UpstreamDatumWithMetadata}; use std::io::BufRead; /// Guesses upstream metadata from INSTALL file pub async fn guess_from_install( path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, crate::ProviderError> { let mut ret = Vec::new(); let f = std::fs::File::open(path)?; let f = std::io::BufReader::new(f); let mut urls: Vec = Vec::new(); let mut lines = f.lines(); while let Some(oline) = lines.next() { let oline = oline?; let line = oline.trim(); let mut cmdline = line.trim().trim_start_matches('$').trim().to_string(); if cmdline.starts_with("git clone ") || cmdline.starts_with("fossil clone ") { while cmdline.ends_with('\\') { cmdline.push_str(lines.next().unwrap()?.trim()); cmdline = cmdline.trim().to_string(); } if let Some(url) = if cmdline.starts_with("git clone ") { crate::vcs_command::url_from_git_clone_command(cmdline.as_bytes()) } else if cmdline.starts_with("fossil clone ") { crate::vcs_command::url_from_fossil_clone_command(cmdline.as_bytes()) } else { None } { urls.push(url); } } for m in lazy_regex::regex!("[\"'`](git clone.*)[\"`']").find_iter(line) { if let Some(url) = crate::vcs_command::url_from_git_clone_command(m.as_str().as_bytes()) { urls.push(url); } } let project_re = "([^/]+)/([^/?.()\"#>\\s]*[^-/?.()\"#>\\s])"; for m in regex::Regex::new(format!("https://github.com/{}/(.git)?", project_re).as_str()) .unwrap() .find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.as_str().trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(m) = regex::Regex::new(format!("https://github.com/{}", project_re).as_str()) .unwrap() .captures(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( m.get(0).unwrap().as_str().trim_end_matches('.').to_string(), ), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some((url, _)) = 
lazy_regex::regex_captures!("git://([^ ]+)", line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex!("https://([^]/]+)/([^]\\s()\"#]+)").find_iter(line) { let url: url::Url = m.as_str().trim_end_matches('.').trim().parse().unwrap(); if crate::vcs::is_gitlab_site(url.host_str().unwrap(), None).await { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url, None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } } Ok(ret) } upstream-ontologist-0.3.6/src/providers/node.rs000064400000000000000000000230051046102023000177700ustar 00000000000000use crate::{ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata}; use serde::Deserialize; use std::collections::HashMap; /// Information about a specific NPM package version #[derive(Deserialize)] pub struct NpmVersion { /// Distribution information for the package #[serde(rename = "dist")] pub dist: NpmDist, /// Package dependencies #[serde(rename = "dependencies")] pub dependencies: Option>, /// Development dependencies #[serde(rename = "devDependencies")] pub dev_dependencies: Option>, /// Peer dependencies #[serde(rename = "peerDependencies")] pub peer_dependencies: Option>, /// Optional dependencies #[serde(rename = "optionalDependencies")] pub optional_dependencies: Option>, /// Bundled dependencies #[serde(rename = "bundledDependencies")] pub bundled_dependencies: Option>, /// Engine requirements #[serde(rename = "engines")] pub engines: Option>, /// NPM scripts #[serde(rename = "scripts")] pub scripts: Option>, /// Package name pub name: String, /// Package version pub version: String, /// README file name #[serde(rename = "readmeFilename")] pub readme_filename: Option, /// Package maintainers #[serde(rename = "maintainers")] 
pub maintainers: Vec, /// Package author #[serde(rename = "author")] pub author: Option, /// Source repository #[serde(rename = "repository")] pub repository: Option, /// Bug tracker information #[serde(rename = "bugs")] pub bugs: Option, /// Package homepage #[serde(rename = "homepage")] pub homepage: Option, /// Package keywords #[serde(rename = "keywords")] pub keywords: Option>, /// License identifier #[serde(rename = "license")] pub license: Option, } /// NPM person (author/maintainer) information #[derive(Deserialize)] pub struct NpmPerson { /// Person's name pub name: String, /// Person's email address pub email: String, } impl From for crate::Person { fn from(person: NpmPerson) -> Self { crate::Person { name: Some(person.name), email: Some(person.email), url: None, } } } /// NPM distribution information #[derive(Deserialize)] pub struct NpmDist { /// SHA checksum of the package pub shasum: String, /// URL to the package tarball pub tarball: String, /// Package integrity string pub integrity: String, /// Package signatures pub signatures: Vec, } /// NPM package signature #[derive(Deserialize)] pub struct NpmSignature { /// Key identifier pub keyid: String, /// Signature string pub sig: String, } /// NPM repository information #[derive(Deserialize)] pub struct NpmRepository { /// Repository type (e.g., git) #[serde(rename = "type")] pub type_: String, /// Repository URL pub url: String, } /// NPM bug tracker information #[derive(Deserialize)] pub struct NpmBugs { /// Bug tracker URL pub url: String, } /// Complete NPM package metadata #[derive(Deserialize)] pub struct NpmPackage { /// Package identifier #[serde(rename = "_id")] pub id: String, /// Package revision #[serde(rename = "_rev")] pub rev: String, /// Package name pub name: String, /// Package description pub description: String, /// Distribution tags mapping #[serde(rename = "dist-tags")] pub dist_tags: HashMap, /// All available versions pub versions: HashMap, /// Package README content pub readme: 
String, /// Package maintainers pub maintainers: Vec, /// Timestamps for various events pub time: HashMap, /// Package author pub author: Option, /// Source repository pub repository: Option, /// Bug tracker information pub bugs: Option, /// Package homepage pub homepage: Option, /// Package keywords pub keywords: Option>, /// License identifier pub license: Option, /// Package dependencies pub dependencies: Option>, /// Development dependencies #[serde(rename = "devDependencies")] pub dev_dependencies: Option>, /// Peer dependencies #[serde(rename = "peerDependencies")] pub peer_dependencies: Option>, /// Optional dependencies #[serde(rename = "optionalDependencies")] pub optional_dependencies: Option>, /// Bundled dependencies #[serde(rename = "bundledDependencies")] pub bundled_dependencies: Option>, /// Engine requirements pub engines: Option>, /// NPM scripts pub scripts: Option>, /// README file name #[serde(rename = "readmeFilename")] pub readme_filename: Option, } impl TryInto for NpmPackage { type Error = ProviderError; fn try_into(self) -> Result { let mut metadata = UpstreamMetadata::default(); let package_name = self.name.clone(); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(self.name), certainty: None, origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(self.description), certainty: None, origin: None, }); if let Some(homepage) = self.homepage { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: None, origin: None, }); } if let Some(author) = self.author { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![author.into()]), certainty: None, origin: None, }); } if let Some(repository) = self.repository { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.url), certainty: None, origin: None, }); } if let Some(bugs) = self.bugs { metadata.insert(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::BugDatabase(bugs.url), certainty: None, origin: None, }); } if let Some(license) = self.license { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: None, origin: None, }); } if let Some(keywords) = self.keywords { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: None, origin: None, }); } // Find the latest version if let Some(latest_version) = self.dist_tags.get("latest") { if let Some(version) = self.versions.get(latest_version) { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.version.to_string()), certainty: None, origin: None, }); } let version_data = self.versions.get(latest_version).map_or_else( || { Err(ProviderError::Other(format!( "Could not find version {} in package {}", latest_version, &package_name ))) }, Ok, )?; metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(version_data.dist.tarball.to_string()), certainty: None, origin: None, }); } Ok(metadata) } } /// Load NPM package metadata from the registry /// /// Fetches package information from the NPM registry API for the specified package name. /// Returns the parsed package metadata or None if the package doesn't exist. pub async fn load_npm_package(package: &str) -> Result, crate::ProviderError> { let http_url = format!("https://registry.npmjs.org/{}", package) .parse() .unwrap(); let data = crate::load_json_url(&http_url, None).await?; Ok(serde_json::from_value(data).unwrap()) } /// Get upstream metadata for an NPM package /// /// Retrieves and converts NPM package information into standardized upstream metadata format. /// Returns empty metadata if the package is not found. 
pub async fn remote_npm_metadata(package: &str) -> Result { let data = load_npm_package(package).await?; match data { Some(data) => data.try_into(), None => Ok(UpstreamMetadata::default()), } } #[cfg(test)] mod npm_tests { use super::*; #[test] fn test_load_npm_package() { let data = include_str!(".././testdata/npm.json"); let npm_data: NpmPackage = serde_json::from_str(data).unwrap(); assert_eq!(npm_data.name, "leftpad"); } } upstream-ontologist-0.3.6/src/providers/nuspec.rs000064400000000000000000000126741046102023000203520ustar 00000000000000use crate::xmlparse_simplify_namespaces; use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::path::Path; // Documentation: https://docs.microsoft.com/en-us/nuget/reference/nuspec /// Parse upstream metadata from a NuGet package specification (.nuspec) file /// /// Extracts package information like version, description, authors, and URLs from /// a NuSpec XML file following the NuGet package specification format. 
pub async fn guess_from_nuspec( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { const NAMESPACES: &[&str] = &["http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd"]; // XML parsing and other logic let root = match xmlparse_simplify_namespaces(path, NAMESPACES) { Some(root) => root, None => { return Err(crate::ProviderError::ParseError( "Unable to parse nuspec".to_string(), )); } }; assert_eq!(root.name, "package", "root tag is {}", root.name); let metadata = root.get_child("metadata"); if metadata.is_none() { return Err(ProviderError::ParseError( "Unable to find metadata tag".to_string(), )); } let metadata = metadata.unwrap(); let mut result = Vec::new(); if let Some(version_tag) = metadata.get_child("version") { if let Some(version) = version_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(description_tag) = metadata.get_child("description") { if let Some(description) = description_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(authors_tag) = metadata.get_child("authors") { if let Some(authors) = authors_tag.get_text() { let authors = authors.split(',').map(Person::from).collect(); result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(project_url_tag) = metadata.get_child("projectUrl") { if let Some(project_url) = project_url_tag.get_text() { let repo_url = crate::vcs::guess_repo_from_url(&url::Url::parse(&project_url).unwrap(), None) .await; if let Some(repo_url) = repo_url { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Confident), origin: Some(path.into()), 
}); } result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(project_url.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(license_tag) = metadata.get_child("license") { if let Some(license) = license_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(copyright_tag) = metadata.get_child("copyright") { if let Some(copyright) = copyright_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(copyright.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(title_tag) = metadata.get_child("title") { if let Some(title) = title_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(title.into_owned()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } if let Some(summary_tag) = metadata.get_child("summary") { if let Some(summary) = summary_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.into_owned()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } if let Some(repository_tag) = metadata.get_child("repository") { if let Some(repo_url) = repository_tag.attributes.get("url") { let branch = repository_tag.attributes.get("branch"); result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(crate::vcs::unsplit_vcs_url( &crate::vcs::VcsLocation { url: repo_url.parse().unwrap(), branch: branch.cloned(), subpath: None, }, )), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(result) } upstream-ontologist-0.3.6/src/providers/ocaml.rs000064400000000000000000000213761046102023000201470ustar 00000000000000//! 
Documentation: use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::warn; use opam_file_rs::value::{OpamFileItem, OpamFileSection, ValueKind}; use std::fs::File; use std::io::Read; use std::path::Path; #[cfg(feature = "opam")] /// Extracts upstream metadata from OPAM file pub fn guess_from_opam( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { let mut f = File::open(path)?; let mut contents = String::new(); f.read_to_string(&mut contents)?; let opam = opam_file_rs::parse(contents.as_str()) .map_err(|e| ProviderError::ParseError(format!("Failed to parse OPAM file: {:?}", e)))?; let mut results: Vec = Vec::new(); fn find_item<'a>(section: &'a OpamFileSection, name: &str) -> Option<&'a OpamFileItem> { for child in section.section_item.iter() { match child { OpamFileItem::Variable(_, n, _) if n == name => return Some(child), _ => (), } } None } for entry in opam.file_contents { match entry { OpamFileItem::Variable(_, name, value) if name == "maintainer" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for maintainer in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person::from(value.as_str())), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "license" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for license in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "homepage" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for homepage in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Homepage(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Section(_, section) if section.section_name.as_deref() == Some("dev-repo") => { match find_item(§ion, "repository") { Some(OpamFileItem::Variable(_, _, ref value)) => { let value = match value.kind { ValueKind::String(ref s) => s, _ => { warn!("Unexpected type for dev-repo in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(value.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } Some(o) => { warn!("Unexpected type for dev-repo in OPAM file: {:?}", o); continue; } None => { warn!("Missing repository for dev-repo in OPAM file"); continue; } } } OpamFileItem::Variable(_, name, value) if name == "bug-reports" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for bug-reports in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "synopsis" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for synopsis in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "description" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for description in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "doc" => { let value = match value.kind { ValueKind::String(s) => s, _ => 
{ warn!("Unexpected type for doc in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "version" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for version in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "authors" => { let value = match value.kind { ValueKind::String(s) => vec![Person::from(s.as_str())], ValueKind::List(ref l) => l .iter() .filter_map(|v| match v.kind { ValueKind::String(ref s) => Some(Person::from(s.as_str())), _ => { warn!("Unexpected type for authors in OPAM file: {:?}", &value); None } }) .collect(), _ => { warn!("Unexpected type for authors in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, _) => { warn!("Unexpected variable in OPAM file: {}", name); } OpamFileItem::Section(_, section) => { warn!("Unexpected section in OPAM file: {:?}", section); } } } Ok(results) } upstream-ontologist-0.3.6/src/providers/package_json.rs000064400000000000000000000237471046102023000215040ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::error; use std::path::Path; use url::Url; /// Extracts upstream metadata from NPM package.json file pub fn guess_from_package_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // see https://docs.npmjs.com/cli/v7/configuring-npm/package-json let file = std::fs::File::open(path)?; let package: 
serde_json::Value = serde_json::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); let package = match package { serde_json::Value::Object(package) => package, _ => { return Err(ProviderError::ParseError( "package.json is not an object".to_string(), )); } }; for (field, value) in package { match field.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "homepage" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "license" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "demo" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Demo(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "repository" => { let repo_url = if let Some(repo_url) = value.as_str() { Some(repo_url) } else if let Some(repo) = value.as_object() { if let Some(repo_url) = repo.get("url") { repo_url.as_str() } else { None } } else { None }; if let Some(repo_url) = repo_url { match Url::parse(repo_url) { Ok(url) if url.scheme() == "github" => { // Some people seem to default to github. 
:( let repo_url = format!("https://github.com/{}", url.path()); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } Err(url::ParseError::RelativeUrlWithoutBase) => { // Some people seem to default to github. :( let repo_url = format!("https://github.com/{}", repo_url); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } Ok(url) => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Err(e) => { panic!("Failed to parse repository URL: {}", e); } } } } "bugs" => { if let Some(url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(email) = value.get("email").and_then(serde_json::Value::as_str) { let url = format!("mailto:{}", email); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "keywords" => { if let Some(keywords) = value.as_array() { let keywords = keywords .iter() .filter_map(|keyword| keyword.as_str()) .map(String::from) .collect(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "author" => { if let Some(author) = value.as_object() { let name = author .get("name") .and_then(serde_json::Value::as_str) .map(String::from); let url = author .get("url") .and_then(serde_json::Value::as_str) .map(String::from); let email = author .get("email") .and_then(serde_json::Value::as_str) .map(String::from); let person = Person { name, url, 
email }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![person]), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } else if let Some(author) = value.as_str() { let person = Person::from(author); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![person]), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } else { error!("Unsupported type for author in package.json: {:?}", value); } } "dependencies" | "private" | "devDependencies" | "scripts" | "files" | "main" => { // Do nothing, skip these fields } _ => { error!("Unknown package.json field {} ({:?})", field, value); } } } Ok(upstream_data) } #[cfg(test)] mod package_json_tests { use super::*; #[test] fn test_dummy() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("package.json"); std::fs::write( &path, r#"{ "name": "mozillaeslintsetup", "description": "This package file is for setup of ESLint.", "repository": {}, "license": "MPL-2.0", "dependencies": { "eslint": "4.18.1", "eslint-plugin-html": "4.0.2", "eslint-plugin-mozilla": "file:tools/lint/eslint/eslint-plugin-mozilla", "eslint-plugin-no-unsanitized": "2.0.2", "eslint-plugin-react": "7.1.0", "eslint-plugin-spidermonkey-js": "file:tools/lint/eslint/eslint-plugin-spidermonkey-js" }, "devDependencies": {} } "#, ) .unwrap(); let ret = guess_from_package_json(&path, &GuesserSettings::default()).unwrap(); assert_eq!( ret, vec![ UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary( "This package file is for setup of ESLint.".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("MPL-2.0".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("mozillaeslintsetup".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()) } ] ); } } 
upstream-ontologist-0.3.6/src/providers/package_xml.rs000064400000000000000000000162151046102023000213230ustar 00000000000000use crate::xmlparse_simplify_namespaces; use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::error; use std::path::Path; /// Extracts upstream metadata from PEAR package.xml file pub fn guess_from_package_xml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use xmltree::{Element, XMLNode}; const NAMESPACES: &[&str] = &[ "http://pear.php.net/dtd/package-2.0", "http://pear.php.net/dtd/package-2.1", ]; let root = xmlparse_simplify_namespaces(path, NAMESPACES) .ok_or_else(|| ProviderError::ParseError("Unable to parse package.xml".to_string()))?; assert_eq!(root.name, "package", "root tag is {:?}", root.name); let mut upstream_data: Vec = Vec::new(); let mut leads: Vec<&Element> = Vec::new(); let mut maintainers: Vec<&Element> = Vec::new(); let mut authors: Vec<&Element> = Vec::new(); for child_element in &root.children { if let XMLNode::Element(ref element) = child_element { match element.name.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "summary" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { if let Some(release_tag) = element.get_child("release") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version( release_tag.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: 
Some(path.into()), }); } } "license" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "url" => { if let Some(url_type) = element.attributes.get("type") { match url_type.as_str() { "repository" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( element.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "bugtracker" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase( element.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } _ => {} } } } "lead" => { leads.push(element); } "maintainer" => { maintainers.push(element); } "author" => { authors.push(element); } "stability" | "dependencies" | "providesextension" | "extsrcrelease" | "channel" | "notes" | "contents" | "date" | "time" | "depend" | "exec_depend" | "buildtool_depend" => { // Do nothing, skip these fields } _ => { error!("Unknown package.xml tag {}", element.name); } } } } for lead_element in leads.iter().take(1) { let name_el = lead_element.get_child("name").unwrap().get_text(); let email_el = lead_element .get_child("email") .map(|s| s.get_text().unwrap()); let active_el = lead_element .get_child("active") .map(|s| s.get_text().unwrap()); if let Some(active_el) = active_el { if active_el != "yes" { continue; } } let person = Person { name: name_el.map(|s| s.to_string()), email: email_el.map(|s| s.to_string()), ..Default::default() }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(person), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if maintainers.len() == 1 { let maintainer_element = maintainers[0]; let name_el = maintainer_element.get_text().map(|s| s.into_owned()); let email_el = maintainer_element.attributes.get("email"); let 
person = Person { name: name_el, email: email_el.map(|s| s.to_string()), ..Default::default() }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(person), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if !authors.is_empty() { let persons = authors .iter() .map(|author_element| { let name_el = author_element.get_text().unwrap().into_owned(); let email_el = author_element.attributes.get("email"); Person { name: Some(name_el), email: email_el.map(|s| s.to_string()), ..Default::default() } }) .collect(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(persons), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } Ok(upstream_data) } upstream-ontologist-0.3.6/src/providers/package_yaml.rs000064400000000000000000000107631046102023000214670ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use std::path::Path; /// Extracts upstream metadata from Haskell package.yaml file pub fn guess_from_package_yaml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let reader = std::fs::File::open(path)?; let data: serde_yaml::Value = serde_yaml::from_reader(reader).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut ret = Vec::new(); if let Some(name) = data.get("name") { if let Some(name) = name.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = data.get("version") { if let Some(version) = version.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(authors) = data.get("author") { if let Some(author) = authors.as_str() { let authors = author.split(',').collect::>(); 
ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors.into_iter().map(Person::from).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(maintainers) = data.get("maintainer") { if let Some(maintainer) = maintainers.as_str() { let maintainers = maintainer.split(',').collect::>(); let mut maintainers = maintainers .into_iter() .map(Person::from) .collect::>(); if let Some(maintainer) = maintainers.pop() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(homepage) = data.get("homepage") { if let Some(homepage) = homepage.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(description) = data.get("description") { if let Some(description) = description.as_str() { if !description.starts_with("Please see the README") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } } if let Some(synopsis) = data.get("synopsis") { if let Some(synopsis) = synopsis.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(synopsis.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } if let Some(license) = data.get("license") { if let Some(license) = license.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(github) = data.get("github") { if let Some(github) = github.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://github.com/{}", github)), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let 
Some(repository) = data.get("repository") { if let Some(repository) = repository.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(ret) } upstream-ontologist-0.3.6/src/providers/perl.rs000064400000000000000000000477761046102023000200320ustar 00000000000000use crate::{ Certainty, GuesserSettings, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use lazy_regex::regex; use serde::Deserialize; use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Read}; use std::path::{Path, PathBuf}; use std::process::Command; /// Extracts upstream metadata from Perl POD documentation pub fn guess_from_pod( contents: &str, origin: &Origin, ) -> std::result::Result, ProviderError> { let mut by_header: HashMap = HashMap::new(); let mut inheader: Option = None; for line in contents.lines() { if line.starts_with("=head1 ") { inheader = Some(line.trim_start_matches("=head1 ").to_string()); by_header.insert(inheader.clone().unwrap().to_uppercase(), String::new()); } else if let Some(header) = &inheader { if let Some(value) = by_header.get_mut(&header.to_uppercase()) { value.push_str(line) } } } let mut upstream_data: Vec = Vec::new(); if let Some(description) = by_header.get("DESCRIPTION") { let mut description = description.trim_start_matches('\n').to_string(); description = regex!(r"[FXZSCBI]\\<([^>]+)>") .replace_all(&description, "$1") .into_owned(); description = regex!(r"L\\<([^\|]+)\|([^\\>]+)\\>") .replace_all(&description, "$2") .into_owned(); description = regex!(r"L\\<([^\\>]+)\\>") .replace_all(&description, "$1") .into_owned(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } if let Some(name) = by_header.get("NAME") { let lines: Vec<&str> = 
name.trim().lines().collect(); if let Some(line) = lines.first() { if let Some((name, summary)) = line.split_once(" - ") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } else if !line.contains(' ') { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(line.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } } } Ok(upstream_data) } /// Extracts upstream metadata from a Perl module file pub fn guess_from_perl_module( path: &Path, ) -> std::result::Result, ProviderError> { match Command::new("perldoc").arg("-u").arg(path).output() { Ok(output) => guess_from_pod( &String::from_utf8_lossy(&output.stdout), &Origin::Path(path.into()), ), Err(e) => Err(ProviderError::Other(format!( "Error running perldoc: {}", e ))), } } /// Guesses upstream metadata based on Perl distribution name pub fn guess_from_perl_dist_name( path: &Path, dist_name: &str, ) -> std::result::Result, ProviderError> { let mod_path = PathBuf::from(format!( "{}/lib/{}.pm", std::path::Path::new(path) .parent() .expect("parent") .display(), dist_name.replace('-', "/") )); if mod_path.exists() { guess_from_perl_module(mod_path.as_path()) } else { Ok(Vec::new()) } } #[cfg(feature = "dist-ini")] /// Extracts upstream metadata from Dist::Zilla dist.ini file pub fn guess_from_dist_ini( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let parser = ini::Ini::load_from_file(path) .map_err(|e| ProviderError::ParseError(format!("Error parsing dist.ini: {}", e)))?; let dist_name = parser .get_from::<&str>(None, "name") .map(|name| UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: 
Some(Certainty::Certain), origin: Some(path.into()), }); let version = parser .get_from::<&str>(None, "version") .map(|version| UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let summary = parser .get_from::<&str>(None, "abstract") .map(|summary| UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let bug_database = parser .get_from(Some("MetaResources"), "bugtracker.web") .map(|bugtracker| UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bugtracker.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let repository = parser .get_from(Some("MetaResources"), "repository.url") .map(|repository| UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let license = parser .get_from::<&str>(None, "license") .map(|license| UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let copyright = match ( parser.get_from::<&str>(None, "copyright_year"), parser.get_from::<&str>(None, "copyright_holder"), ) { (Some(year), Some(holder)) => Some(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(format!("{} {}", year, holder)), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), _ => None, }; let mut upstream_data: Vec = Vec::new(); if let Some(dist_name) = dist_name { upstream_data.push(dist_name); } if let Some(version) = version { upstream_data.push(version); } if let Some(summary) = summary { upstream_data.push(summary); } if let Some(bug_database) = bug_database { upstream_data.push(bug_database); } if let Some(repository) = repository { upstream_data.push(repository); } if let Some(license) = license { 
upstream_data.push(license); } if let Some(copyright) = copyright { upstream_data.push(copyright); } if let Some(dist_name) = parser.get_from::<&str>(None, "name") { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } Ok(upstream_data) } /// Extracts upstream metadata from Perl META.json file pub fn guess_from_meta_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_json::Map = serde_json::from_str(&contents) .map_err(|e| ProviderError::ParseError(format!("Error parsing META.json: {}", e)))?; let mut upstream_data: Vec = Vec::new(); if let Some(name) = data.get("name").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = data.get("version").and_then(serde_json::Value::as_str) { let version = version.strip_prefix('v').unwrap_or(version); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(summary) = data.get("abstract").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(resources) = data.get("resources").and_then(serde_json::Value::as_object) { if let Some(bugtracker) = resources .get("bugtracker") .and_then(serde_json::Value::as_object) { if let Some(web) = bugtracker.get("web").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(web.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); // TODO: Support resources["bugtracker"]["mailto"] } 
} if let Some(homepage) = resources .get("homepage") .and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repo) = resources .get("repository") .and_then(serde_json::Value::as_object) { if let Some(url) = repo.get("url").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(web) = repo.get("web").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(web.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } // Wild guess: if let Some(dist_name) = data.get("name").and_then(serde_json::Value::as_str) { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } Ok(upstream_data) } /// Guess upstream metadata from a META.yml file. /// /// See for the /// specification of the format. 
pub fn guess_from_meta_yml(
    path: &Path,
    _settings: &GuesserSettings,
) -> std::result::Result<Vec<UpstreamDatumWithMetadata>, ProviderError> {
    let mut file = File::open(path)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    let data: serde_yaml::Value = serde_yaml::from_str(&contents)
        .map_err(|e| ProviderError::ParseError(format!("Error parsing META.yml: {}", e)))?;
    let mut upstream_data = Vec::new();
    if let Some(name) = data.get("name") {
        if let Some(name) = name.as_str() {
            upstream_data.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Name(name.to_string()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(license) = data.get("license") {
        if let Some(license) = license.as_str() {
            upstream_data.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::License(license.to_string()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(version) = data.get("version") {
        if let Some(version) = version.as_str() {
            upstream_data.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Version(version.to_string()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(resources) = data.get("resources") {
        // BUG FIX: these lookups previously called .as_str().unwrap(), which
        // panics whenever a META.yml carries a non-string value (e.g. a
        // mapping) under these keys. Skip non-string values instead, matching
        // the guarded handling of name/license/version above.
        if let Some(bugtracker) = resources.get("bugtracker").and_then(|v| v.as_str()) {
            upstream_data.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::BugDatabase(bugtracker.to_string()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
        if let Some(homepage) = resources.get("homepage").and_then(|v| v.as_str()) {
            upstream_data.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Homepage(homepage.to_string()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
        if let Some(repository) = resources.get("repository") {
            // "repository" may be either a mapping with a "url" key or a bare
            // string URL (handled by the else branch that follows).
            if let Some(url) = repository.get("url") {
                if let Some(url) = url.as_str() {
                    upstream_data.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Repository(url.to_string()),
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                }
            } else {
upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } // Wild guess: if let Some(dist_name) = data.get("name") { if let Some(dist_name) = dist_name.as_str() { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } } Ok(upstream_data) } /// Extracts upstream metadata from Makefile.PL pub fn guess_from_makefile_pl( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut dist_name = None; let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); let name_regex = regex!("name '([^'\"]+)';$"); let repository_regex = regex!("repository '([^'\"]+)';$"); for line in reader.lines().map_while(Result::ok) { if let Some(captures) = name_regex.captures(&line) { dist_name = Some(captures.get(1).unwrap().as_str().to_owned()); let name = dist_name.as_ref().unwrap().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if let Some(captures) = repository_regex.captures(&line) { let repository = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } if let Some(dist_name) = dist_name { results.extend(guess_from_perl_dist_name(path, &dist_name)?); } Ok(results) } /// CPAN module information #[derive(Deserialize)] pub struct Module { /// Numeric version representation pub version_numified: f64, /// Version string pub version: String, /// Whether the module is authorized pub authorized: bool, /// Module name pub name: String, /// Whether the module is indexed pub indexed: bool, } /// File statistics for a CPAN module #[derive(Deserialize)] pub struct Stat { /// User ID pub uid: isize, /// Modification time 
pub mtime: isize, /// File size in bytes pub size: isize, /// File mode pub mode: isize, /// Group ID pub gid: isize, } /// Complete CPAN module metadata #[derive(Deserialize)] pub struct CpanModule { /// Module maturity level pub maturity: String, /// Release identifier pub release: String, /// Author name pub author: String, /// Source lines of Perl code pub slop: isize, /// Download URL for the module pub download_url: url::Url, /// List of modules in this release pub module: Vec, /// POD documentation lines pub pod_lines: Vec, /// Module version pub version: String, /// Whether the module is deprecated pub deprecated: bool, /// Nesting level pub level: isize, /// MIME type of the file pub mime: String, /// Release date pub date: String, /// File path within the distribution pub path: String, /// Distribution name pub distribution: String, /// POD documentation content pub pod: String, /// Module name pub name: String, /// Source lines of code pub sloc: isize, /// File statistics pub stat: Stat, /// Numeric version representation pub version_numified: f64, /// Whether the file is binary pub binary: bool, /// Unique identifier pub id: String, /// Whether this is a directory pub directory: bool, /// Whether the module is indexed pub indexed: bool, /// Whether the module is authorized pub authorized: bool, } impl TryFrom for UpstreamMetadata { type Error = crate::ProviderError; fn try_from(value: CpanModule) -> Result { let mut metadata = UpstreamMetadata::default(); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.name), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.version), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.download_url.to_string()), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Download(value.download_url.to_string()), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![crate::Person::from(value.author.as_str())]), certainty: Some(Certainty::Certain), origin: None, }); Ok(metadata) } } /// Loads CPAN module data from the MetaCPAN API pub async fn load_cpan_data(module: &str) -> Result, crate::ProviderError> { let url = format!("https://fastapi.metacpan.org/v1/release/{}", module) .parse() .unwrap(); let data = crate::load_json_url(&url, None).await?; Ok(Some(serde_json::from_value(data).unwrap())) } /// Retrieves upstream metadata for a Perl module from CPAN pub async fn remote_cpan_data(module: &str) -> Result { let data = load_cpan_data(module).await?; match data { Some(data) => data.try_into(), None => Ok(UpstreamMetadata::default()), } } #[cfg(test)] mod tests { use super::*; #[test] fn test_load_from_json() { let text = include_str!("../testdata/cpan.json"); let cpan_module: CpanModule = serde_json::from_str(text).unwrap(); assert_eq!("Parse-Pidl-0.02", cpan_module.release); } } upstream-ontologist-0.3.6/src/providers/php.rs000064400000000000000000000102431046102023000176320ustar 00000000000000use crate::{ProviderError, UpstreamDatum}; use select::document::Document; use select::predicate::{And, Name, Predicate}; /// Fetch upstream metadata for a PECL package /// /// Retrieves package information from the PECL website by scraping the package page /// and extracting homepage, repository, and bug database URLs. 
pub async fn guess_from_pecl_package(package: &str) -> Result, ProviderError> { let url = format!("https://pecl.php.net/packages/{}", package); let client = reqwest::Client::builder() .user_agent(crate::USER_AGENT) // PECL is slow .timeout(std::time::Duration::from_secs(15)) .build() .unwrap(); let response = client .get(url) .send() .await .map_err(|e| ProviderError::Other(e.to_string()))?; match response.status() { reqwest::StatusCode::NOT_FOUND => { return Ok(vec![]); } status if !status.is_success() => { return Err(ProviderError::Other(format!("HTTP error: {}", status))); } _ => {} } let body = response .text() .await .map_err(|e| ProviderError::Other(e.to_string()))?; guess_from_pecl_page(&body) } struct TextContains<'a>(&'a str); impl<'a> Predicate for TextContains<'a> { fn matches(&self, node: &select::node::Node) -> bool { node.text().contains(self.0) } } fn find_tags_by_text<'a>( document: &'a Document, tag_name: &'a str, text: &'a str, ) -> Vec> { document .find(And(Name(tag_name), TextContains(text))) .collect() } fn guess_from_pecl_page(body: &str) -> Result, ProviderError> { let document = Document::from(body); let mut ret = Vec::new(); let browse_source_selector = find_tags_by_text(&document, "a", "Browse Source") .into_iter() .next(); if let Some(node) = browse_source_selector { ret.push(UpstreamDatum::RepositoryBrowse( node.attr("href").unwrap().to_string(), )); } let package_bugs_selector = find_tags_by_text(&document, "a", "Package Bugs") .into_iter() .next(); if let Some(node) = package_bugs_selector { ret.push(UpstreamDatum::BugDatabase( node.attr("href").unwrap().to_string(), )); } let homepage_selector = find_tags_by_text(&document, "th", "Homepage") .into_iter() .next() .unwrap() .parent() .unwrap() .find(Name("td").descendant(Name("a"))) .next(); if let Some(node) = homepage_selector { ret.push(UpstreamDatum::Homepage( node.attr("href").unwrap().to_string(), )); } Ok(ret) } /// PECL (PHP Extension Community Library) third-party repository 
provider pub struct Pecl; impl Default for Pecl { fn default() -> Self { Self::new() } } impl Pecl { /// Create a new PECL provider instance pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Pecl { fn name(&self) -> &'static str { "Pecl" } fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository", "Bug-Database"] } async fn guess_metadata(&self, name: &str) -> Result, ProviderError> { guess_from_pecl_package(name).await } } #[cfg(test)] mod pecl_tests { use super::*; #[test] fn test_guess_from_pecl_page() { let text = include_str!("../testdata/pecl.html"); let ret = guess_from_pecl_page(text).unwrap(); assert_eq!( ret, vec![ UpstreamDatum::RepositoryBrowse( "https://github.com/eduardok/libsmbclient-php".to_string() ), UpstreamDatum::BugDatabase( "https://github.com/eduardok/libsmbclient-php/issues".to_string() ), UpstreamDatum::Homepage("https://github.com/eduardok/libsmbclient-php".to_string()) ] ); } } upstream-ontologist-0.3.6/src/providers/pubspec.rs000064400000000000000000000053101046102023000205030ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::fs::File; use std::path::Path; #[derive(serde::Deserialize)] struct Pubspec { name: Option, description: Option, version: Option, homepage: Option, repository: Option, documentation: Option, issue_tracker: Option, } /// Extracts upstream metadata from Dart/Flutter pubspec.yaml file pub fn guess_from_pubspec_yaml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let pubspec: Pubspec = serde_yaml::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); if let Some(name) = pubspec.name { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), 
certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = pubspec.description { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = pubspec.version { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = pubspec.homepage { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = pubspec.repository { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(documentation) = pubspec.documentation { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(documentation), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(issue_tracker) = pubspec.issue_tracker { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(issue_tracker), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Ok(upstream_data) } upstream-ontologist-0.3.6/src/providers/python.rs000064400000000000000000001444551046102023000204010ustar 00000000000000use crate::{ vcs, Certainty, GuesserSettings, Origin, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use log::{debug, warn}; use serde::Deserialize; #[cfg(feature = "pyo3")] use pyo3::prelude::*; use std::collections::HashMap; use std::path::Path; #[cfg(feature = "python-pkginfo")] /// Extracts upstream metadata from Python PKG-INFO file pub async fn guess_from_pkg_info( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let contents = 
std::fs::read(path)?; let dist = python_pkginfo::Metadata::parse(contents.as_slice()).map_err(|e| { ProviderError::ParseError(format!("Failed to parse python package metadata: {}", e)) })?; let mut ret = vec![]; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(dist.name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(dist.version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(homepage) = dist.home_page { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(summary) = dist.summary { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = dist.description { ret.extend(parse_python_long_description( description.as_str(), dist.description_content_type.as_deref(), &Origin::Path(path.to_path_buf()), )?); } ret.extend(parse_python_project_urls( dist.project_urls .iter() .map(|k| k.split_once(", ").unwrap()) .map(|(k, v)| (k.to_string(), v.to_string())), &Origin::Path(path.to_path_buf()), )); if dist.author.is_some() || dist.author_email.is_some() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: dist.author, email: dist.author_email, url: None, }]), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if dist.maintainer.is_some() || dist.maintainer_email.is_some() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: dist.maintainer, email: dist.maintainer_email, url: None, }), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = dist.license { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } 
if let Some(keywords) = dist.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords.split(", ").map(|s| s.to_string()).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(download_url) = dist.download_url { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Ok(ret) } /// Extracts upstream metadata from Python pyproject.toml file #[cfg(feature = "pyproject-toml")] pub fn guess_from_pyproject_toml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let content = std::fs::read_to_string(path)?; let mut ret = Vec::new(); use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct PyProjectToml { #[serde(flatten)] inner: pyproject_toml::PyProjectToml, tool: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] pub struct Tool { poetry: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] pub struct ToolPoetry { version: Option, description: Option, license: Option, repository: Option, name: String, urls: Option>, keywords: Option>, authors: Option>, homepage: Option, documentation: Option, } let pyproject: PyProjectToml = toml::from_str(content.as_str()).map_err(|e| ProviderError::ParseError(e.to_string()))?; if let Some(inner_project) = pyproject.inner.project { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(inner_project.name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(version) = inner_project.version { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(pyproject_toml::License::Spdx(license)) = inner_project.license.as_ref() { ret.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::License(license.clone()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } fn contact_to_person(contact: &pyproject_toml::Contact) -> Person { Person { name: contact.name().map(|s| s.to_string()), email: contact.email().map(|s| s.to_string()), url: None, } } if let Some(authors) = inner_project.authors { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors.iter().map(contact_to_person).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(maintainers) = inner_project.maintainers { let maintainers: Vec<_> = maintainers.iter().map(contact_to_person).collect(); let certainty = if maintainers.len() == 1 { Certainty::Certain } else { Certainty::Possible }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainers[0].clone()), certainty: Some(certainty), origin: Some(path.into()), }); } if let Some(keywords) = inner_project.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(urls) = inner_project.urls { ret.extend(parse_python_project_urls( urls.into_iter(), &Origin::Path(path.to_path_buf()), )); } if let Some(classifiers) = inner_project.classifiers { ret.extend(parse_python_classifiers( classifiers.iter().map(|s| s.as_str()), &Origin::Path(path.to_path_buf()), )); } } if let Some(tool) = pyproject.tool { if let Some(poetry) = tool.poetry { if let Some(version) = poetry.version { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = poetry.description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = poetry.license { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), 
certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = poetry.repository { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(poetry.name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(urls) = poetry.urls { ret.extend(parse_python_project_urls( urls.into_iter(), &Origin::Path(path.to_path_buf()), )); } if let Some(keywords) = poetry.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(authors) = poetry.authors { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author( authors.iter().map(|p| Person::from(p.as_str())).collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = poetry.homepage { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(documentation) = poetry.documentation { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(documentation), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } Ok(ret) } fn parse_python_project_urls( urls: impl Iterator, origin: &Origin, ) -> Vec { let mut ret = Vec::new(); for (url_type, url) in urls { match url_type.as_str() { "GitHub" | "Repository" | "Source Code" | "Source" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Bug Tracker" | "Bug Reports" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Documentation" => 
{ ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Funding" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Funding(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Homepage" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } _u => { debug!("Unknown Python project URL type: {}", url_type); } } } ret } fn parse_python_long_description( long_description: &str, content_type: Option<&str>, origin: &Origin, ) -> std::result::Result, ProviderError> { if long_description.is_empty() { return Ok(vec![]); } let content_type = content_type.unwrap_or("text/plain"); let mut content_type = content_type.split(';').next().unwrap(); if long_description.contains("-*-restructuredtext-*-") { content_type = "text/restructured-text"; } let mut ret = vec![]; match content_type { "text/plain" => { let lines = long_description.split('\n').collect::>(); if lines.len() > 30 { debug!("Long description is too long ({} lines)", lines.len()); return Ok(vec![]); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(long_description.to_string()), certainty: Some(Certainty::Possible), origin: Some(origin.clone()), }); } "text/restructured-text" | "text/x-rst" => { let (description, extra_md) = crate::readme::description_from_readme_rst(long_description) .map_err(|e| ProviderError::Other(e.to_string()))?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(Origin::Other( "python long description (restructuredText)".to_string(), )), }); } ret.extend(extra_md); } "text/markdown" => { let (description, extra_md) = crate::readme::description_from_readme_md(long_description) 
.map_err(|e| ProviderError::Other(e.to_string()))?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(Origin::Other( "python long description (markdown)".to_string(), )), }); } ret.extend(extra_md); } _ => { warn!("Unknown content type: {}", content_type); } } Ok(ret) } /// Parses a Python URL to extract upstream metadata pub async fn parse_python_url(url: &str) -> Vec { let repo = vcs::guess_repo_from_url(&url::Url::parse(url).unwrap(), None).await; if let Some(repo) = repo { return vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some(Certainty::Likely), origin: None, }]; } vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Likely), origin: None, }] } #[cfg(feature = "setup-cfg")] /// Extracts upstream metadata from Python setup.cfg file pub async fn guess_from_setup_cfg( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let setup_cfg = ini::Ini::load_from_file(path).map_err(|e| ProviderError::ParseError(e.to_string()))?; let metadata = match setup_cfg.section(Some("metadata")) { Some(metadata) => metadata, None => { debug!("No [metadata] section in setup.cfg"); return Ok(vec![]); } }; let origin = Origin::Path(path.to_path_buf()); let mut ret = vec![]; for (field, value) in metadata.iter() { match field { "name" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "version" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "url" => { ret.extend(parse_python_url(value).await); } "description" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: 
Some(Certainty::Certain),
                    origin: Some(origin.clone()),
                });
            }
            "summary" => {
                ret.push(UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::Summary(value.to_string()),
                    certainty: Some(Certainty::Certain),
                    origin: Some(origin.clone()),
                });
            }
            "long_description" => {
                // setuptools allows "long_description = file: README.rst";
                // read the referenced file in that case.
                // BUG FIX: this previously called value.strip_prefix(value),
                // which always returns Some("") — the file branch then failed
                // to read "" and every long_description was silently dropped.
                if let Some(path) = value.strip_prefix("file:") {
                    let path = path.trim();
                    if path.contains('/') {
                        // Only read files in the same directory, as before.
                        debug!("Ignoring long_description path: {}", path);
                        continue;
                    }
                    let value = match std::fs::read_to_string(path) {
                        Ok(value) => value,
                        Err(e) => {
                            debug!("Failed to read long_description file: {}", e);
                            continue;
                        }
                    };
                    ret.extend(parse_python_long_description(
                        &value,
                        metadata.get("long_description_content_type"),
                        &origin,
                    )?);
                } else {
                    ret.extend(parse_python_long_description(
                        value,
                        metadata.get("long_description_content_type"),
                        &origin,
                    )?);
                }
            }
            "maintainer" => {
                ret.push(UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::Maintainer(Person {
                        name: Some(value.to_string()),
                        email: metadata
                            .get("maintainer_email")
                            .or_else(|| metadata.get("maintainer-email"))
                            .map(|s| s.to_string()),
                        url: None,
                    }),
                    certainty: Some(Certainty::Certain),
                    origin: Some(origin.clone()),
                });
            }
            "author" => {
                ret.push(UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::Author(vec![Person {
                        name: Some(value.to_string()),
                        email: metadata
                            .get("author_email")
                            .or_else(|| metadata.get("author-email"))
                            .map(|s| s.to_string()),
                        url: None,
                    }]),
                    certainty: Some(Certainty::Certain),
                    origin: Some(origin.clone()),
                });
            }
            "project_urls" => {
                // One "Name = URL" pair per line; malformed lines are skipped.
                let urls = value.split('\n').filter_map(|s| {
                    if s.is_empty() {
                        return None;
                    }
                    let (key, value) = match s.split_once('=') {
                        Some((key, value)) => (key, value),
                        None => {
                            debug!("Invalid project_urls line: {}", s);
                            return None;
                        }
                    };
                    Some((key.to_string(), value.to_string()))
                });
                ret.extend(parse_python_project_urls(urls, &origin));
            }
            "license" => {
                ret.push(UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::License(value.to_string()),
                    certainty: Some(Certainty::Certain),
                    origin: Some(origin.clone()),
                });
            }
            "home-page" => {
ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "long_description_content_type" | "maintainer_email" | "author_email" | "maintainer-email" | "author-email" => { // Ignore these, they are handled elsewhere } _ => { warn!("Unknown setup.cfg field: {}", field); } } } Ok(ret) } #[cfg(feature = "pyo3")] async fn guess_from_setup_py_executed( path: &Path, ) -> std::result::Result, ProviderError> { // Ensure only one thread can run this function at a time static SETUP_PY_LOCK: tokio::sync::Mutex<()> = tokio::sync::Mutex::const_new(()); let _guard = SETUP_PY_LOCK.lock().await; let mut ret = Vec::new(); // Import setuptools, just in case it replaces distutils // use pyo3::types::PyDict; Python::initialize(); let mut long_description = None; let mut urls: Vec = vec![]; Python::attach(|py| { let _ = py.import("setuptools"); let run_setup = py.import("distutils.core")?.getattr("run_setup")?; let os = py.import("os")?; let orig = match os.getattr("getcwd")?.call0() { Ok(orig) => Some(orig.extract::()?), Err(e) => { debug!("Failed to get current directory: {}", e); None } }; let parent = path.parent().unwrap(); os.getattr("chdir")?.call1((parent,))?; let result = || -> PyResult<_> { let kwargs = PyDict::new(py); kwargs.set_item("stop_after", "config")?; run_setup.call((path,), Some(&kwargs)) }(); if let Some(orig) = orig { os.getattr("chdir")?.call1((orig,))?; } let result = result?; if let Some(name) = result.call_method0("get_name")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(version) = result.call_method0("get_version")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(url) = result .call_method0("get_url")? .extract::>()? 
{ urls.push(url); } if let Some(download_url) = result.call_method0("get_download_url")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(license) = result.call_method0("get_license")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Likely), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(contact) = result.call_method0("get_contact")?.extract()? { let contact: String = match result .call_method0("get_contact_email")? .extract::>()? { Some(email) => format!("{} <{}>", contact, email), None => contact, }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(contact), certainty: Some(Certainty::Certain), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(description) = result.call_method0("get_description")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(description) = result .call_method0("get_long_description")? .extract::>()? { let content_type = match result.getattr("long_description_content_type") { Ok(content_type) => content_type.extract::>(), Err(e) if e.is_instance_of::(py) => Ok(None), Err(e) => return Err(e), }?; long_description = Some((description, content_type)); } if let Ok(metadata) = result.getattr("metadata") { if let Ok(project_urls) = metadata.getattr("project_urls") { ret.extend(parse_python_project_urls( project_urls .extract::>()? 
.into_iter(), &Origin::Path(path.to_path_buf()), )); } } Ok::<(), PyErr>(()) }) .map_err(|e| { warn!("Failed to run setup.py: {}", e); ProviderError::Other(e.to_string()) })?; if let Some((long_description, long_description_content_type)) = long_description { ret.extend(parse_python_long_description( long_description.as_str(), long_description_content_type.as_deref(), &Origin::Path(path.to_path_buf()), )?); } for url in urls { ret.extend(parse_python_url(&url).await); } Ok(ret) } #[cfg(feature = "pyo3")] /// Extracts upstream metadata from Python setup.py file pub async fn guess_from_setup_py( path: &Path, trust_package: bool, ) -> std::result::Result, ProviderError> { if trust_package { guess_from_setup_py_executed(path).await } else { guess_from_setup_py_parsed(path).await } } #[cfg(feature = "pyo3")] async fn guess_from_setup_py_parsed( path: &Path, ) -> std::result::Result, ProviderError> { Python::initialize(); let code = match std::fs::read_to_string(path) { Ok(setup_text) => setup_text, Err(e) => { warn!("Failed to read setup.py: {}", e); return Err(ProviderError::IoError(e)); } }; let mut long_description = None; let mut ret = Vec::new(); let mut urls: Vec = vec![]; Python::attach(|py| { let ast = py.import("ast").unwrap(); // Based on pypi.py in https://github.com/nexB/scancode-toolkit/blob/develop/src/packagedcode/pypi.py // // Copyright (c) nexB Inc. and others. All rights reserved. // ScanCode is a trademark of nexB Inc. // SPDX-License-Identifier: Apache-2.0 let tree = ast.call_method1("parse", (code,))?; let mut setup_args: HashMap> = HashMap::new(); let ast_expr = ast.getattr("Expr").unwrap(); let ast_call = ast.getattr("Call").unwrap(); let ast_assign = ast.getattr("Assign").unwrap(); let ast_name = ast.getattr("Name").unwrap(); for statement in tree.getattr("body")?.try_iter()? { let statement = statement?; // We only care about function calls or assignments to functions named // `setup` or `main` if (statement.is_instance(&ast_expr)? 
|| statement.is_instance(&ast_call)? || statement.is_instance(&ast_assign)?) && statement.getattr("value")?.is_instance(&ast_call)? && statement .getattr("value")? .getattr("func")? .is_instance(&ast_name)? && (statement.getattr("value")?.getattr("func")?.getattr("id")?.extract::()? == "setup" || // we also look for main as sometimes this is used instead of // setup() statement.getattr("value")?.getattr("func")?.getattr("id")?.extract::()? == "main") { let value = statement.getattr("value")?; // Process the arguments to the setup function for kw in value.getattr("keywords")?.try_iter()? { let kw = kw?; let arg_name = kw.getattr("arg")?.extract::()?; setup_args.insert(arg_name, kw.getattr("value")?.unbind()); } } } // End code from https://github.com/nexB/scancode-toolkit/blob/develop/src/packagedcode/pypi.py let ast_str = ast.getattr("Str").unwrap(); let ast_constant = ast.getattr("Constant").unwrap(); let get_str_from_expr = |expr: &Bound| -> Option { if expr.is_instance(&ast_str).ok()? { Some(expr.getattr("s").ok()?.extract::().ok()?) } else if expr.is_instance(&ast_constant).ok()? { Some(expr.getattr("value").ok()?.extract::().ok()?) } else { None } }; let ast_list = ast.getattr("List").unwrap(); let ast_tuple = ast.getattr("Tuple").unwrap(); let ast_set = ast.getattr("Set").unwrap(); let get_str_list_from_expr = |expr: &Bound| -> Option> { // We collect the elements of a list if the element // and tag function calls if expr.is_instance(&ast_list).ok()? || expr.is_instance(&ast_tuple).ok()? || expr.is_instance(&ast_set).ok()? { let mut ret = Vec::new(); for elt in expr.getattr("elts").ok()?.try_iter().ok()? { let elt = elt.ok()?; if let Some(value) = get_str_from_expr(&elt) { ret.push(value); } else { return None; } } Some(ret) } else { None } }; let ast = py.import("ast").unwrap(); let ast_dict = ast.getattr("Dict").unwrap(); let get_dict_from_expr = |expr: &Bound| -> Option> { if expr.is_instance(&ast_dict).ok()? 
{ let mut ret = HashMap::new(); let keys = expr.getattr("keys").ok()?; let values = expr.getattr("values").ok()?; for (key, value) in keys.try_iter().ok()?.zip(values.try_iter().ok()?) { if let Some(key) = get_str_from_expr(&key.ok()?) { if let Some(value) = get_str_from_expr(&value.ok()?) { ret.insert(key, value); } else { return None; } } else { return None; } } Some(ret) } else { None } }; // TODO: what if kw.value is an expression like a call to // version=get_version or version__version__ for (key, value) in setup_args.iter() { let value = value.bind(py); match key.as_str() { "name" => { if let Some(name) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "version" => { if let Some(version) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "description" => { if let Some(description) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "long_description" => { if let Some(description) = get_str_from_expr(value) { let content_type = setup_args.get("long_description_content_type"); let content_type = if let Some(content_type) = content_type { get_str_from_expr(content_type.bind(py)) } else { None }; long_description = Some((description, content_type)); } } "license" => { if let Some(license) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "download_url" => { if let Some(download_url) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } 
"url" => { if let Some(url) = get_str_from_expr(value) { urls.push(url.clone()); } } "project_urls" => { if let Some(project_urls) = get_dict_from_expr(value) { ret.extend(parse_python_project_urls(project_urls.into_iter(), &Origin::Path(path.into()))); } } "maintainer" => { if let Some(maintainer) = get_str_from_expr(value) { let maintainer_email = setup_args.get("maintainer_email"); let maintainer_email = if let Some(maintainer_email) = maintainer_email { get_str_from_expr(maintainer_email.bind(py)) } else { None }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some(maintainer), email: maintainer_email, url: None }), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "author" => { if let Some(author) = get_str_from_expr(value) { let author_email = setup_args.get("author_email"); let author_email = if let Some(author_email) = author_email { get_str_from_expr(author_email.bind(py)) } else { None }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: Some(author), email: author_email, url: None }]), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(author) = get_str_list_from_expr(value) { let author_emails = setup_args.get("author_email"); let author_emails = if let Some(author_emails) = author_emails { get_str_list_from_expr(author_emails.bind(py)).map_or_else(|| vec![None; author.len()], |v| v.into_iter().map(Some).collect()) } else { vec![None; author.len()] }; let persons = author.into_iter().zip(author_emails.into_iter()).map(|(name, email)| Person { name: Some(name), email, url: None }).collect(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(persons), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "keywords" => { if let Some(keywords) = get_str_list_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), 
origin: Some(path.into()), }); } } "classifiers" => { if let Some(classifiers) = get_str_list_from_expr(value) { ret.extend(parse_python_classifiers(classifiers.iter().map(|s| s.as_str()), &Origin::Path(path.into()))); } } // Handled above "author_email" | "maintainer_email" => {}, // Irrelevant "rust_extensions" | "data_files" | "packages" | "package_dir" | "entry_points" => {}, // Irrelevant: dependencies t if t.ends_with("_requires") || t.ends_with("_require") => {}, _ => { warn!("Unknown key in setup.py: {}", key); } } } Ok::<(), PyErr>(()) }).map_err(|e: PyErr| { Python::attach(|py| { if e.is_instance_of::(py) { warn!("Syntax error while parsing setup.py: {}", e); ProviderError::Other(e.to_string()) } else { warn!("Failed to parse setup.py: {}", e); ProviderError::Other(e.to_string()) } }) })?; if let Some((description, content_type)) = long_description { ret.extend(parse_python_long_description( description.as_str(), content_type.as_deref(), &Origin::Path(path.into()), )?); } for url in urls { ret.extend(parse_python_url(url.as_str()).await); } Ok(ret) } fn parse_python_classifiers<'a>( classifiers: impl Iterator + 'a, origin: &'a Origin, ) -> impl Iterator + 'a { classifiers.filter_map(|classifier| { let mut parts = classifier.split(" :: "); let category = parts.next()?; let subcategory = parts.next()?; let value = parts.next()?; let certainty = Some(Certainty::Certain); let origin = Some(origin.clone()); match (category, subcategory) { ("Development Status", _) => None, ("Intended Audience", _) => None, ("License", "OSI Approved") => Some(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.into()), certainty, origin, }), ("Natural Language", _) => None, ("Operating System", _) => None, ("Programming Language", _) => None, ("Topic", _) => None, _ => { warn!("Unknown classifier: {}", classifier); None } } }) } /// PyPI project information from the PyPI API #[derive(Deserialize)] pub struct PypiProjectInfo { /// Package author name pub author: 
Option, /// Package author email pub author_email: Option, /// Bug tracker URL pub bugtrack_url: Option, /// PyPI trove classifiers pub classifiers: Vec, /// Package description pub description: String, /// Content type of the description (e.g., text/markdown) pub description_content_type: Option, /// Documentation URL pub docs_url: Option, /// Package download URL pub download_url: Option, /// Download statistics pub downloads: HashMap, /// Whether the project uses dynamic metadata pub dynamic: Option, /// Project homepage URL pub home_page: Option, /// Package keywords pub keywords: Option, /// Package license pub license: Option, /// Package maintainer name pub maintainer: Option, /// Package maintainer email pub maintainer_email: Option, /// Package name pub name: String, /// PyPI package URL pub package_url: String, /// Target platform pub platform: Option, /// PyPI project URL pub project_url: String, /// Additional project URLs pub project_urls: Option>, /// Whether the package provides extras pub provides_extra: Option, /// Current release URL pub release_url: String, /// Distribution requirements pub requires_dist: Option>, /// Required Python version pub requires_python: Option, /// Package summary pub summary: String, /// Package version pub version: String, /// Whether the release is yanked pub yanked: Option, /// Reason for yanking the release pub yanked_reason: Option, } /// Cryptographic digests for a PyPI release #[derive(Deserialize)] pub struct Digests { /// MD5 hash digest pub md5: String, /// SHA256 hash digest pub sha256: String, /// BLAKE2b-256 hash digest pub blake2b_256: String, } /// Information about a specific PyPI release #[derive(Deserialize)] pub struct PypiRelease { /// Release comment text pub comment_text: String, /// Cryptographic digests for this release pub digests: Digests, /// Number of downloads pub downloads: isize, /// Release filename pub filename: String, /// Whether the release has a signature pub has_sig: bool, /// MD5 
digest of the release file pub md5_digest: String, /// Package type (e.g., sdist, bdist_wheel) pub packagetype: String, /// Target Python version pub python_version: String, /// Required Python version for this release pub requires_python: Option, /// File size in bytes pub size: isize, /// Upload timestamp pub upload_time: String, /// Upload timestamp in ISO 8601 format pub upload_time_iso_8601: String, /// Download URL for this release pub url: String, /// Whether this release is yanked pub yanked: bool, /// Reason for yanking this release pub yanked_reason: Option, } /// PyPI URL information for a release artifact #[derive(Deserialize)] pub struct PypiUrl { /// Comment text for this URL pub comment_text: String, /// Cryptographic digests for this URL pub digests: Digests, /// Filename for this URL pub filename: String, /// Whether this URL has a signature pub has_sig: bool, /// Package type for this URL pub packagetype: String, /// Target Python version for this URL pub python_version: String, /// Required Python version for this URL pub requires_python: Option, /// File size in bytes for this URL pub size: isize, /// Upload timestamp for this URL pub upload_time: String, /// Upload timestamp in ISO 8601 format for this URL pub upload_time_iso_8601: String, /// The actual download URL pub url: String, /// Whether this URL release is yanked pub yanked: bool, /// Reason for yanking this URL release pub yanked_reason: Option, } /// Complete PyPI project metadata #[derive(Deserialize)] pub struct PypiProject { /// Project information pub info: PypiProjectInfo, /// Last serial number for the project pub last_serial: isize, /// All releases for this project pub releases: HashMap>, /// URLs for the current release pub urls: Vec, /// Known security vulnerabilities pub vulnerabilities: Vec, } impl TryInto for PypiProject { type Error = ProviderError; fn try_into(self) -> Result { let mut metadata = UpstreamMetadata::default(); if let Some(author) = self.info.author { 
metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: Some(author), email: self.info.author_email, url: None, }]), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(self.info.description), certainty: Some(Certainty::Certain), origin: None, }); if let Some(homepage) = self.info.home_page { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(license) = self.info.license { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(self.info.name), certainty: Some(Certainty::Certain), origin: None, }); if let Some(maintainer) = self.info.maintainer { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some(maintainer), email: self.info.maintainer_email, url: None, }), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(self.info.version), certainty: Some(Certainty::Certain), origin: None, }); if let Some(keywords) = self.info.keywords { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords( keywords.split(',').map(|s| s.trim().to_string()).collect(), ), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(urls) = self.info.project_urls { metadata.0.extend(parse_python_project_urls( urls.into_iter(), &Origin::Other("pypi".to_string()), )); } for url_data in self.urls { if url_data.packagetype == "sdist" { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url_data.url), certainty: Some(Certainty::Certain), origin: None, }); } } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(self.info.summary), 
certainty: Some(Certainty::Certain),
            origin: None,
        });

        Ok(metadata)
    }
}

/// Loads PyPI project data from the PyPI JSON API.
///
/// Queries the canonical JSON endpoint (`https://pypi.org/pypi/{name}/json`)
/// and deserializes the response into a [`PypiProject`].
///
/// Returns `Ok(Some(project))` on success; download failures propagate via
/// `?` and deserialization failures are mapped to `ProviderError::Other`.
pub async fn load_pypi_project(name: &str) -> Result<Option<PypiProject>, ProviderError> {
    let http_url = format!("https://pypi.org/pypi/{}/json", name)
        .parse()
        // The URL is built from a fixed template, so parsing cannot fail for
        // ordinary package names.
        .unwrap();
    let data = crate::load_json_url(&http_url, None).await?;
    let pypi_data: PypiProject =
        serde_json::from_value(data).map_err(|e| crate::ProviderError::Other(e.to_string()))?;
    Ok(Some(pypi_data))
}

/// Retrieves upstream metadata for a Python package from PyPI.
///
/// Convenience wrapper around [`load_pypi_project`] that converts the raw
/// PyPI project record into `UpstreamMetadata`; an absent project yields an
/// empty `UpstreamMetadata`.
pub async fn remote_pypi_metadata(name: &str) -> Result<UpstreamMetadata, ProviderError> {
    let pypi = load_pypi_project(name).await?;
    match pypi {
        Some(pypi) => pypi.try_into(),
        None => Ok(UpstreamMetadata::default()),
    }
}

#[cfg(test)]
mod pypi_tests {
    use super::*;

    #[test]
    fn test_pypi_upstream_info() {
        let data = include_str!("../testdata/pypi.json");
        let pypi_data: PypiProject = serde_json::from_str(data).unwrap();
        assert_eq!(pypi_data.info.name, "merge3");
    }
}
upstream-ontologist-0.3.6/src/providers/r.rs000064400000000000000000000242361046102023000173120ustar 00000000000000//! See
use crate::{
    vcs, Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum,
    UpstreamDatumWithMetadata,
};

#[cfg(feature = "r-description")]
/// Parse upstream metadata from an R package DESCRIPTION file
///
/// Extracts package information from an R DESCRIPTION file including name, version,
/// description, maintainer, repository URLs, and other metadata fields.
pub async fn guess_from_r_description(
    path: &std::path::Path,
    _settings: &GuesserSettings,
) -> std::result::Result<Vec<UpstreamDatumWithMetadata>, ProviderError> {
    use std::str::FromStr;

    let contents = std::fs::read_to_string(path)?;
    // TODO: Use parse_relaxed
    let msg = r_description::lossy::RDescription::from_str(&contents)
        .map_err(|e| ProviderError::ParseError(e.to_string()))?;

    let mut results = Vec::new();

    // `Package` is mandatory in DESCRIPTION, so Name is always emitted.
    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::Name(msg.name),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });

    // `Repository` (e.g. CRAN) maps to the Archive datum.
    if let Some(repository) = msg.repository {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Archive(repository),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }

    if let Some(bug_reports) = msg.bug_reports {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::BugDatabase(bug_reports.to_string()),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }

    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::Version(msg.version.to_string()),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });

    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::License(msg.license),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });

    // The one-line `Title` field serves as the summary.
    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::Summary(msg.title),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });

    // Re-flow the multi-line description: keep the first line verbatim and
    // dedent the indented continuation lines.
    let lines: Vec<&str> = msg.description.split_inclusive('\n').collect();
    if !lines.is_empty() {
        let reflowed = format!("{}{}", lines[0], textwrap::dedent(&lines[1..].concat()));
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Description(reflowed),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }

    // The `Maintainer` field is parsed as a "Name <email>" person string.
    if let Some(maintainer) = msg.maintainer {
        let person = Person::from(maintainer.as_str());
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Maintainer(person),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }

    if let
Some(urls) = msg.url { if urls.len() == 1 { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(urls[0].url.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for entry in urls { let url = &entry.url; let label = entry.label.as_deref(); if let Some(hostname) = url.host_str() { if hostname == "bioconductor.org" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("Bioconductor".to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if label.map(str::to_lowercase).as_deref() == Some("devel") || label.map(str::to_lowercase).as_deref() == Some("repository") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if label.map(str::to_lowercase).as_deref() == Some("homepage") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(repo_url) = vcs::guess_repo_from_url(url, None).await { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } Ok(results) } #[cfg(test)] #[cfg(feature = "r-description")] mod description_tests { use super::*; #[tokio::test] async fn test_read() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("DESCRIPTION"); std::fs::write( &path, r#"Package: crul Title: HTTP Client Description: A simple HTTP client, with tools for making HTTP requests, and mocking HTTP requests. The package is built on R6, and takes inspiration from Ruby's 'faraday' gem () The package name is a play on curl, the widely used command line tool for HTTP, and this package is built on top of the R package 'curl', an interface to 'libcurl' (). 
Version: 0.8.4 License: MIT + file LICENSE Authors@R: c( person("Scott", "Chamberlain", role = c("aut", "cre"), email = "myrmecocystus@gmail.com", comment = c(ORCID = "0000-0003-1444-9135")) ) URL: https://github.com/ropensci/crul (devel) https://ropenscilabs.github.io/http-testing-book/ (user manual) https://www.example.com/crul (homepage) BugReports: https://github.com/ropensci/crul/issues Encoding: UTF-8 Language: en-US Imports: curl (>= 3.3), R6 (>= 2.2.0), urltools (>= 1.6.0), httpcode (>= 0.2.0), jsonlite, mime Suggests: testthat, fauxpas (>= 0.1.0), webmockr (>= 0.1.0), knitr VignetteBuilder: knitr RoxygenNote: 6.1.1 X-schema.org-applicationCategory: Web X-schema.org-keywords: http, https, API, web-services, curl, download, libcurl, async, mocking, caching X-schema.org-isPartOf: https://ropensci.org NeedsCompilation: no Packaged: 2019-08-02 19:58:21 UTC; sckott Author: Scott Chamberlain [aut, cre] () Maintainer: Scott Chamberlain Repository: CRAN Date/Publication: 2019-08-02 20:30:02 UTC "#, ) .unwrap(); let ret = guess_from_r_description(&path, &GuesserSettings::default()) .await .unwrap(); assert_eq!( ret, vec![ UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("crul".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("CRAN".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase( "https://github.com/ropensci/crul/issues".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Version("0.8.4".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("MIT + file LICENSE".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Summary("HTTP Client".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Description( r#"A simple HTTP client, with tools for making HTTP requests, and mocking HTTP requests. The package is built on R6, and takes inspiration from Ruby's 'faraday' gem () The package name is a play on curl, the widely used command line tool for HTTP, and this package is built on top of the R package 'curl', an interface to 'libcurl' ()."# .to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some("Scott Chamberlain".to_string()), email: Some("myrmecocystus@gmail.com".to_string()), url: None }), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( "https://github.com/ropensci/crul".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://www.example.com/crul".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, ] ); } } upstream-ontologist-0.3.6/src/providers/repology.rs000064400000000000000000000044141046102023000207060ustar 00000000000000use crate::UpstreamDatum; use std::collections::HashMap; #[allow(dead_code)] #[derive(serde::Deserialize)] struct Project { pub name: String, pub status: Option, pub www: Vec, pub licenses: Vec, pub summary: Option, pub downloads: Vec, } /// Fetch upstream metadata from Repology project database /// /// Retrieves package information from Repology by aggregating data from multiple /// distributions and selecting the most reliable values based on package status. 
pub async fn guess_from_repology( repology_project: &str, ) -> Result, crate::ProviderError> { let metadata: Vec = serde_json::from_value( if let Some(value) = crate::get_repology_metadata(repology_project, None).await { value } else { return Ok(Vec::new()); }, ) .unwrap(); let mut fields = HashMap::new(); let mut add_field = |name, value, add| { *fields .entry(name) .or_insert(HashMap::new()) .entry(value) .or_insert(0) += add; }; for entry in metadata { let score = if entry.status.as_deref() == Some("outdated") { 1 } else { 10 }; for www in entry.www { add_field("Homepage", www, score); } for license in entry.licenses { add_field("License", license, score); } if let Some(summary) = entry.summary { add_field("Summary", summary, score); } for download in entry.downloads { add_field("Download", download, score); } } Ok(fields .into_iter() .map(|(name, scores)| { ( name.to_string(), scores .into_iter() .max_by_key(|(_, score)| *score) .unwrap() .0, ) }) .map(|(f, v)| match f.as_str() { "Homepage" => UpstreamDatum::Homepage(v), "License" => UpstreamDatum::License(v), "Summary" => UpstreamDatum::Summary(v), "Download" => UpstreamDatum::Download(v), _ => unreachable!(), }) .collect()) } upstream-ontologist-0.3.6/src/providers/ruby.rs000064400000000000000000000264211046102023000200310ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use log::debug; use serde::Deserialize; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; /// Extracts upstream metadata from Ruby gemspec file pub async fn guess_from_gemspec( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); #[derive(Debug)] enum GemValue { String(String), Array(Vec), } impl GemValue { fn as_str(&self) -> Option<&str> { match self { GemValue::String(s) => Some(s), 
GemValue::Array(_) => None, } } fn as_array(&self) -> Option<&Vec> { match self { GemValue::String(_) => None, GemValue::Array(a) => Some(a), } } } fn parse_value(value: &str) -> Result { let trimmed = value.trim(); if (trimmed.starts_with('"') && trimmed.ends_with('"')) || (trimmed.starts_with('\'') && trimmed.ends_with('\'')) { return Ok(GemValue::String(trimmed[1..trimmed.len() - 1].to_string())); } else if trimmed.starts_with('"') || trimmed.starts_with("'.freeze") { return Ok(GemValue::String(trimmed[1..].to_string())); } else if trimmed.starts_with('[') && trimmed.ends_with(']') { let elements = trimmed[1..trimmed.len() - 1] .split(',') .map(parse_value) .collect::, _>>()?; return Ok(GemValue::Array(elements)); } Err(format!("Could not parse value: {}", value)) } for line in reader.lines().map_while(Result::ok) { if line.starts_with('#') { continue; } if line.trim().is_empty() { continue; } if line == "Gem::Specification.new do |s|\n" || line == "end\n" { continue; } if let Some(line) = line.strip_prefix(" s.") { let (key, rawval) = match line.split_once('=') { Some((key, rawval)) => (key.trim(), rawval), _ => continue, }; let val = match parse_value(rawval.trim()) { Ok(val) => val, Err(_) => { debug!("Could not parse value: {}", rawval); continue; } }; match key { "name" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "version" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "homepage" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "summary" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(val.as_str().unwrap().to_string()), certainty: 
Some(Certainty::Certain), origin: Some(path.into()), }), "description" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "license" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "authors" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author( val.as_array() .unwrap() .iter() .map(|p| Person::from(p.as_str().unwrap())) .collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), _ => debug!("unknown field {} ({:?}) in gemspec", key, val), } } else { debug!("ignoring unparsable line in {}: {:?}", path.display(), line); } } Ok(results) } /// Ruby gem metadata URIs #[derive(Deserialize)] pub struct RubygemMetadata { /// Changelog URI pub changelog_uri: Option, /// Source code URI pub source_code_uri: Option, } /// Ruby gem dependency information #[derive(Deserialize)] pub struct RubygemDependency { /// Dependency name pub name: String, /// Version requirements pub requirements: String, } /// Ruby gem dependencies collection #[derive(Deserialize)] pub struct RubygemDependencies { /// Development dependencies pub development: Vec, /// Runtime dependencies pub runtime: Vec, } /// Complete Ruby gem metadata #[derive(Deserialize)] pub struct Rubygem { /// Gem name pub name: String, /// Total downloads pub downloads: usize, /// Version number pub version: String, /// Version creation timestamp pub version_created_at: String, /// Downloads for this version pub version_downloads: usize, /// Target platform pub platform: String, /// Authors pub authors: String, /// Gem description pub info: String, /// License information pub licenses: Vec, /// Additional metadata URIs pub metadata: RubygemMetadata, /// Whether version is yanked pub yanked: bool, /// SHA hash pub sha: String, /// 
Spec SHA hash pub spec_sha: String, /// Project URI pub project_uri: url::Url, /// Gem download URI pub gem_uri: url::Url, /// Homepage URI pub homepage_uri: Option, /// Wiki URI pub wiki_uri: Option, /// Documentation URI pub documentation_uri: Option, /// Mailing list URI pub mailing_list_uri: Option, /// Source code URI pub source_code_uri: Option, /// Bug tracker URI pub bug_tracker_uri: Option, /// Changelog URI pub changelog_uri: Option, /// Funding URI pub funding_uri: Option, /// Dependencies pub dependencies: RubygemDependencies, } impl TryFrom for UpstreamMetadata { type Error = ProviderError; fn try_from(gem: Rubygem) -> Result { let mut metadata = UpstreamMetadata::default(); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(gem.name), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(gem.version), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from(gem.authors.as_str())]), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(gem.homepage_uri.unwrap_or(gem.project_uri).to_string()), certainty: Some(Certainty::Certain), origin: None, }); if let Some(wiki_uri) = gem.wiki_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Wiki(wiki_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(mailing_list_uri) = gem.mailing_list_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::MailingList(mailing_list_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(bug_tracker_uri) = gem.bug_tracker_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_tracker_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(funding_uri) = 
gem.funding_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Funding(funding_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(source_code_uri) = gem.source_code_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(source_code_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(gem.licenses.join(", ")), certainty: Some(Certainty::Certain), origin: None, }); if let Some(documentation_uri) = gem.documentation_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(documentation_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(changelog_uri) = gem.changelog_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Changelog(changelog_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } Ok(metadata) } } /// Loads Ruby gem data from the RubyGems API pub async fn load_rubygem(name: &str) -> Result, ProviderError> { let url = format!("https://rubygems.org/api/v1/gems/{}.json", name) .parse() .unwrap(); let data = crate::load_json_url(&url, None).await?; let gem: Rubygem = serde_json::from_value(data).unwrap(); Ok(Some(gem)) } /// Retrieves upstream metadata for a Ruby gem from RubyGems pub async fn remote_rubygem_metadata(name: &str) -> Result { let gem = load_rubygem(name).await?; match gem { Some(gem) => gem.try_into(), None => Ok(UpstreamMetadata::default()), } } #[cfg(test)] mod tests { #[test] fn test_parse_gem() { let gemspec = include_str!("../testdata/rubygem.json"); let gem: super::Rubygem = serde_json::from_str(gemspec).unwrap(); assert_eq!(gem.name, "bullet"); } } upstream-ontologist-0.3.6/src/providers/rust.rs000064400000000000000000000341721046102023000200470ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, 
UpstreamDatumWithMetadata, UpstreamMetadata, }; use serde::Deserialize; use std::collections::HashMap; #[cfg(feature = "cargo")] #[derive(Deserialize)] struct CargoToml { package: Option, workspace: Option, } #[cfg(feature = "cargo")] #[derive(Deserialize)] struct CargoWorkspace { #[serde(default)] package: Option, } #[cfg(feature = "cargo")] /// Allow either specifying setting T directly or "workspace = true" pub enum DirectOrWorkspace { /// Direct value specification Direct(T), /// Workspace inheritance Workspace, } #[cfg(feature = "cargo")] impl<'de, T: serde::Deserialize<'de>> serde::Deserialize<'de> for DirectOrWorkspace { fn deserialize(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, { // Assume deserializing T, but if that fails, check for a table with "workspace = true" let v: toml::value::Value = serde::Deserialize::deserialize(deserializer)?; match T::deserialize(v.clone()) { Ok(t) => Ok(DirectOrWorkspace::Direct(t)), Err(_) => { let table = v.as_table().ok_or_else(|| { serde::de::Error::custom("expected either a value or a table") })?; if table.get("workspace").and_then(|v| v.as_bool()) == Some(true) { Ok(DirectOrWorkspace::Workspace) } else { Err(serde::de::Error::custom( "expected either a value or a table", )) } } } } } #[cfg(feature = "cargo")] #[derive(Deserialize)] struct CargoPackage { name: Option, #[serde(default)] version: Option>, #[serde(default)] authors: Option>, #[serde(default)] description: Option>, #[serde(default)] homepage: Option>, #[serde(default)] repository: Option>, #[serde(default)] license: Option>, } #[cfg(feature = "cargo")] macro_rules! 
resolve { ($workspace:expr, $package:expr, $field:ident) => { match $package.$field { Some(DirectOrWorkspace::Direct(ref s)) => Some(s.clone()), Some(DirectOrWorkspace::Workspace) => { if let Some(DirectOrWorkspace::Direct(ref s)) = $workspace.package.as_ref().and_then(|p| p.$field.as_ref()) { Some(s.clone()) } else { None } } None => None, } }; } /// Extracts upstream metadata from Cargo.toml file #[cfg(feature = "cargo")] pub fn guess_from_cargo( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // see https://doc.rust-lang.org/cargo/reference/manifest.html let doc: CargoToml = toml::from_str(&std::fs::read_to_string(path)?) .map_err(|e| ProviderError::ParseError(e.to_string()))?; let package = match doc.package { Some(p) => p, None => { log::debug!("No package section in Cargo.toml"); return Ok(Vec::new()); } }; let workspace = match doc.workspace { Some(w) => w, None => CargoWorkspace { package: None }, }; let mut results = Vec::new(); if let Some(name) = package.name { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.clone()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::CargoCrate(name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = resolve!(workspace, package, description) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = resolve!(workspace, package, homepage) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = resolve!(workspace, package, license) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if 
let Some(repository) = resolve!(workspace, package, repository) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = resolve!(workspace, package, version) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(authors) = package.authors { let authors = authors.iter().map(|a| Person::from(a.as_str())).collect(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Ok(results) } /// Translates crate names with dashes to their canonical form on crates.io pub async fn cargo_translate_dashes( crate_name: &str, ) -> Result, crate::HTTPJSONError> { let url = format!("https://crates.io/api/v1/crates?q={}", crate_name) .parse() .unwrap(); let json: serde_json::Value = crate::load_json_url(&url, None).await?; // Navigate through the JSON response to find the crate name. 
if let Some(crates) = json.get("crates").and_then(|c| c.as_array()) { for krate in crates { if let Some(name) = krate.get("id").and_then(|n| n.as_str()) { return Ok(Some(name.to_string())); } } } Ok(None) } /// Crate metadata from crates.io #[derive(Deserialize)] pub struct Crate { /// Crate badges pub badges: Vec, /// Creation timestamp pub created_at: String, /// Crate description pub description: Option, /// Documentation URL pub documentation: Option, /// Total downloads pub downloads: i64, /// Homepage URL pub homepage: Option, /// Crate identifier pub id: String, /// Keywords pub keywords: Vec, /// License identifier pub license: Option, /// Various links pub links: HashMap>, /// Maximum stable version pub max_stable_version: semver::Version, /// Maximum version pub max_version: semver::Version, /// Crate name pub name: String, /// Newest version pub newest_version: semver::Version, /// Recent downloads pub recent_downloads: i64, /// Repository URL pub repository: Option, /// Last update timestamp pub updated_at: String, /// Version IDs pub versions: Option>, } /// User information from crates.io #[derive(Deserialize)] pub struct User { /// User avatar URL pub avatar: String, /// User ID pub id: i32, /// User login name pub login: String, /// User display name pub name: String, /// User profile URL pub url: String, } /// Audit action information #[derive(Deserialize)] pub struct AuditAction { /// Action type pub action: String, /// Timestamp of the action pub time: String, /// User who performed the action pub user: User, } /// Information about a specific version of a crate #[derive(Deserialize)] pub struct CrateVersion { /// Audit actions for this version pub audit_actions: Vec, /// Names of binary targets pub bin_names: Vec, /// Checksum of the crate pub checksum: String, /// Name of the crate #[serde(rename = "crate")] pub crate_: String, /// Size of the crate in bytes pub crate_size: i64, /// Creation timestamp pub created_at: String, /// Download path 
pub dl_path: String, /// Number of downloads pub downloads: i64, /// Feature flags pub features: HashMap>, /// Whether the crate has a library pub has_lib: bool, /// Version ID pub id: i32, /// Library links pub lib_links: Option>, /// License identifier pub license: Option, /// Various links pub links: HashMap>, /// Version number pub num: semver::Version, /// User who published this version pub published_by: Option, /// Path to README file pub readme_path: String, /// Minimum Rust version required pub rust_version: Option, /// Last update timestamp pub updated_at: String, /// Whether this version is yanked pub yanked: bool, } /// Information about a crate from crates.io #[derive(Deserialize)] pub struct CrateInfo { /// Categories the crate belongs to pub categories: Vec, #[serde(rename = "crate")] crate_: Crate, /// Keywords associated with the crate pub keywords: Vec, /// All versions of the crate pub versions: Vec, } impl TryFrom for UpstreamMetadata { type Error = crate::ProviderError; fn try_from(value: CrateInfo) -> Result { let mut ret = UpstreamMetadata::default(); ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.crate_.name.to_string()), certainty: Some(Certainty::Certain), origin: None, }); if let Some(homepage) = value.crate_.homepage { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(repository) = value.crate_.repository { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(description) = value.crate_.description { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(license) = value.crate_.license { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: None, }); } 
ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.crate_.newest_version.to_string()), certainty: Some(Certainty::Certain), origin: None, }); Ok(ret) } } /// Loads crate information from crates.io API pub async fn load_crate_info(cratename: &str) -> Result, crate::ProviderError> { let http_url = format!("https://crates.io/api/v1/crates/{}", cratename); let data = crate::load_json_url(&http_url.parse().unwrap(), None).await?; Ok(Some(serde_json::from_value(data).unwrap())) } // TODO: dedupe with TryFrom implementation above fn parse_crates_io(data: &CrateInfo) -> Vec { let crate_data = &data.crate_; let mut results = Vec::new(); results.push(UpstreamDatum::Name(crate_data.name.to_string())); if let Some(homepage) = crate_data.homepage.as_ref() { results.push(UpstreamDatum::Homepage(homepage.to_string())); } if let Some(repository) = crate_data.repository.as_ref() { results.push(UpstreamDatum::Repository(repository.to_string())); } if let Some(description) = crate_data.description.as_ref() { results.push(UpstreamDatum::Summary(description.to_string())); } if let Some(license) = crate_data.license.as_ref() { results.push(UpstreamDatum::License(license.to_string())); } results.push(UpstreamDatum::Version( crate_data.newest_version.to_string(), )); results } /// Crates.io metadata provider pub struct CratesIo; impl Default for CratesIo { fn default() -> Self { Self::new() } } impl CratesIo { /// Creates a new CratesIo provider pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for CratesIo { fn name(&self) -> &'static str { "crates.io" } fn max_supported_certainty(&self) -> Certainty { Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Name", "Repository", "Version", "Summary"][..] 
} async fn guess_metadata(&self, name: &str) -> Result, ProviderError> { let data = load_crate_info(name).await?; if data.is_none() { return Ok(Vec::new()); } Ok(parse_crates_io(&data.unwrap())) } } /// Fetches upstream metadata for a crate from crates.io pub async fn remote_crate_data(name: &str) -> Result { let data = load_crate_info(name).await?; if let Some(data) = data { Ok(data.try_into()?) } else { Ok(UpstreamMetadata::default()) } } #[cfg(test)] mod crates_io_tests { use super::*; #[test] fn test_load_crate_info() { let data = include_str!("../testdata/crates.io.json"); let crate_info: CrateInfo = serde_json::from_str(data).unwrap(); assert_eq!(crate_info.crate_.name, "breezy"); } } upstream-ontologist-0.3.6/src/providers/security_md.rs000064400000000000000000000014471046102023000214000ustar 00000000000000//! use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; /// Extracts metadata from SECURITY.md files pub fn guess_from_security_md( name: &str, path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, ProviderError> { let path = path.strip_prefix("./").unwrap_or(path); // TODO(jelmer): scan SECURITY.md for email addresses/URLs with instructions let results = vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::SecurityMD(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }]; Ok(results) } upstream-ontologist-0.3.6/src/providers/waf.rs000064400000000000000000000026321046102023000176230ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use lazy_regex::regex; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; /// Extracts metadata from Waf wscript files pub fn guess_from_wscript( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); let appname_regex = 
regex!("APPNAME = [\'\"](.*)[\'\"]"); let version_regex = regex!("VERSION = [\'\"](.*)[\'\"]"); for line in reader.lines().map_while(Result::ok) { if let Some(captures) = appname_regex.captures(&line) { let name = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if let Some(captures) = version_regex.captures(&line) { let version = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.3.6/src/readme.rs000064400000000000000000001302031046102023000162620ustar 00000000000000use crate::{Certainty, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use lazy_regex::regex; use regex::Regex; use select::document::Document; use select::node::Node; use select::predicate::{And, Class, Name, Text}; use std::io::BufRead; use std::iter::Iterator; use url::Url; /// Determines if a paragraph should be skipped and extracts any metadata from it pub fn skip_paragraph(para: &str) -> (bool, Vec) { let mut ret = Vec::::new(); let re = regex!(r"(?ms)^See .* for more (details|information)\."); if re.is_match(para) { return (true, ret); } let re = regex!(r"(?ms)^See .* for instructions"); if re.is_match(para) { return (true, ret); } let re = regex!(r"(?ms)^Please refer .*\."); if re.is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^It is licensed under (.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^License: (.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), 
certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^(Home page|homepage_url|Main website|Website|Homepage): (.*)").captures(para) { let mut url = m.get(2).unwrap().as_str().to_string(); if url.starts_with('<') && url.ends_with('>') { url = url[1..url.len() - 1].to_string(); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^More documentation .* at http.*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^Documentation (can be found|is hosted|is available) (at|on) ([^ ]+)") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(m.get(3).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^Documentation for (.*)\s+(can\s+be\s+found|is\s+hosted)\s+(at|on)\s+([^ ]+)") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(m.get(4).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^Documentation[, ].*found.*(at|on).*\.").is_match(para) { return (true, ret); } if regex!(r"(?ms)^See (http.*|gopkg.in.*|github.com.*)").is_match(para) { return (true, ret); } if regex!(r"(?ms)^Available on (.*)").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^This software is freely distributable under the (.*) license.*") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^This .* is hosted at .*").is_match(para) { return 
(true, ret); } if regex!(r"(?ms)^This code has been developed by .*").is_match(para) { return (true, ret); } if para.starts_with("Download and install using:") { return (true, ret); } if regex!(r"(?ms)^Bugs should be reported by .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^The bug tracker can be found at (http[^ ]+[^.])").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^Copyright (\(c\) |)(.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(m.get(2).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if regex!(r"(?ms)^You install .*").is_match(para) { return (true, ret); } if regex!(r"(?ms)^This .* is free software; .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^Please report any bugs(.*) to <(.*)>").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.get(2).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if regex!(r"(?ms)^Share and Enjoy").is_match(para) { return (true, ret); } let lines = para.lines().collect::>(); if !lines.is_empty() && ["perl Makefile.PL", "make", "./configure"].contains(&lines[0].trim()) { return (true, ret); } if regex!(r"(?ms)^For further information, .*").is_match(para) { return (true, ret); } if regex!(r"(?ms)^Further information .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^A detailed ChangeLog can be found.*:\s+(http.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Changelog(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } (false, ret) } /// Extracts description and 
metadata from reStructuredText README content pub fn description_from_readme_rst( long_description: &str, ) -> Result<(Option, Vec), ProviderError> { // Work around https://github.com/flying-sheep/rust-rst/issues/55 let mut fields: Vec<(&str, String)> = Vec::new(); let mut in_field = false; let long_description = long_description .lines() .filter(|line| { // Filter out field lists. Syntax is: // :field: value // with possible continuation lines that are indented. // field can contain any character except a colon followed by a space unless // it is escaped with a backslash. if line.starts_with([' ', '\t'].as_ref()) && in_field { if in_field { fields.last_mut().unwrap().1.push_str(line.trim()); return false; } return true; } else { in_field = false; } if let Some((_, field, value)) = lazy_regex::regex_captures!(r"^:([^:]+): (.*)", line) { fields.push((field, value.to_string())); in_field = true; false } else { line != &"----" } }) .collect::>() .join("\n") + "\n"; let html = rst_to_html(&long_description); let (description, mut md) = description_from_readme_html(&html)?; for (field, value) in fields { md.extend(parse_field(field, &NodeOrText::Text(&value))); } Ok((description, md)) } /// Extracts description and metadata from Markdown README content pub fn description_from_readme_md( long_description: &str, ) -> Result<(Option, Vec), ProviderError> { let parser = pulldown_cmark::Parser::new(long_description); let mut html_output = String::new(); pulldown_cmark::html::push_html(&mut html_output, parser); description_from_readme_html(&html_output) } /// Guesses upstream metadata from README files pub async fn guess_from_readme( path: &std::path::Path, _trust_package: bool, ) -> Result, ProviderError> { let mut urls: Vec = vec![]; let mut ret = vec![]; let f = std::fs::File::open(path)?; let reader = std::io::BufReader::new(f); let mut line_iter = reader.lines(); while let Some(line) = line_iter.next() { let line = line?; let line = line.trim(); let mut cmdline = 
line.strip_prefix('$').unwrap_or(line).trim().to_string(); if cmdline.starts_with("git clone ") || cmdline.starts_with("fossil clone ") || cmdline.starts_with("hg clone ") || cmdline.starts_with("bzr co ") || cmdline.starts_with("bzr branch ") { while cmdline.ends_with('\\') { let next_line = line_iter.next().unwrap()?; cmdline = format!("{} {}", cmdline, next_line.trim()); } if let Some(url) = crate::vcs_command::url_from_vcs_command(cmdline.as_bytes()) { urls.push(url.parse().unwrap()); } } for m in lazy_regex::regex!("[\"'`](git clone.*)[\"`']").captures_iter(line) { if let Some(url) = crate::vcs_command::url_from_git_clone_command( m.get(1).unwrap().as_str().as_bytes(), ) { urls.push(url.parse().unwrap()); } } if let Some(m) = lazy_regex::regex_find!(r"cvs.*-d\s*:pserver:.*", line) { if let Some(url) = crate::vcs_command::url_from_cvs_co_command(m.as_bytes()) { urls.push(url.parse().unwrap()); } } for m in lazy_regex::regex!("($ )?(svn co .*)").captures_iter(line) { if let Some(url) = crate::vcs_command::url_from_svn_co_command(m.get(2).unwrap().as_str().as_bytes()) { urls.push(url.parse().unwrap()); } } const PROJECT_RE: &str = "([^/]+)/([^/?.()\"#>\\s]*[^-,/?.()\"#>\\s])"; for m in regex::Regex::new(format!("https://travis-ci.org/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}/{}", m.get(1).unwrap().as_str(), m.get(2).unwrap().as_str() )), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://coveralls.io/r/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}/{}", m.get(1).unwrap().as_str(), m.get(2).unwrap().as_str() )), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex!("https://github.com/([^/]+)/([^/]+)/issues").find_iter(line) { 
ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://github.com/{}/(.git)?", PROJECT_RE).as_str()) .unwrap() .find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.as_str().trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://github.com/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( m.get(0).unwrap().as_str().trim_end_matches('.').to_string(), ), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(m) = lazy_regex::regex_find!(r"git://([^ ]+)", line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex!("https://([^]/]+)/([^]\\s()\"#]+)").find_iter(line) { let url = m.as_str().trim_end_matches('.'); if crate::vcs::is_gitlab_site(m.as_str(), None).await { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url.parse().unwrap(), None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } else { log::warn!("Ignoring invalid URL {} in {}", url, path.display()); } } } } let (description, extra_metadata) = match path.extension().and_then(|s| s.to_str()) { Some("md") => { let contents = std::fs::read_to_string(path)?; description_from_readme_md(&contents) } Some("rst") => { let contents = std::fs::read_to_string(path)?; description_from_readme_rst(&contents) } None => { let contents = std::fs::read_to_string(path)?; Ok(description_from_readme_plain(&contents)?) 
} Some("pod") => { let contents = std::fs::read_to_string(path)?; let metadata = crate::providers::perl::guess_from_pod( &contents, &Origin::Path(path.to_path_buf()), )?; Ok((None, metadata)) } _ => Ok((None, vec![])), }?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } ret.extend(extra_metadata); let prefer_public = |url: &url::Url| -> i32 { if url.scheme().contains("ssh") { 1 } else { 0 } }; urls.sort_by_key(prefer_public); if !urls.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(urls.remove(0).to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } Ok(ret) } /// Parses the first header from text to extract name, tagline, and rest pub fn parse_first_header_text(text: &str) -> (Option<&str>, Option<&str>, Option<&str>) { if let Some((_, name, version)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) ([0-9.]+)$", text) { return (Some(name), None, Some(version)); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+): (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) - (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) -- (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, version)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) version ([^ ]+)", text) { return (Some(name), None, Some(version)); } (None, None, None) } #[test] fn test_parse_first_header_text() { assert_eq!( parse_first_header_text("libwand 1.0"), (Some("libwand"), None, Some("1.0")) ); assert_eq!( parse_first_header_text("libwand -- A wand"), (Some("libwand"), Some("A wand"), None) ); assert_eq!( parse_first_header_text("libwand version 1.0"), (Some("libwand"), None, 
Some("1.0")) ); } /// Extracts description and metadata from plain text README content pub fn description_from_readme_plain( text: &str, ) -> Result<(Option, Vec), ProviderError> { let mut lines: Vec<&str> = text.split_terminator('\n').collect(); let mut metadata: Vec = Vec::new(); if lines.is_empty() { return Ok((None, Vec::new())); } if !lines[0].trim().is_empty() && lines.len() > 1 && (lines[1].is_empty() || !lines[1].chars().next().unwrap().is_alphanumeric()) { let (name, summary, version) = parse_first_header_text(lines[0]); if let Some(name) = name { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Likely), }); } if let Some(version) = version { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Likely), }); } if let Some(summary) = summary { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Likely), }); } if name.is_some() || version.is_some() || summary.is_some() { lines.remove(0); } } while !lines.is_empty() && lines[0].trim().trim_matches('-').is_empty() { lines.remove(0); } let mut paras: Vec> = Vec::new(); let mut current_para: Vec<&str> = Vec::new(); for line in lines { if line.trim().is_empty() { if !current_para.is_empty() { paras.push(current_para.clone()); current_para.clear(); } } else { current_para.push(line); } } if !current_para.is_empty() { paras.push(current_para.clone()); } let mut output: Vec = Vec::new(); for para in paras { if para.is_empty() { continue; } let line = para.join("\n"); let (skip, extra_metadata) = skip_paragraph(&line); metadata.extend(extra_metadata); if skip { continue; } output.push(format!("{}\n", line)); } let description = if output.len() > 30 { None } else { while !output.is_empty() && output.last().unwrap().trim().is_empty() { output.pop(); } Some(output.join("\n")) }; 
Ok((description, metadata)) } fn ul_is_field_list(el: Node) -> bool { let names = ["Issues", "Home", "Documentation", "License"]; for li in el.find(Name("li")) { let text = li.text(); if let Some((_, name)) = lazy_regex::regex_captures!(r"([A-Za-z]+)\s*:.*", text.trim()) { if !names.contains(&name) { return false; } } else { return false; } } true } #[test] fn test_ul_is_field_list() { let el = Document::from( r#""#, ); let ul = el.find(Name("ul")).next().unwrap(); assert!(ul_is_field_list(ul)); let el = Document::from( r#"
  • Some other thing
"#, ); let ul = el.find(Name("ul")).next().unwrap(); assert!(!ul_is_field_list(ul)); } fn skip_paragraph_block(para: &Node) -> (bool, Vec) { let (skip, mut extra_metadata) = skip_paragraph(&render(para)); if skip { return (true, extra_metadata); } for child in para.children() { if let Some(text_node) = child.as_text() { if text_node.trim().is_empty() { continue; } } if child.name() == Some("a") { let mut name: Option = None; if let Some(first_child) = para.first_child() { if let Some(text) = first_child.as_text() { name = Some(text.to_string()); } else if first_child.name() == Some("img") { name = first_child.attr("alt").map(|s| s.to_string()); } } if let Some(name) = name { match name.as_str() { "CRAN" | "CRAN_Status_Badge" | "CRAN_Logs_Badge" => { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("CRAN".to_string()), certainty: Some(Certainty::Confident), origin: None, }); } "Gitter" => { if let Some(href) = child.attr("href") { let parsed_url = Url::parse(href).unwrap(); extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}", parsed_url.path().trim_start_matches('/') )), certainty: Some(Certainty::Confident), origin: None, }); } } "Build Status" => { if let Some(href) = child.attr("href") { let parsed_url = Url::parse(href).unwrap(); if parsed_url.host_str() == Some("travis-ci.org") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}", parsed_url.path().trim_start_matches('/') )), certainty: Some(Certainty::Confident), origin: None, }); } } } "Documentation" => { if let Some(href) = child.attr("href") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(href.to_string()), certainty: Some(Certainty::Confident), origin: None, }); } } "API Docs" => { if let Some(href) = child.attr("href") { extra_metadata.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::APIDocumentation(href.to_string()), certainty: Some(Certainty::Confident), origin: None, }); } } "Downloads" => { if let Some(href) = child.attr("href") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(href.to_string()), certainty: Some(Certainty::Confident), origin: None, }); } } "crates.io" => { if let Some(href) = child.attr("href") { if href.starts_with("https://crates.io/crates/") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::CargoCrate( href.rsplit('/').next().unwrap().to_string(), ), certainty: Some(Certainty::Confident), origin: None, }); } } } name => { if let Some(caps) = regex!(r"(.*) License").captures(name) { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(caps[1].to_string()), certainty: Some(Certainty::Likely), origin: None, }); } else { log::debug!("Unhandled field {:?} in README", name); } } } } } } if render(para).is_empty() { return (true, extra_metadata); } (false, vec![]) } fn render(el: &Node) -> String { el.find(Text).map(|t| t.text()).collect::>().join("") } fn parse_first_header(el: &Node) -> Vec { let mut metadata = Vec::new(); let binding = render(el); let (name, summary, version) = parse_first_header_text(&binding); if let Some(mut name) = name { if name.to_lowercase().contains("installation") { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Possible), origin: None, }); } else { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Likely), origin: None, }); } if let Some(suffix) = name.strip_prefix("About ") { name = suffix; } metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Likely), origin: None, }); } if let Some(summary) = summary { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: 
Some(Certainty::Likely), origin: None, }); } if let Some(version) = version { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Likely), origin: None, }); } metadata } fn is_semi_header(el: &Node) -> bool { if el.name() != Some("p") { return false; } let text = render(el); if text == "INSTALLATION" { return true; } if text.contains('\n') { return false; } let re = Regex::new(r"([a-z-A-Z0-9]+) - ([^\.]+)").unwrap(); re.is_match(&text) } fn extract_paragraphs<'a>( children: impl Iterator>, paragraphs: &mut Vec, metadata: &mut Vec, ) { for child in children { match child.name() { Some("div") => { extract_paragraphs(child.children(), paragraphs, metadata); if !paragraphs.is_empty() && child.is(Class("section")) { break; } } Some("section") => { extract_paragraphs(child.children(), paragraphs, metadata); if !paragraphs.is_empty() { break; } } Some("p") => { if is_semi_header(&child) { if paragraphs.is_empty() { metadata.extend(parse_first_header(&child)); continue; } else { break; } } let (skip, extra_metadata) = skip_paragraph_block(&child); metadata.extend(extra_metadata); if skip { if paragraphs.is_empty() { continue; } else { break; } } let text = render(&child); if !text.trim().is_empty() { paragraphs.push(text + "\n"); } } Some("pre") => paragraphs.push(render(&child)), Some("ul") if !paragraphs.is_empty() => { if ul_is_field_list(child) { metadata.extend(parse_ul_field_list(&child)); } else { paragraphs.push( child .find(Name("li")) .map(|li| format!("* {}\n", render(&li))) .collect::>() .join(""), ); } } Some(h) if h.starts_with("h") => { if paragraphs.is_empty() { if !["About", "Introduction", "Overview", "Documentation"] .contains(&render(&child).trim()) { metadata.extend(parse_first_header(&child)); } } else { break; } } None => {} _ => { log::debug!("Unhandled element in README: {:?}", child.name()); } } } } fn parse_field(name: &str, body: &NodeOrText) -> Vec { let mut metadata = 
Vec::new(); let get_link = || -> Option { match body { NodeOrText::Node(body) => { if let Some(a) = body.find(Name("a")).next() { Some(a.attr("href").unwrap().to_string()) } else if body.is(Name("a")) { Some(body.attr("href").unwrap().to_string()) } else { body.as_text() .filter(|u| Url::parse(u).is_ok()) .map(|text| text.to_string()) } } NodeOrText::Text(text) => { if let Ok(url) = Url::parse(text) { Some(url.to_string()) } else { None } } } }; match name { "Homepage" | "Home" => { if let Some(link) = get_link() { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(link), certainty: Some(Certainty::Confident), origin: None, }); } } "Issues" => { if let Some(link) = get_link() { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(link), certainty: Some(Certainty::Confident), origin: None, }); } } "Documentation" => { if let Some(link) = get_link() { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(link), certainty: Some(Certainty::Confident), origin: None, }); } } "License" => { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(match body { NodeOrText::Node(body) => render(body), NodeOrText::Text(text) => text.to_string(), }), certainty: Some(Certainty::Confident), origin: None, }); } _ => { log::debug!("Unhandled field {:?} in README", name); } } metadata } enum NodeOrText<'a> { Node(Node<'a>), Text(&'a str), } impl<'a> From> for NodeOrText<'a> { fn from(node: Node<'a>) -> Self { if let Some(text) = node.as_text() { NodeOrText::Text(text) } else { NodeOrText::Node(node) } } } impl<'a> From<&'a str> for NodeOrText<'a> { fn from(text: &'a str) -> Self { NodeOrText::Text(text) } } /// Extracts a list of fields from a `ul` element. /// /// # Arguments /// * `el` - The `ul` element to extract fields from. /// /// # Returns /// A list of fields extracted from the `ul` element. 
fn iter_ul_field_list<'a>(el: &'a Node<'a>) -> Vec<(&'a str, NodeOrText<'a>)> { el.find(Name("li")) .filter_map(|li| { let children: Vec<_> = li.children().collect(); if children.len() == 2 && children[0].is(Text) { let name = children[0].as_text().unwrap().trim().trim_end_matches(':'); return Some((name, children[1].into())); } else if children.len() == 1 { let (name, value) = children[0].as_text().unwrap().split_once(':')?; return Some(( name.trim(), NodeOrText::Text(value.trim().trim_start_matches(':')), )); } None }) .collect() } /// Parses a list of fields from a `ul` element. /// /// # Arguments /// * `el` - The `ul` element to parse. /// /// # Returns /// A list of metadata extracted from the `ul` element. fn parse_ul_field_list(el: &Node) -> Vec { let mut metadata = Vec::new(); for (name, el_ref) in iter_ul_field_list(el) { metadata.extend(parse_field(name, &el_ref)); } metadata } fn description_from_basic_soup( soup: &Document, ) -> (Option, Vec) { let mut metadata = Vec::new(); let body = soup .find(Name("body")) .next() .expect("No body element found in HTML document"); let mut child_iter = body.children().peekable(); // Drop any headers while let Some(el) = child_iter.peek() { if el.name().map(|h| h.starts_with("h")).unwrap_or(false) { metadata.extend(parse_first_header(el)); child_iter.next(); } else if el.is(Text) { child_iter.next(); continue; } else { break; } } if let Some(table) = soup.find(And(Name("table"), Class("field-list"))).next() { metadata.extend(parse_ul_field_list(&table)); } let mut paragraphs: Vec = Vec::new(); extract_paragraphs(child_iter, &mut paragraphs, &mut metadata); if paragraphs.is_empty() { log::debug!("Empty description; no paragraphs."); return (None, metadata); } if paragraphs.len() < 6 { return (Some(paragraphs.join("\n")), metadata); } log::debug!( "Not returning description, number of paragraphs too high: {}", paragraphs.len() ); (None, metadata) } /// Extracts description and metadata from HTML README content pub fn 
description_from_readme_html( html_text: &str, ) -> Result<(Option, Vec), ProviderError> { let soup = Document::from(html_text); Ok(description_from_basic_soup(&soup)) } fn rst_to_html(rst_text: &str) -> String { use rst_parser::parse; use rst_renderer::render_html; let document = parse(rst_text).unwrap(); let mut output = Vec::new(); render_html(&document, &mut std::io::Cursor::new(&mut output), true).unwrap(); String::from_utf8(output).unwrap() } #[cfg(test)] mod tests { use super::*; #[test] fn test_rst_to_html() { let rst = r#".. _`rst`: RST === This is a test of RST to HTML conversion."#; let html = rst_to_html(rst); assert_eq!( html, "\n\n\n\n\n\n\n\n\n\n\n
\n

RST

\n

This is a test of RST to HTML conversion.

\n
\n\n\n" ); } #[test] fn test_parse_first_header_text() { assert_eq!( super::parse_first_header_text("libwand 1.0"), (Some("libwand"), None, Some("1.0")) ); assert_eq!( super::parse_first_header_text("libwand -- A wand"), (Some("libwand"), Some("A wand"), None) ); assert_eq!( super::parse_first_header_text("libwand version 1.0"), (Some("libwand"), None, Some("1.0")) ); } #[test] fn test_parse_field() { assert_eq!( super::parse_field( "Homepage", &root(&Document::from( r#"example"# )) .into() ), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Homepage("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); assert_eq!( super::parse_field( "Issues", &root(&Document::from( r#"example"# )) .into(), ), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::BugDatabase("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); assert_eq!( super::parse_field( "Documentation", &root(&Document::from( r#"example"# )) .into() ), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Documentation("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); assert_eq!( super::parse_field("License", &"MIT".into()), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::License("MIT".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); } struct Root; impl select::predicate::Predicate for Root { fn matches(&self, node: &Node) -> bool { node.parent().is_none() } } fn root(doc: &Document) -> Node<'_> { let root = doc.find(Root).next().unwrap(); assert_eq!(root.name(), Some("html")); root.find(Name("body")) .next() .unwrap() .first_child() .unwrap() } #[test] fn test_is_semi_header() { let fragment = Document::from("

INSTALLATION

"); assert!(root(&fragment).name() == Some("p")); assert!(super::is_semi_header(&root(&fragment))); let fragment = Document::from("

Some other thing

"); assert!(!super::is_semi_header(&root(&fragment))); } #[test] fn test_iter_ul_field_list() { let fragment = Document::from( r#""#, ); assert_eq!(Some("ul"), root(&fragment).name()); assert_eq!( super::iter_ul_field_list(&root(&fragment)) .iter() .map(|(name, _)| name) .collect::>(), vec![&"Issues", &"Home"] ); } #[test] fn test_parse_ul_field_list() { let fragment = Document::from( r#""#, ); assert_eq!( super::parse_ul_field_list(&root(&fragment)), vec![ super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::BugDatabase( "https://example.com/issues".to_string() ), certainty: Some(super::Certainty::Confident), origin: None, }, super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Homepage("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }, super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Documentation( "https://example.com/docs".to_string() ), certainty: Some(super::Certainty::Confident), origin: None, }, super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::License("MIT".to_string()), certainty: Some(super::Certainty::Confident), origin: None, } ] ); } #[test] fn test_render() { let fragment = Document::from("

Some text

"); assert_eq!(super::render(&root(&fragment)), "Some text"); let fragment = Document::from("

Some bold text

"); assert_eq!(super::render(&root(&fragment)), "Some bold text"); } #[test] fn test_extract_paragraphs() { let fragment = Document::from( r#"

Some text

Some more text

"#, ); let mut paragraphs = Vec::new(); super::extract_paragraphs(root(&fragment).children(), &mut paragraphs, &mut vec![]); assert_eq!(paragraphs, vec!["Some text\n", "Some more text\n"]); } #[test] fn test_swh() { let document = Document::from(include_str!("testdata/swh.html")); let (description, metadata) = super::description_from_basic_soup(&document); assert_eq!( description, Some( r#"The Software Heritage Git Loader is a tool and a library to walk a local Git repository and inject into the SWH dataset all contained files that weren't known before. The main entry points are: * :class:swh.loader.git.loader.GitLoader for the main loader which can ingest either local or remote git repository's contents. This is the main implementation deployed in production. * :class:swh.loader.git.from_disk.GitLoaderFromDisk which ingests only local git clone repository. * :class:swh.loader.git.loader.GitLoaderFromArchive which ingests a git repository wrapped in an archive. "# .to_string() ) ); assert_eq!(metadata, vec![]); } } upstream-ontologist-0.3.6/src/repology.rs000064400000000000000000000064131046102023000166720ustar 00000000000000use crate::{ProviderError, UpstreamMetadata}; /// Parse a Repology name into family and name pub fn parse_repology_name(name: &str) -> Option<(&str, &str)> { let (family, name) = name.split_once(':')?; Some((family, name)) } fn perl_name_to_module(name: &str) -> String { name.split('-') .map(|x| { let mut x = x.chars(); x.next() .unwrap() .to_uppercase() .chain(x) .collect::() }) .collect::>() .join("::") } /// Find upstream metadata for a package from Repology pub async fn find_upstream_from_repology(name: &str) -> Result { let (family, name) = parse_repology_name(name) .ok_or_else(|| ProviderError::Other("Invalid repology name".to_string()))?; match family { "python" => crate::providers::python::remote_pypi_metadata(name).await, "go" => crate::providers::go::remote_go_metadata(name), "ruby" => 
crate::providers::ruby::remote_rubygem_metadata(name).await, "node" => crate::providers::node::remote_npm_metadata(name).await, "perl" => crate::providers::perl::remote_cpan_data(&perl_name_to_module(name)).await, "rust" => crate::providers::rust::remote_crate_data(name).await, "haskell" => crate::providers::haskell::remote_hackage_data(name).await, "apmod" => Ok(UpstreamMetadata::new()), "coq" => Ok(UpstreamMetadata::new()), "cursors" => Ok(UpstreamMetadata::new()), "deadbeef" => Ok(UpstreamMetadata::new()), "emacs" => Ok(UpstreamMetadata::new()), "erlang" => Ok(UpstreamMetadata::new()), "fonts" => Ok(UpstreamMetadata::new()), "fortunes" => Ok(UpstreamMetadata::new()), "fusefs" => Ok(UpstreamMetadata::new()), "gimp" => Ok(UpstreamMetadata::new()), "gstreamer" => Ok(UpstreamMetadata::new()), "gtktheme" => Ok(UpstreamMetadata::new()), "raku" => Ok(UpstreamMetadata::new()), "ros" => Ok(UpstreamMetadata::new()), "haxe" => Ok(UpstreamMetadata::new()), "icons" => Ok(UpstreamMetadata::new()), "java" => Ok(UpstreamMetadata::new()), "js" => Ok(UpstreamMetadata::new()), "julia" => Ok(UpstreamMetadata::new()), "ladspa" => Ok(UpstreamMetadata::new()), "lisp" => Ok(UpstreamMetadata::new()), "lua" => Ok(UpstreamMetadata::new()), "lv2" => Ok(UpstreamMetadata::new()), "mingw" => Ok(UpstreamMetadata::new()), "nextcloud" => Ok(UpstreamMetadata::new()), "nginx" => Ok(UpstreamMetadata::new()), "nim" => Ok(UpstreamMetadata::new()), "ocaml" => Ok(UpstreamMetadata::new()), "opencpn" => Ok(UpstreamMetadata::new()), "rhythmbox" => Ok(UpstreamMetadata::new()), "texlive" => Ok(UpstreamMetadata::new()), "tryton" => Ok(UpstreamMetadata::new()), "vapoursynth" => Ok(UpstreamMetadata::new()), "vdr" => Ok(UpstreamMetadata::new()), "vim" => Ok(UpstreamMetadata::new()), "xdrv" => Ok(UpstreamMetadata::new()), "xemacs" => Ok(UpstreamMetadata::new()), name => { log::warn!("Unknown family: {}", name); Ok(UpstreamMetadata::new()) } } } 
upstream-ontologist-0.3.6/src/testdata/cpan.json000064400000000000000000000017361046102023000201140ustar 00000000000000{ "version" : "0.02", "directory" : false, "mime" : "text/x-script.perl-module", "download_url" : "https://cpan.metacpan.org/authors/id/C/CT/CTRLSOFT/Parse-Pidl-0.02.tar.gz", "sloc" : 5, "status" : "latest", "pod_lines" : [], "version_numified" : 0.02, "stat" : { "uid" : 1009, "gid" : 1009, "mode" : 33188, "mtime" : 1135865157, "size" : 316 }, "indexed" : true, "release" : "Parse-Pidl-0.02", "pod" : "", "author" : "CTRLSOFT", "deprecated" : false, "path" : "lib/Parse/Pidl.pm", "level" : 2, "distribution" : "Parse-Pidl", "module" : [ { "indexed" : true, "authorized" : true, "version" : "0.02", "name" : "Parse::Pidl", "version_numified" : 0.02 } ], "id" : "GIj7X35DE9AYnQ6_TrGxB_VTeOU", "binary" : false, "slop" : 0, "maturity" : "released", "name" : "Pidl.pm", "authorized" : true, "date" : "2005-12-29T14:10:44" } upstream-ontologist-0.3.6/src/testdata/crates.io.json000064400000000000000000000057171046102023000210650ustar 00000000000000{"categories":[],"crate":{"badges":[],"categories":[],"created_at":"2022-10-29T18:45:24.262450+00:00","description":"Friendly distributed version control 
system","documentation":"https://www.breezy-vcs.org/doc/","downloads":1111,"exact_match":false,"homepage":"https://www.breezy-vcs.org/","id":"breezy","keywords":[],"links":{"owner_team":"/api/v1/crates/breezy/owner_team","owner_user":"/api/v1/crates/breezy/owner_user","owners":"/api/v1/crates/breezy/owners","reverse_dependencies":"/api/v1/crates/breezy/reverse_dependencies","version_downloads":"/api/v1/crates/breezy/downloads","versions":null},"max_stable_version":"3.3.4","max_version":"3.3.4","name":"breezy","newest_version":"3.3.4","recent_downloads":400,"repository":"https://code.launchpad.net/brz","updated_at":"2023-06-04T22:19:43.454989+00:00","versions":[815616,653355]},"keywords":[],"versions":[{"audit_actions":[{"action":"publish","time":"2023-06-04T22:19:43.454989+00:00","user":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer Vernooij","url":"https://github.com/jelmer"}}],"bin_names":["brz"],"checksum":"fdc7f83f8febcd0dca02075844696a99d27d5d2014d9195ac0a1ae94c8393886","crate":"breezy","crate_size":25781,"created_at":"2023-06-04T22:19:43.454989+00:00","dl_path":"/api/v1/crates/breezy/3.3.4/download","downloads":630,"features":{"default":["i18n"],"i18n":["dep:gettext-rs"]},"has_lib":true,"id":815616,"lib_links":null,"license":"GPL-2.0+","links":{"authors":"/api/v1/crates/breezy/3.3.4/authors","dependencies":"/api/v1/crates/breezy/3.3.4/dependencies","version_downloads":"/api/v1/crates/breezy/3.3.4/downloads"},"num":"3.3.4","published_by":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer 
Vernooij","url":"https://github.com/jelmer"},"readme_path":"/api/v1/crates/breezy/3.3.4/readme","rust_version":null,"updated_at":"2023-06-04T22:19:43.454989+00:00","yanked":false},{"audit_actions":[{"action":"publish","time":"2022-10-29T18:45:24.262450+00:00","user":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer Vernooij","url":"https://github.com/jelmer"}}],"bin_names":["brz"],"checksum":"f41676959b4cb2f828b5f368e64df1048a2c1e6536aff5364e6183e980ecf0f9","crate":"breezy","crate_size":8620291,"created_at":"2022-10-29T18:45:24.262450+00:00","dl_path":"/api/v1/crates/breezy/3.3.0/download","downloads":480,"features":{},"has_lib":false,"id":653355,"lib_links":null,"license":"GPL-2.0+","links":{"authors":"/api/v1/crates/breezy/3.3.0/authors","dependencies":"/api/v1/crates/breezy/3.3.0/dependencies","version_downloads":"/api/v1/crates/breezy/3.3.0/downloads"},"num":"3.3.0","published_by":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer Vernooij","url":"https://github.com/jelmer"},"readme_path":"/api/v1/crates/breezy/3.3.0/readme","rust_version":null,"updated_at":"2022-10-29T18:45:24.262450+00:00","yanked":false}]}upstream-ontologist-0.3.6/src/testdata/docdb-v.json000064400000000000000000000132041046102023000205020ustar 00000000000000{"shortname": "docdb-v", "name": "DocDB", "_id": "519a2fa6e88f3d77c1b32bdf", "url": "https://sourceforge.net/p/docdb-v/", "private": false, "short_description": "DocDB is a powerful and flexible collaborative web based document server which maintains a versioned list of documents. 
Information maintained in the database includes, author(s), title, topic(s), abstract, access restriction information, etc.", "creation_date": "2006-03-29", "summary": "", "external_homepage": "http://docdb-v.sourceforge.net", "video_url": "", "socialnetworks": [], "status": "active", "moved_to_url": "", "preferred_support_tool": "_url", "preferred_support_url": "http://sourceforge.net/tracker/?func=add&group_id=164024&atid=830064", "developers": [{"username": "mteck2", "name": "Marcia Teckenbrock", "url": "https://sourceforge.net/u/mteck2/"}, {"username": "vondo", "name": "Eric Vaandering", "url": "https://sourceforge.net/u/vondo/"}, {"username": "garren", "name": "L. Garren", "url": "https://sourceforge.net/u/garren/"}], "tools": [{"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/docdb-v/support/", "mount_label": "Support"}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/docdb-v/reviews/", "mount_label": "Reviews"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/docdb-v/files/", "mount_label": "Files"}, {"name": "summary", "mount_point": "summary", "url": "https://sourceforge.net/p/docdb-v/summary/", "mount_label": "Summary", "sourceforge_group_id": 164024}, {"name": "tickets", "mount_point": "feature-requests", "url": "https://sourceforge.net/p/docdb-v/feature-requests/", "mount_label": "Feature Requests", "api_url": "https://sourceforge.net/rest/p/docdb-v/feature-requests/"}, {"name": "tickets", "mount_point": "bugs", "url": "https://sourceforge.net/p/docdb-v/bugs/", "mount_label": "Bugs", "api_url": "https://sourceforge.net/rest/p/docdb-v/bugs/"}, {"name": "blog", "mount_point": "news", "url": "https://sourceforge.net/p/docdb-v/news/", "mount_label": "News", "api_url": "https://sourceforge.net/rest/p/docdb-v/news/"}, {"name": "cvs", "mount_point": "cvs", "url": "https://sourceforge.net/p/docdb-v/cvs/", "mount_label": "CVS"}, {"name": "git", "mount_point": "git", 
"url": "https://sourceforge.net/p/docdb-v/git/", "mount_label": "Git", "api_url": "https://sourceforge.net/rest/p/docdb-v/git/", "clone_url_https_anon": "https://git.code.sf.net/p/docdb-v/git", "clone_url_ro": "git://git.code.sf.net/p/docdb-v/git"}, {"name": "discussion", "mount_point": "discussion", "url": "https://sourceforge.net/p/docdb-v/discussion/", "mount_label": "Discussion", "api_url": "https://sourceforge.net/rest/p/docdb-v/discussion/"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/docdb-v/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/docdb-v/activity/"}, {"name": "mailman", "mount_point": "mailman", "url": "https://sourceforge.net/p/docdb-v/mailman/", "mount_label": "Mailing Lists"}], "labels": [], "categories": {"audience": [{"id": 2, "shortname": "end-users-desktop", "fullname": "End Users/Desktop", "fullpath": "Intended Audience :: by End-User Class :: End Users/Desktop"}], "developmentstatus": [{"id": 11, "shortname": "5-production-stable", "fullname": "5 - Production/Stable", "fullpath": "Development Status :: 5 - Production/Stable"}], "environment": [{"id": 237, "shortname": "web-based", "fullname": "Web-based", "fullpath": "User Interface :: Web-based"}], "language": [{"id": 176, "shortname": "perl", "fullname": "Perl", "fullpath": "Programming Language :: Perl"}], "license": [{"id": 15, "shortname": "gnu-general-public-license-version-2.0-gplv2", "fullname": "GNU General Public License version 2.0 (GPLv2)", "fullpath": "License :: OSI-Approved Open Source :: GNU General Public License version 2.0 (GPLv2)"}], "translation": [], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 235, "shortname": "os-independent", "fullname": "OS Independent", "fullpath": "Operating System :: OS Independent"}], "database": [{"id": 504, "shortname": "perl-dbi-dbd", "fullname": "Perl DBI/DBD", "fullpath": "Database Environment :: 
Database API :: Perl DBI/DBD"}, {"id": 524, "shortname": "mysql", "fullname": "MySQL", "fullpath": "Database Environment :: Network-based DBMS :: MySQL"}], "topic": [{"id": 607, "shortname": "project-management", "fullname": "Project Management", "fullpath": "Topic :: Business :: Project Management"}, {"id": 68, "shortname": "front-ends", "fullname": "Front-Ends", "fullpath": "Topic :: Database :: Front-Ends"}, {"id": 97, "shortname": "scientific-engineering", "fullname": "Scientific/Engineering", "fullpath": "Topic :: Scientific/Engineering"}]}, "icon_url": null, "screenshots": [{"url": "https://sourceforge.net/p/docdb-v/screenshot/68919.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68919.jpg/thumb", "caption": "List of Document Authors"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68189.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68189.jpg/thumb", "caption": "DocDB Homepage (reduced)"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68191.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68191.jpg/thumb", "caption": "Document View"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68195.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68195.jpg/thumb", "caption": "Meeting View"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68193.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68193.jpg/thumb", "caption": "Calendar of Events"}]}upstream-ontologist-0.3.6/src/testdata/gtab.json000064400000000000000000000102531046102023000201020ustar 00000000000000{"shortname": "gtab", "name": "gtab", "_id": "51adf6eae88f3d037b5e35bb", "url": "https://sourceforge.net/p/gtab/", "private": false, "short_description": "'gtab' is a guitar, bass and drum tablature tool.", "creation_date": "2004-11-12", "summary": "", "external_homepage": "http://gtab.sourceforge.net", "video_url": "", "socialnetworks": [], "status": "active", "moved_to_url": "", 
"preferred_support_tool": "", "preferred_support_url": "", "developers": [{"username": "m0ta", "name": "Matthias Vogelgesang", "url": "https://sourceforge.net/u/m0ta/"}, {"username": "seavan", "name": "Seavan", "url": "https://sourceforge.net/u/seavan/"}, {"username": "jeanseb", "name": "Jean-S\u00e9bastien Valette", "url": "https://sourceforge.net/u/jeanseb/"}, {"username": "ctrlsoft", "name": "Jelmer Vernooij", "url": "https://sourceforge.net/u/ctrlsoft/"}, {"username": "ebbex", "name": "Ebbex", "url": "https://sourceforge.net/u/ebbex/"}, {"username": "ekolosov", "name": "BlackPanther", "url": "https://sourceforge.net/u/ekolosov/"}], "tools": [{"name": "summary", "mount_point": "summary", "url": "https://sourceforge.net/p/gtab/summary/", "mount_label": "Summary", "sourceforge_group_id": 124026}, {"name": "blog", "mount_point": "news", "url": "https://sourceforge.net/p/gtab/news/", "mount_label": "News", "api_url": "https://sourceforge.net/rest/p/gtab/news/"}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/gtab/reviews/", "mount_label": "Reviews"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/gtab/files/", "mount_label": "Files"}, {"name": "svn", "mount_point": "svn", "url": "https://sourceforge.net/p/gtab/svn/", "mount_label": "SVN", "api_url": "https://sourceforge.net/rest/p/gtab/svn/", "clone_url_https_anon": "https://svn.code.sf.net/p/gtab/svn/trunk", "clone_url_ro": "svn://svn.code.sf.net/p/gtab/svn/trunk"}, {"name": "tickets", "mount_point": "bugs", "url": "https://sourceforge.net/p/gtab/bugs/", "mount_label": "Bugs", "api_url": "https://sourceforge.net/rest/p/gtab/bugs/"}, {"name": "mailman", "mount_point": "mailman", "url": "https://sourceforge.net/p/gtab/mailman/", "mount_label": "Mailing Lists"}, {"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/gtab/support/", "mount_label": "Support"}, {"name": "cvs", "mount_point": "cvs", "url": 
"https://sourceforge.net/p/gtab/cvs/", "mount_label": "CVS"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/gtab/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/gtab/activity/"}], "labels": [], "categories": {"audience": [{"id": 2, "shortname": "end-users-desktop", "fullname": "End Users/Desktop", "fullpath": "Intended Audience :: by End-User Class :: End Users/Desktop"}], "developmentstatus": [{"id": 9, "shortname": "3-alpha", "fullname": "3 - Alpha", "fullpath": "Development Status :: 3 - Alpha"}], "environment": [{"id": 481, "shortname": "wxwidgets", "fullname": "wxWidgets", "fullpath": "User Interface :: Toolkits/Libraries :: wxWidgets"}, {"id": 479, "shortname": "qt", "fullname": "Qt", "fullpath": "User Interface :: Toolkits/Libraries :: Qt"}], "language": [{"id": 165, "shortname": "c-plus-plus", "fullname": "C++", "fullpath": "Programming Language :: C++"}], "license": [{"id": 15, "shortname": "gnu-general-public-license-version-2.0-gplv2", "fullname": "GNU General Public License version 2.0 (GPLv2)", "fullpath": "License :: OSI-Approved Open Source :: GNU General Public License version 2.0 (GPLv2)"}], "translation": [{"id": 275, "shortname": "english", "fullname": "English", "fullpath": "Translations :: English"}, {"id": 279, "shortname": "german", "fullname": "German", "fullpath": "Translations :: German"}], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 202, "shortname": "bsd", "fullname": "BSD", "fullpath": "Operating System :: BSD"}, {"id": 3616, "shortname": "windows", "fullname": "Windows", "fullpath": "Operating System :: Windows"}], "database": [], "topic": [{"id": 120, "shortname": "audio-editing", "fullname": "Audio Editing", "fullpath": "Topic :: Multimedia :: Sound/Audio :: Audio Editing"}]}, "icon_url": null, "screenshots": 
[]}upstream-ontologist-0.3.6/src/testdata/hg-diff.json000064400000000000000000000063421046102023000204750ustar 00000000000000{"shortname": "hg-diff", "name": "hg-diff", "_id": "4f0389dc1be1ce4a97000127", "url": "https://sourceforge.net/p/hg-diff/", "private": false, "short_description": "hg-diff is a simple GUI program to browse mercurial revisions. It is used to display a summary of all changes between two revisions and to display a graphical comparison of the two versions of each changed file.", "creation_date": "2012-01-03", "summary": "A GUI program to compare mercurial revisions.", "external_homepage": "http://hg-diff.sourceforge.net/", "video_url": "", "socialnetworks": [], "status": "active", "moved_to_url": "", "preferred_support_tool": "tickets", "preferred_support_url": "", "developers": [{"username": "goetzpf", "name": "Goetz Pfeiffer", "url": "https://sourceforge.net/u/goetzpf/"}], "tools": [{"name": "hg", "mount_point": "code", "url": "https://sourceforge.net/p/hg-diff/code/", "mount_label": "Code", "api_url": "https://sourceforge.net/rest/p/hg-diff/code/", "clone_url_ro": "http://hg.code.sf.net/p/hg-diff/code"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/hg-diff/files/", "mount_label": "Files"}, {"name": "summary", "mount_point": "summary", "url": "https://sourceforge.net/p/hg-diff/summary/", "mount_label": "Summary", "sourceforge_group_id": 661105}, {"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/hg-diff/support/", "mount_label": "Support"}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/hg-diff/reviews/", "mount_label": "Reviews"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/hg-diff/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/hg-diff/activity/"}], "labels": [""], "categories": {"audience": [{"id": 3, "shortname": "developers", "fullname": "Developers", "fullpath": "Intended 
Audience :: by End-User Class :: Developers"}], "developmentstatus": [{"id": 11, "shortname": "5-production-stable", "fullname": "5 - Production/Stable", "fullpath": "Development Status :: 5 - Production/Stable"}], "environment": [{"id": 478, "shortname": "tk", "fullname": "Tk", "fullpath": "User Interface :: Toolkits/Libraries :: Tk"}], "language": [{"id": 178, "shortname": "python", "fullname": "Python", "fullpath": "Programming Language :: Python"}], "license": [{"id": 679, "shortname": "gnu-general-public-license-version-3.0-gplv3", "fullname": "GNU General Public License version 3.0 (GPLv3)", "fullpath": "License :: OSI-Approved Open Source :: GNU General Public License version 3.0 (GPLv3)"}], "translation": [{"id": 275, "shortname": "english", "fullname": "English", "fullpath": "Translations :: English"}], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 202, "shortname": "bsd", "fullname": "BSD", "fullpath": "Operating System :: BSD"}], "database": [], "topic": [{"id": 841, "shortname": "mercurial", "fullname": "Mercurial", "fullpath": "Topic :: Software Development :: Version Control :: Mercurial"}]}, "icon_url": null, "screenshots": [{"url": "https://sourceforge.net/p/hg-diff/screenshot/hg-diff-small.png", "thumbnail_url": "https://sourceforge.net/p/hg-diff/screenshot/hg-diff-small.png/thumb", "caption": "the hg-diff main window"}]}upstream-ontologist-0.3.6/src/testdata/npm.json000064400000000000000000000070351046102023000177630ustar 00000000000000{"_id":"leftpad","_rev":"9-5c1f9bbc73c2fe312cd34d79f82d64f3","name":"leftpad","description":"left pad numbers","dist-tags":{"latest":"0.0.1"},"versions":{"0.0.0":{"name":"leftpad","version":"0.0.0","description":"left pad numbers","main":"index.js","scripts":{"test":"echo \"Error: no test specified\" && exit 
1"},"repository":{"type":"git","url":"https://github.com/tmcw/leftpad.git"},"keywords":["pad","numbers","formatting","format"],"author":{"name":"Tom MacWright","email":"tom@macwright.org"},"license":"BSD","bugs":{"url":"https://github.com/tmcw/leftpad/issues"},"_id":"leftpad@0.0.0","dist":{"shasum":"020c9ad0787216ba0f30d79d479b4b355d7d39c3","tarball":"https://registry.npmjs.org/leftpad/-/leftpad-0.0.0.tgz","integrity":"sha512-/t6U+lg9XlxfLONkDyRclQrn5Hzp6WQsC79hw/6DSySh3k2tyqgVTQCJTUnRP4PrqlUWqWX2zj13JeZSUGzLdw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCNH1vf5b0RkSCpAq8NRdjyCT7c5SSWcOHm/BoK2uraTgIgRkXvz+CIXcQR+vQzX5p4ruPrMNOnwJZ1OLOsb+DW5HM="}]},"_from":".","_npmVersion":"1.2.23","_npmUser":{"name":"tmcw","email":"tom@macwright.org"},"maintainers":[{"name":"tmcw","email":"tom@macwright.org"}],"directories":{},"deprecated":"Use the built-in String.padStart function instead"},"0.0.1":{"name":"leftpad","version":"0.0.1","description":"left pad numbers","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git+https://github.com/tmcw/leftpad.git"},"keywords":["pad","numbers","formatting","format"],"files":["index.js"],"author":{"name":"Tom 
MacWright","email":"tom@macwright.org"},"license":"BSD-3-Clause","devDependencies":{"jsverify":"^0.8.2"},"gitHead":"db1442a0556c2b133627ffebf455a78a1ced64b9","bugs":{"url":"https://github.com/tmcw/leftpad/issues"},"homepage":"https://github.com/tmcw/leftpad#readme","_id":"leftpad@0.0.1","_shasum":"86b1a4de4face180ac545a83f1503523d8fed115","_from":".","_npmVersion":"4.2.0","_nodeVersion":"7.9.0","_npmUser":{"name":"tmcw","email":"tom+npm@macwright.org"},"dist":{"shasum":"86b1a4de4face180ac545a83f1503523d8fed115","tarball":"https://registry.npmjs.org/leftpad/-/leftpad-0.0.1.tgz","integrity":"sha512-kBAuxBQJlJ85LDc+SnGSX6gWJnJR9Qk4lbgXmz/qPfCOCieCk7BgoN3YvzoNr5BUjqxQDOQxawJJvXXd6c+6Mg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQCP9QFGBbsmVNj2JCpsX+YiML3z4OaES8TTiHZtHZNctwIhAKydP47vpc5XAMSz88TpgUT6S96t/TLu+RQFvkYIUACe"}]},"maintainers":[{"name":"tmcw","email":"tom@macwright.org"}],"_npmOperationalInternal":{"host":"packages-18-east.internal.npmjs.com","tmp":"tmp/leftpad-0.0.1.tgz_1493781560715_0.519889178685844"},"directories":{},"deprecated":"Use the built-in String.padStart function instead"}},"readme":"## leftpad\n\n[![CircleCI](https://circleci.com/gh/tmcw/leftpad/tree/master.svg?style=shield)](https://circleci.com/gh/tmcw/leftpad/tree/master)\n\nLike the [pad module](https://github.com/wdavidw/node-pad), except I'll remember\nthe argument order.\n\n```js\nvar leftpad = require('leftpad');\n\nleftpad(5, 10);\n'0000000005'\n```\n","maintainers":[{"email":"tom@macwright.org","name":"tmcw"}],"time":{"modified":"2022-06-19T11:27:17.024Z","created":"2013-06-19T01:28:05.244Z","0.0.0":"2013-06-19T01:28:06.350Z","0.0.1":"2017-05-03T03:19:21.248Z"},"author":{"name":"Tom 
MacWright","email":"tom@macwright.org"},"repository":{"type":"git","url":"git+https://github.com/tmcw/leftpad.git"},"homepage":"https://github.com/tmcw/leftpad#readme","keywords":["pad","numbers","formatting","format"],"bugs":{"url":"https://github.com/tmcw/leftpad/issues"},"license":"BSD-3-Clause","readmeFilename":"README.md"}upstream-ontologist-0.3.6/src/testdata/pecl.html000064400000000000000000000526021046102023000201070ustar 00000000000000 PECL :: Package :: smbclient
Login  |  Packages  |  Support  |  Bugs
Top Level :: File System :: smbclient

smbclient

Package Information
Summary A PHP wrapper for libsmbclient
Maintainers Eduardo Bacchi Kienetz (lead) [details]
Remi Collet (contributor) [details]
License BSD 2-clause
Description smbclient is a PHP extension that uses Samba's libsmbclient library to provide
Samba related functions and 'smb' streams to PHP programs.
Homepage https://github.com/eduardok/libsmbclient-php

[ Latest Tarball ] [ Changelog ] [ View Statistics ]
[ Browse Source ] [ Package Bugs ]

Available Releases
Version State Release Date Downloads  
1.1.1 stable 2023-04-17 smbclient-1.1.1.tgz (34.6kB) [ Changelog ]
1.1.0 stable 2023-04-01 smbclient-1.1.0.tgz (34.3kB) [ Changelog ]
1.0.7 stable 2023-04-01 smbclient-1.0.7.tgz (32.5kB) [ Changelog ]
1.0.6 stable 2021-02-28 smbclient-1.0.6.tgz (32.4kB) [ Changelog ]
1.0.5 stable 2021-02-11 smbclient-1.0.5.tgz (32.2kB) [ Changelog ]
1.0.4 stable 2021-01-22 smbclient-1.0.4.tgz (32.1kB) [ Changelog ]
1.0.3 stable 2021-01-21 smbclient-1.0.3.tgz (31.9kB) [ Changelog ]
1.0.2 stable 2021-01-20 smbclient-1.0.2.tgz (31.8kB) [ Changelog ]
1.0.1 stable 2020-12-29 smbclient-1.0.1.tgz (31.6kB) [ Changelog ]
1.0.0 stable 2018-12-24 smbclient-1.0.0.tgz (31.3kB) [ Changelog ]
0.9.0 stable 2017-02-10 smbclient-0.9.0.tgz (31.1kB) [ Changelog ]
0.8.0 stable 2016-03-01 smbclient-0.8.0.tgz (30.2kB) [ Changelog ]
0.8.0RC1 beta 2015-12-08 smbclient-0.8.0RC1.tgz (30.1kB) [ Changelog ]


Dependencies
Release 1.1.1: PEAR Package: PEAR 1.9.5 or newer
PHP Version: PHP 5.3.7 or newer
Release 1.1.0: PHP Version: PHP 5.3.7 or newer
PEAR Package: PEAR 1.9.5 or newer
Release 1.0.7: PHP Version: PHP 5.3.0 or newer
PEAR Package: PEAR 1.9.5 or newer
Dependencies for older releases can be found on the release overview page.
PRIVACY POLICY  |  CREDITS
Copyright © 2001-2023 The PHP Group
All rights reserved.
Last updated: Mon Jun 01 07:05:01 2020 UTC
Bandwidth and hardware provided by: pair Networks
upstream-ontologist-0.3.6/src/testdata/pypi.json000064400000000000000000000410221046102023000201440ustar 00000000000000{"info":{"author":null,"author_email":null,"bugtrack_url":null,"classifiers":["Development Status :: 4 - Beta","License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)","Operating System :: POSIX","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Programming Language :: Python :: Implementation :: CPython","Programming Language :: Python :: Implementation :: PyPy"],"description":"A Python implementation of 3-way merge of texts.\n\nGiven BASE, OTHER, THIS, tries to produce a combined text\nincorporating the changes from both BASE->OTHER and BASE->THIS.\nAll three will typically be sequences of lines.\n\nUsage\n=====\n\nFrom the command-line::\n\n $ echo foo > mine\n $ echo bar > base\n $ echo blah > other\n $ python -m merge3 mine base other > merged\n $ cat merged\n\nOr from Python::\n\n >>> import merge3\n >>> m3 = merge3.Merge3(\n ... ['common\\n', 'base\\n'],\n ... ['common\\n', 'a\\n'],\n ... 
['common\\n', 'b\\n'])\n >>> list(m3.merge_annotated())\n ['u | common\\n', '<<<<\\n', 'A | a\\n', '----\\n', 'B | b\\n', '>>>>\\n']\n","description_content_type":"text/x-rst","docs_url":null,"download_url":null,"downloads":{"last_day":-1,"last_month":-1,"last_week":-1},"dynamic":null,"home_page":null,"keywords":null,"license":"GNU GPLv2 or later","maintainer":null,"maintainer_email":"Breezy Developers ","name":"merge3","package_url":"https://pypi.org/project/merge3/","platform":null,"project_url":"https://pypi.org/project/merge3/","project_urls":{"GitHub":"https://github.com/breezy-team/merge3","Homepage":"https://www.breezy-vcs.org/"},"provides_extra":null,"release_url":"https://pypi.org/project/merge3/0.0.15/","requires_dist":["ruff==0.4.3; extra == \"dev\""],"requires_python":">=3.8","summary":"Python implementation of 3-way merge","version":"0.0.15","yanked":false,"yanked_reason":null},"last_serial":23067049,"releases":{"0.0.1":[{"comment_text":"","digests":{"blake2b_256":"d4d1804a713c68d7c83e71c23318e52a0613ee3053e7fbe65f143e3de39bda13","md5":"c9103afe839ec1ab60e2cc3f1ac31ef3","sha256":"d3235b34beeca02cae6340c84efe308589bae472b6f72d4eaf879a3a21864b3c"},"downloads":-1,"filename":"merge3-0.0.1.tar.gz","has_sig":false,"md5_digest":"c9103afe839ec1ab60e2cc3f1ac31ef3","packagetype":"sdist","python_version":"source","requires_python":null,"size":8193,"upload_time":"2018-07-12T00:16:05","upload_time_iso_8601":"2018-07-12T00:16:05.950804Z","url":"https://files.pythonhosted.org/packages/d4/d1/804a713c68d7c83e71c23318e52a0613ee3053e7fbe65f143e3de39bda13/merge3-0.0.1.tar.gz","yanked":false,"yanked_reason":null}],"0.0.10":[{"comment_text":"","digests":{"blake2b_256":"650d529f846cbb2b257404c6d371c1ab5d8925c83b3ceb80efdfdbe5b1a05391","md5":"2e6f88710525d2db6c3f839a489eba74","sha256":"a6b25ae1d1b05b3325242360ced4d5a266de09e2cb2527f9a6dbeb17291bdfe7"},"downloads":-1,"filename":"merge3-0.0.10-py3-none-any.whl","has_sig":false,"md5_digest":"2e6f88710525d2db6c3f839a489eba74","pac
kagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":17873,"upload_time":"2022-10-20T21:52:20","upload_time_iso_8601":"2022-10-20T21:52:20.742331Z","url":"https://files.pythonhosted.org/packages/65/0d/529f846cbb2b257404c6d371c1ab5d8925c83b3ceb80efdfdbe5b1a05391/merge3-0.0.10-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"dda5d31be0033a9a7164282a5be0e00f66441a4e9f5fe222735b48d3fd431adf","md5":"3a1cae17cf54ac2a161e244c5da226e5","sha256":"841c688fc0b4054e30f6672a7af5acc9686d2b9875f9abd3954a14b590e4f626"},"downloads":-1,"filename":"merge3-0.0.10.tar.gz","has_sig":false,"md5_digest":"3a1cae17cf54ac2a161e244c5da226e5","packagetype":"sdist","python_version":"source","requires_python":null,"size":17387,"upload_time":"2022-10-20T21:52:23","upload_time_iso_8601":"2022-10-20T21:52:23.363088Z","url":"https://files.pythonhosted.org/packages/dd/a5/d31be0033a9a7164282a5be0e00f66441a4e9f5fe222735b48d3fd431adf/merge3-0.0.10.tar.gz","yanked":false,"yanked_reason":null}],"0.0.11":[{"comment_text":"","digests":{"blake2b_256":"402863ad6466eb4f0237476f822479567dfae0ccd512cb0d128d471fd6e48774","md5":"706b507d120b71f10c1839f45f4f40a7","sha256":"ef82fd95aeac124ea9a1616a6e2ef9368bc4ed0ba3cce5f21323ee9028d86206"},"downloads":-1,"filename":"merge3-0.0.11-py3-none-any.whl","has_sig":false,"md5_digest":"706b507d120b71f10c1839f45f4f40a7","packagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":17875,"upload_time":"2022-10-28T00:04:16","upload_time_iso_8601":"2022-10-28T00:04:16.219195Z","url":"https://files.pythonhosted.org/packages/40/28/63ad6466eb4f0237476f822479567dfae0ccd512cb0d128d471fd6e48774/merge3-0.0.11-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"1bef6543392d9dcca7694c9c9bff93562107c3a3c104165f98348de41a080cd3","md5":"ee9a2b9054b3123d6a783fc94f388ee6","sha256":"859ee1c31595c148f0961c55402779bc98c1c63dfdfca2f2cd7d443be6f0ab9c"},"downl
oads":-1,"filename":"merge3-0.0.11.tar.gz","has_sig":false,"md5_digest":"ee9a2b9054b3123d6a783fc94f388ee6","packagetype":"sdist","python_version":"source","requires_python":null,"size":16511,"upload_time":"2022-10-28T00:04:18","upload_time_iso_8601":"2022-10-28T00:04:18.241920Z","url":"https://files.pythonhosted.org/packages/1b/ef/6543392d9dcca7694c9c9bff93562107c3a3c104165f98348de41a080cd3/merge3-0.0.11.tar.gz","yanked":false,"yanked_reason":null}],"0.0.12":[{"comment_text":"","digests":{"blake2b_256":"125627d09d7bcc9222522f71aac3b0b1696132af4a11f9e0cdccd53bb32b6bfe","md5":"8ced7a7d8feff0235e3f58b51af24a0e","sha256":"ae65e506488778a046af040e424ceb041d0cee4b493c2ef2daac852cc92a8b84"},"downloads":-1,"filename":"merge3-0.0.12-py3-none-any.whl","has_sig":false,"md5_digest":"8ced7a7d8feff0235e3f58b51af24a0e","packagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":17874,"upload_time":"2022-11-20T12:25:16","upload_time_iso_8601":"2022-11-20T12:25:16.528915Z","url":"https://files.pythonhosted.org/packages/12/56/27d09d7bcc9222522f71aac3b0b1696132af4a11f9e0cdccd53bb32b6bfe/merge3-0.0.12-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"7d1d1a2a0ff25b18cc3b7af41180821099696c2c34e4459fff09a2d19729281e","md5":"1e62f7b01f2336f8771ec2edb6b9f991","sha256":"fd3fc873dcf60b9944606d125f72643055c739ff41793979ccbdea3ea6818d36"},"downloads":-1,"filename":"merge3-0.0.12.tar.gz","has_sig":false,"md5_digest":"1e62f7b01f2336f8771ec2edb6b9f991","packagetype":"sdist","python_version":"source","requires_python":null,"size":17454,"upload_time":"2022-11-20T12:25:18","upload_time_iso_8601":"2022-11-20T12:25:18.409844Z","url":"https://files.pythonhosted.org/packages/7d/1d/1a2a0ff25b18cc3b7af41180821099696c2c34e4459fff09a2d19729281e/merge3-0.0.12.tar.gz","yanked":false,"yanked_reason":null}],"0.0.13":[{"comment_text":"","digests":{"blake2b_256":"61fc0eb72422b8f64643ca7d9bc2b9076ec550388ff8e1022425fa11e7012e94","md5":"da44d972
6a6b82c0146b6235d8e465cd","sha256":"4f8ee5f8c61823ff7c3ae68e6d0d2966bbf249bb08dc5e6f2abffd8d97c62cf3"},"downloads":-1,"filename":"merge3-0.0.13-py3-none-any.whl","has_sig":false,"md5_digest":"da44d9726a6b82c0146b6235d8e465cd","packagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":14680,"upload_time":"2023-02-07T20:39:24","upload_time_iso_8601":"2023-02-07T20:39:24.442838Z","url":"https://files.pythonhosted.org/packages/61/fc/0eb72422b8f64643ca7d9bc2b9076ec550388ff8e1022425fa11e7012e94/merge3-0.0.13-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"dc91647a2942b6f308c7dce358bec770fe62ee0689cfd1dd218b66e244acde7d","md5":"478955e6f5563644e0e62aec0c572aa5","sha256":"8abda1d2d49776323d23d09bfdd80d943a57d43d28d6152ffd2c87956a9b6b54"},"downloads":-1,"filename":"merge3-0.0.13.tar.gz","has_sig":false,"md5_digest":"478955e6f5563644e0e62aec0c572aa5","packagetype":"sdist","python_version":"source","requires_python":null,"size":13603,"upload_time":"2023-02-07T20:39:26","upload_time_iso_8601":"2023-02-07T20:39:26.975182Z","url":"https://files.pythonhosted.org/packages/dc/91/647a2942b6f308c7dce358bec770fe62ee0689cfd1dd218b66e244acde7d/merge3-0.0.13.tar.gz","yanked":false,"yanked_reason":null}],"0.0.14":[{"comment_text":"","digests":{"blake2b_256":"17a85e085653871c70d1b139d7888d0f79ba82d130106157cfc16a9e8078d086","md5":"8546ae3fc037d69070d23ec5a0d8a831","sha256":"7ac0aadbd9ff5bea89ba9bd3796cb26e0df361dc86a381055ce0a45a7b8726ec"},"downloads":-1,"filename":"merge3-0.0.14-py3-none-any.whl","has_sig":false,"md5_digest":"8546ae3fc037d69070d23ec5a0d8a831","packagetype":"bdist_wheel","python_version":"py3","requires_python":">=3.6","size":7950,"upload_time":"2023-09-17T11:50:26","upload_time_iso_8601":"2023-09-17T11:50:26.049150Z","url":"https://files.pythonhosted.org/packages/17/a8/5e085653871c70d1b139d7888d0f79ba82d130106157cfc16a9e8078d086/merge3-0.0.14-py3-none-any.whl","yanked":false,"yanked_reason":null},{"c
omment_text":"","digests":{"blake2b_256":"af4098be4a5641b0244be5881ff5e00057f8227ff183d8675a697bdfeae43b1a","md5":"e20464fec7d124550c3e2c1616c1a0e9","sha256":"30406e99386f4a65280fb9c43e681890fa2a1d839cac2759d156c7cc16030159"},"downloads":-1,"filename":"merge3-0.0.14.tar.gz","has_sig":false,"md5_digest":"e20464fec7d124550c3e2c1616c1a0e9","packagetype":"sdist","python_version":"source","requires_python":">=3.6","size":17742,"upload_time":"2023-09-17T11:50:27","upload_time_iso_8601":"2023-09-17T11:50:27.713705Z","url":"https://files.pythonhosted.org/packages/af/40/98be4a5641b0244be5881ff5e00057f8227ff183d8675a697bdfeae43b1a/merge3-0.0.14.tar.gz","yanked":false,"yanked_reason":null}],"0.0.15":[{"comment_text":"","digests":{"blake2b_256":"d2cd67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8","md5":"0672769e1ac41c6490c68d9d64d142e2","sha256":"97b5f54a17181be261fde03a10cf55f34f910b19fe81e21cbd5fe87a3395c4a6"},"downloads":-1,"filename":"merge3-0.0.15-py3-none-any.whl","has_sig":false,"md5_digest":"0672769e1ac41c6490c68d9d64d142e2","packagetype":"bdist_wheel","python_version":"py3","requires_python":">=3.8","size":14987,"upload_time":"2024-05-05T16:18:24","upload_time_iso_8601":"2024-05-05T16:18:24.296532Z","url":"https://files.pythonhosted.org/packages/d2/cd/67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8/merge3-0.0.15-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"91e1fe09c161f80b5a8d8ede3270eadedac7e59a64ea1c313b97c386234480c1","md5":"26b5483e68cc6c13f0d86edbb6daa72a","sha256":"d3eac213d84d56dfc9e39552ac8246c7860a940964ebeed8a8be4422f6492baf"},"downloads":-1,"filename":"merge3-0.0.15.tar.gz","has_sig":false,"md5_digest":"26b5483e68cc6c13f0d86edbb6daa72a","packagetype":"sdist","python_version":"source","requires_python":">=3.8","size":14121,"upload_time":"2024-05-05T16:18:26","upload_time_iso_8601":"2024-05-05T16:18:26.027455Z","url":"https://files.pythonhosted.org/packages/91/e1/fe09c161f80b5a8d
8ede3270eadedac7e59a64ea1c313b97c386234480c1/merge3-0.0.15.tar.gz","yanked":false,"yanked_reason":null}],"0.0.2":[{"comment_text":"","digests":{"blake2b_256":"8b5cbaef7edc9e570a48a82be3bcf92db86fe3d3dcbebe40ef1113e614149f58","md5":"d5b14233bd3de5b93bb046272fa57c79","sha256":"35f2d6a5ea45d6dc16afb577205e5dd131146f2f8aa446e34f4228eb09afc52f"},"downloads":-1,"filename":"merge3-0.0.2.tar.gz","has_sig":false,"md5_digest":"d5b14233bd3de5b93bb046272fa57c79","packagetype":"sdist","python_version":"source","requires_python":null,"size":8937,"upload_time":"2018-08-02T18:36:08","upload_time_iso_8601":"2018-08-02T18:36:08.506432Z","url":"https://files.pythonhosted.org/packages/8b/5c/baef7edc9e570a48a82be3bcf92db86fe3d3dcbebe40ef1113e614149f58/merge3-0.0.2.tar.gz","yanked":false,"yanked_reason":null}],"0.0.3":[{"comment_text":"","digests":{"blake2b_256":"d50e500030c5835802ff4732a6fff92db94de3425e92bd4ed0800d4ec55b78c8","md5":"f7eff363c5f9e8b26eff2ff4ee1136da","sha256":"891d38e7e03498d5e482e19a701abf8b823b15b86966bd5e5c00da7280500da6"},"downloads":-1,"filename":"merge3-0.0.3.tar.gz","has_sig":false,"md5_digest":"f7eff363c5f9e8b26eff2ff4ee1136da","packagetype":"sdist","python_version":"source","requires_python":null,"size":16637,"upload_time":"2020-05-23T22:20:20","upload_time_iso_8601":"2020-05-23T22:20:20.037323Z","url":"https://files.pythonhosted.org/packages/d5/0e/500030c5835802ff4732a6fff92db94de3425e92bd4ed0800d4ec55b78c8/merge3-0.0.3.tar.gz","yanked":false,"yanked_reason":null}],"0.0.4":[{"comment_text":"","digests":{"blake2b_256":"5b51603938bac36447a2416d58087a21acb488308b6ad90554a5c29a71b47ad3","md5":"7369a27b187f4d87350a5784189c63ec","sha256":"33c896c436f319f5344647abd3b12d4738bc45bd7db490fe54f997f5484a14c0"},"downloads":-1,"filename":"merge3-0.0.4.tar.gz","has_sig":false,"md5_digest":"7369a27b187f4d87350a5784189c63ec","packagetype":"sdist","python_version":"source","requires_python":null,"size":16647,"upload_time":"2021-02-05T01:56:29","upload_time_iso_8601":"2021-02-05
T01:56:29.537070Z","url":"https://files.pythonhosted.org/packages/5b/51/603938bac36447a2416d58087a21acb488308b6ad90554a5c29a71b47ad3/merge3-0.0.4.tar.gz","yanked":false,"yanked_reason":null}],"0.0.7":[{"comment_text":"","digests":{"blake2b_256":"9a3acd00ce76e31e5a011ea1025c6063c1897b34650f7f6752d26715546bd860","md5":"8e7dc81be76ce440069858c0552d9699","sha256":"0cd4eecba802733866ace6ca4e68eacbea31e4ca88eb410a09622d9631520258"},"downloads":-1,"filename":"merge3-0.0.7.tar.gz","has_sig":false,"md5_digest":"8e7dc81be76ce440069858c0552d9699","packagetype":"sdist","python_version":"source","requires_python":null,"size":16909,"upload_time":"2021-03-13T00:34:24","upload_time_iso_8601":"2021-03-13T00:34:24.302010Z","url":"https://files.pythonhosted.org/packages/9a/3a/cd00ce76e31e5a011ea1025c6063c1897b34650f7f6752d26715546bd860/merge3-0.0.7.tar.gz","yanked":false,"yanked_reason":null}],"0.0.8":[{"comment_text":"","digests":{"blake2b_256":"9ea0ebac6ed4b7c174832616731c3466e86768a2b856e03043ac55e1e5d49a5a","md5":"225c89dbf56f7b62767ac8a8750d5af4","sha256":"4ef90eda29fb6f291e5d5ee1103ae97e295e15826ef17abee3098f5ce46fe18b"},"downloads":-1,"filename":"merge3-0.0.8.tar.gz","has_sig":false,"md5_digest":"225c89dbf56f7b62767ac8a8750d5af4","packagetype":"sdist","python_version":"source","requires_python":null,"size":17376,"upload_time":"2021-03-13T13:36:11","upload_time_iso_8601":"2021-03-13T13:36:11.269467Z","url":"https://files.pythonhosted.org/packages/9e/a0/ebac6ed4b7c174832616731c3466e86768a2b856e03043ac55e1e5d49a5a/merge3-0.0.8.tar.gz","yanked":false,"yanked_reason":null}],"0.0.9":[{"comment_text":"","digests":{"blake2b_256":"7b8693994c5a6581ab7792ab917e5abfa509f7a1719f865e2f92a22304469176","md5":"f5f9edc8654c312840475b897cb12177","sha256":"e945c08c2aadcfd2d88c1511b839b90d3ce601bb5b9a39809d74b231bfc5ebcc"},"downloads":-1,"filename":"merge3-0.0.9.tar.gz","has_sig":false,"md5_digest":"f5f9edc8654c312840475b897cb12177","packagetype":"sdist","python_version":"source","requires_python":
null,"size":17177,"upload_time":"2022-09-25T01:10:06","upload_time_iso_8601":"2022-09-25T01:10:06.352005Z","url":"https://files.pythonhosted.org/packages/7b/86/93994c5a6581ab7792ab917e5abfa509f7a1719f865e2f92a22304469176/merge3-0.0.9.tar.gz","yanked":false,"yanked_reason":null}]},"urls":[{"comment_text":"","digests":{"blake2b_256":"d2cd67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8","md5":"0672769e1ac41c6490c68d9d64d142e2","sha256":"97b5f54a17181be261fde03a10cf55f34f910b19fe81e21cbd5fe87a3395c4a6"},"downloads":-1,"filename":"merge3-0.0.15-py3-none-any.whl","has_sig":false,"md5_digest":"0672769e1ac41c6490c68d9d64d142e2","packagetype":"bdist_wheel","python_version":"py3","requires_python":">=3.8","size":14987,"upload_time":"2024-05-05T16:18:24","upload_time_iso_8601":"2024-05-05T16:18:24.296532Z","url":"https://files.pythonhosted.org/packages/d2/cd/67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8/merge3-0.0.15-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"91e1fe09c161f80b5a8d8ede3270eadedac7e59a64ea1c313b97c386234480c1","md5":"26b5483e68cc6c13f0d86edbb6daa72a","sha256":"d3eac213d84d56dfc9e39552ac8246c7860a940964ebeed8a8be4422f6492baf"},"downloads":-1,"filename":"merge3-0.0.15.tar.gz","has_sig":false,"md5_digest":"26b5483e68cc6c13f0d86edbb6daa72a","packagetype":"sdist","python_version":"source","requires_python":">=3.8","size":14121,"upload_time":"2024-05-05T16:18:26","upload_time_iso_8601":"2024-05-05T16:18:26.027455Z","url":"https://files.pythonhosted.org/packages/91/e1/fe09c161f80b5a8d8ede3270eadedac7e59a64ea1c313b97c386234480c1/merge3-0.0.15.tar.gz","yanked":false,"yanked_reason":null}],"vulnerabilities":[]} upstream-ontologist-0.3.6/src/testdata/rubygem.json000064400000000000000000000021371046102023000206410ustar 
00000000000000{"name":"bullet","downloads":122616138,"version":"7.2.0","version_created_at":"2024-07-12T13:34:07.552Z","version_downloads":498664,"platform":"ruby","authors":"Richard Huang","info":"help to kill N+1 queries and unused eager loading.","licenses":["MIT"],"metadata":{"changelog_uri":"https://github.com/flyerhzm/bullet/blob/main/CHANGELOG.md","source_code_uri":"https://github.com/flyerhzm/bullet"},"yanked":false,"sha":"3502c8a1b4f5db77fc8f6d3dd89a6a8c1a968219a45e12ae6cbaa9c09967ea89","spec_sha":"c8cfdc6562e3e85302c624d2464c2f77b3a50a272ba15d64fcd021107fdaa0b8","project_uri":"https://rubygems.org/gems/bullet","gem_uri":"https://rubygems.org/gems/bullet-7.2.0.gem","homepage_uri":"https://github.com/flyerhzm/bullet","wiki_uri":null,"documentation_uri":null,"mailing_list_uri":null,"source_code_uri":"https://github.com/flyerhzm/bullet","bug_tracker_uri":null,"changelog_uri":"https://github.com/flyerhzm/bullet/blob/main/CHANGELOG.md","funding_uri":null,"dependencies":{"development":[],"runtime":[{"name":"activesupport","requirements":"\u003e= 3.0.0"},{"name":"uniform_notifier","requirements":"~\u003e 1.11"}]}}upstream-ontologist-0.3.6/src/testdata/swh.html000064400000000000000000000041751046102023000177670ustar 00000000000000

swh-loader-git

The Software Heritage Git Loader is a tool and a library to walk a local Git repository and inject into the SWH dataset all contained files that weren't known before.

The main entry points are:

  • :class:swh.loader.git.loader.GitLoader for the main loader which can ingest either local or remote git repository's contents. This is the main implementation deployed in production.

  • :class:swh.loader.git.from_disk.GitLoaderFromDisk which ingests only local git clone repository.

  • :class:swh.loader.git.loader.GitLoaderFromArchive which ingests a git repository wrapped in an archive.

License

This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

See top-level LICENSE file for the full text of the GNU General Public License along with this program.

Dependencies

Runtime

  • python3
  • python3-dulwich
  • python3-retrying
  • python3-swh.core
  • python3-swh.model
  • python3-swh.storage
  • python3-swh.scheduler

Test

  • python3-nose

Requirements

  • implementation language, Python3
  • coding guidelines: conform to PEP8
  • Git access: via dulwich

CLI Run

You can run the loader from a remote origin (loader) or from an origin on disk (from_disk) directly by calling:

swh loader -C <config-file> run git <git-repository-url>

or "git_disk".

Configuration sample

/tmp/git.yml:

storage:
  cls: remote
  args:
    url: http://localhost:5002/
upstream-ontologist-0.3.6/src/testdata/zsh.json000064400000000000000000000173701046102023000200000ustar 00000000000000{"shortname": "zsh", "name": "zsh", "_id": "5102b27f5fcbc919e876e8a3", "url": "https://sourceforge.net/p/zsh/", "private": false, "short_description": "zsh is a shell probably most similar to ksh, but with countless enhancements and differences.", "creation_date": "2000-03-27", "summary": "Customizable command shell for UNIX-like environments", "external_homepage": "http://zsh.sourceforge.net/", "video_url": null, "socialnetworks": [{"accounturl": "", "socialnetwork": "Twitter"}, {"accounturl": null, "socialnetwork": "Facebook"}], "status": "active", "moved_to_url": "", "preferred_support_tool": "support-requests", "preferred_support_url": "", "developers": [{"username": "barts", "name": "Bart Schaefer", "url": "https://sourceforge.net/u/barts/"}, {"username": "adamspiers", "name": "Adam Spiers", "url": "https://sourceforge.net/u/adamspiers/"}, {"username": "romkatv314", "name": "Roman Perepelitsa", "url": "https://sourceforge.net/u/romkatv314/"}, {"username": "jtakimoto", "name": "Jun T.", "url": "https://sourceforge.net/u/jtakimoto/"}, {"username": "mikamika", "name": "Mikael Magnusson", "url": "https://sourceforge.net/u/mikamika/"}, {"username": "pws", "name": "Peter Stephenson", "url": "https://sourceforge.net/u/pws/"}, {"username": "danielshahaf", "name": "Daniel Shahaf", "url": "https://sourceforge.net/u/danielshahaf/"}, {"username": "packersv", "name": "Paul Ackersviller", "url": "https://sourceforge.net/u/packersv/"}, {"username": "okdana", "name": "dana", "url": "https://sourceforge.net/u/okdana/"}, {"username": "bor", "name": "Andrey Borzenkov", "url": "https://sourceforge.net/u/bor/"}, {"username": "pcppopper", "name": "Nikolai Weibull", "url": "https://sourceforge.net/u/pcppopper/"}, {"username": "rudi_s", "name": "Simon Ruderich", "url": "https://sourceforge.net/u/rudi_s/"}, {"username": "acs", "name": "Vin Shelton", "url": 
"https://sourceforge.net/u/acs/"}, {"username": "clint", "name": "Clint Adams", "url": "https://sourceforge.net/u/clint/"}, {"username": "pdpennock", "name": "Phil Pennock", "url": "https://sourceforge.net/u/pdpennock/"}, {"username": "wayned", "name": "Wayne Davison", "url": "https://sourceforge.net/u/wayned/"}, {"username": "wm3", "name": "Motoi WASHIDA", "url": "https://sourceforge.net/u/wm3/"}, {"username": "hanpt", "name": "hpt", "url": "https://sourceforge.net/u/hanpt/"}, {"username": "phy1729", "name": "phy1729", "url": "https://sourceforge.net/u/phy1729/"}, {"username": "opk", "name": "Oliver Kiddle", "url": "https://sourceforge.net/u/opk/"}, {"username": "f_rosencrantz", "name": "Felix Rosencrantz", "url": "https://sourceforge.net/u/f_rosencrantz/"}, {"username": "akr", "name": "Tanaka Akira", "url": "https://sourceforge.net/u/akr/"}, {"username": "blueyed", "name": "daniel hahler", "url": "https://sourceforge.net/u/blueyed/"}, {"username": "eitanadler", "name": "Eitan Adler", "url": "https://sourceforge.net/u/eitanadler/"}, {"username": "schazelas", "name": "Stephane Chazelas", "url": "https://sourceforge.net/u/schazelas/"}, {"username": "wischnow", "name": "Sven Wischnowsky", "url": "https://sourceforge.net/u/wischnow/"}, {"username": "chmou", "name": "Chmouel Boudjnah", "url": "https://sourceforge.net/u/chmou/"}, {"username": "illua", "name": "Eric Cook", "url": "https://sourceforge.net/u/illua/"}, {"username": "bewater", "name": "Frank Terbeck", "url": "https://sourceforge.net/u/bewater/"}, {"username": "xtaran", "name": "Axel Beckert", "url": "https://sourceforge.net/u/xtaran/"}, {"username": "dkearns", "name": "Doug Kearns", "url": "https://sourceforge.net/u/dkearns/"}, {"username": "richih", "name": "Richard Hartmann", "url": "https://sourceforge.net/u/richih/"}, {"username": "larryv", "name": "Lawrence Vel\u00e1zquez", "url": "https://sourceforge.net/u/larryv/"}, {"username": "gcw", "name": "Geoff Wing", "url": "https://sourceforge.net/u/gcw/"}], 
"tools": [{"name": "summary", "mount_point": "summary", "url": "https://sourceforge.net/p/zsh/summary/", "mount_label": "Summary", "sourceforge_group_id": 4068}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/zsh/reviews/", "mount_label": "Reviews"}, {"name": "blog", "mount_point": "news", "url": "https://sourceforge.net/p/zsh/news/", "mount_label": "News", "api_url": "https://sourceforge.net/rest/p/zsh/news/"}, {"name": "git", "mount_point": "code", "url": "https://sourceforge.net/p/zsh/code/", "mount_label": "Code", "api_url": "https://sourceforge.net/rest/p/zsh/code/", "clone_url_https_anon": "https://git.code.sf.net/p/zsh/code", "clone_url_ro": "git://git.code.sf.net/p/zsh/code"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/zsh/files/", "mount_label": "Files"}, {"name": "tickets", "mount_point": "patches", "url": "https://sourceforge.net/p/zsh/patches/", "mount_label": "Patches", "api_url": "https://sourceforge.net/rest/p/zsh/patches/"}, {"name": "tickets", "mount_point": "bugs", "url": "https://sourceforge.net/p/zsh/bugs/", "mount_label": "Bugs", "api_url": "https://sourceforge.net/rest/p/zsh/bugs/"}, {"name": "tickets", "mount_point": "support-requests", "url": "https://sourceforge.net/p/zsh/support-requests/", "mount_label": "Support Requests", "api_url": "https://sourceforge.net/rest/p/zsh/support-requests/"}, {"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/zsh/support/", "mount_label": "Support"}, {"name": "tickets", "mount_point": "feature-requests", "url": "https://sourceforge.net/p/zsh/feature-requests/", "mount_label": "Feature Requests", "api_url": "https://sourceforge.net/rest/p/zsh/feature-requests/"}, {"name": "git", "mount_point": "web", "url": "https://sourceforge.net/p/zsh/web/", "mount_label": "Website-content", "api_url": "https://sourceforge.net/rest/p/zsh/web/", "clone_url_https_anon": "https://git.code.sf.net/p/zsh/web", "clone_url_ro": 
"git://git.code.sf.net/p/zsh/web"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/zsh/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/zsh/activity/"}, {"name": "mailman", "mount_point": "mailman", "url": "https://sourceforge.net/p/zsh/mailman/", "mount_label": "Mailing Lists"}], "labels": [], "categories": {"audience": [{"id": 4, "shortname": "system-administrators", "fullname": "System Administrators", "fullpath": "Intended Audience :: by End-User Class :: System Administrators"}, {"id": 3, "shortname": "developers", "fullname": "Developers", "fullpath": "Intended Audience :: by End-User Class :: Developers"}, {"id": 2, "shortname": "end-users-desktop", "fullname": "End Users/Desktop", "fullpath": "Intended Audience :: by End-User Class :: End Users/Desktop"}], "developmentstatus": [{"id": 12, "shortname": "6-mature", "fullname": "6 - Mature", "fullpath": "Development Status :: 6 - Mature"}], "environment": [{"id": 460, "shortname": "console-terminal", "fullname": "Console/Terminal", "fullpath": "User Interface :: Textual :: Console/Terminal"}, {"id": 459, "shortname": "command-line", "fullname": "Command-line", "fullpath": "User Interface :: Textual :: Command-line"}], "language": [{"id": 185, "shortname": "unix-shell", "fullname": "Unix Shell", "fullpath": "Programming Language :: Unix Shell"}, {"id": 164, "shortname": "c", "fullname": "C", "fullpath": "Programming Language :: C"}], "license": [{"id": 14, "shortname": "osi-approved-open-source", "fullname": "OSI-Approved Open Source", "fullpath": "License :: OSI-Approved Open Source"}], "translation": [], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 202, "shortname": "bsd", "fullname": "BSD", "fullpath": "Operating System :: BSD"}], "database": [], "topic": [{"id": 294, "shortname": "system-shells", "fullname": "System Shells", "fullpath": "Topic :: System :: System Shells"}]}, 
"icon_url": "https://sourceforge.net/p/zsh/icon", "screenshots": []}upstream-ontologist-0.3.6/src/vcs.rs000064400000000000000000001605661046102023000156370ustar 00000000000000use crate::with_path_segments; use lazy_regex::regex; use log::{debug, warn}; use std::borrow::Cow; use std::collections::HashMap; use url::Url; /// Known VCSes pub const VCSES: &[&str] = &["git", "bzr", "hg"]; /// Hostnames for known gitlab sites pub const KNOWN_GITLAB_SITES: &[&str] = &["salsa.debian.org", "invent.kde.org", "0xacab.org"]; /// URL schemes that are considered secure pub const SECURE_SCHEMES: &[&str] = &["https", "git+ssh", "bzr+ssh", "hg+ssh", "ssh", "svn+ssh"]; /// Known hosting sites const KNOWN_HOSTING_SITES: &[&str] = &[ "code.launchpad.net", "github.com", "launchpad.net", "git.openstack.org", ]; /// Check if a string can plausibly be a URL pub fn plausible_url(url: &str) -> bool { url.contains(':') } /// Drop VCS prefixes from a URL /// /// This function removes the VCS prefix from a URL, if it exists. For example, /// it will convert `git+http://example.com/repo.git` to `http://example.com/repo.git`. 
pub fn drop_vcs_in_scheme(url: &Url) -> Option { let scheme = url.scheme(); match scheme { "git+http" | "git+https" => { Some(derive_with_scheme(url, scheme.trim_start_matches("git+"))) } "hg+http" | "hg+https" => Some(derive_with_scheme(url, scheme.trim_start_matches("hg+"))), "bzr+lp" | "bzr+http" => Some(derive_with_scheme(url, scheme.trim_start_matches("bzr+"))), _ => None, } } /// Split a VCS URL into its components pub fn split_vcs_url(location: &str) -> (String, Option, Option) { let mut url = location; let mut branch = None; let mut subpath = None; // Check for subpath in brackets if let Some(idx) = url.find('[') { if let Some(idx2) = url.find(']') { subpath = Some(url[idx + 1..idx2].to_string()); url = &url[..idx]; } } // Check for branch specification if let Some(idx) = url.find(" -b ") { branch = Some(url[idx + 4..].to_string()); url = &url[..idx]; } (url.to_string(), branch, subpath) } /// Unsplit a VCS URL pub fn unsplit_vcs_url(location: &VcsLocation) -> String { let mut url = location.url.to_string(); if let Some(branch_name) = location.branch.as_deref() { url = format!("{} -b {}", url, branch_name); } if let Some(subpath_str) = location.subpath.as_deref() { url = format!("{} [{}]", url, subpath_str); } url } /// Check if a URL is plausibly VCS browse URL pub fn plausible_browse_url(url: &str) -> bool { if let Ok(url) = url::Url::parse(url) { if url.scheme() == "https" || url.scheme() == "http" { return true; } } false } /// Strip VCS prefixes from a URL /// /// Note: This function is not a complete URL parser and should not be used for /// general URL parsing. Consider using `drop_vcs_in_scheme` /// instead. 
pub fn strip_vcs_prefixes(url: &str) -> &str { let prefixes = ["git", "hg"]; for prefix in prefixes.iter() { if url.starts_with(&format!("{}+", prefix)) { return &url[prefix.len() + 1..]; } } url } /// Check if the specified GitHub URL is a valid repository /// /// If `version` is specified, it checks if the version is available in the /// repository. async fn probe_upstream_github_branch_url(url: &url::Url, version: Option<&str>) -> Option { let path = url.path(); let path = path.strip_suffix(".git").unwrap_or(path); let api_url = url::Url::parse( format!( "https://api.github.com/repos/{}/tags", path.trim_start_matches('/') ) .as_str(), ) .unwrap(); match crate::load_json_url(&api_url, None).await { Ok(json) => { if let Some(version) = version { let tags = json.as_array()?; let tag_names = tags .iter() .map(|x| x["name"].as_str().unwrap()) .collect::>(); if tag_names.is_empty() { // Uhm, hmm return Some(true); } return Some(version_in_tags(version, tag_names.as_slice())); } Some(true) } Err(crate::HTTPJSONError::Error { status: 404, .. }) => Some(false), Err(crate::HTTPJSONError::Error { status: 403, .. 
}) => { debug!("github api rate limit exceeded"); None } Err(e) => { warn!("failed to probe github api: {:?}", e); None } } } fn version_in_tags(version: &str, tag_names: &[&str]) -> bool { if tag_names.contains(&version) { return true; } if tag_names.contains(&format!("v{}", version).as_str()) { return true; } if tag_names.contains(&format!("release/{}", version).as_str()) { return true; } if tag_names.contains(&version.replace('.', "_").as_str()) { return true; } for tag_name in tag_names { if tag_name.ends_with(&format!("_{}", version)) { return true; } if tag_name.ends_with(&format!("-{}", version)) { return true; } if tag_name.ends_with(&format!("_{}", version.replace('.', "_"))) { return true; } } false } fn probe_upstream_breezy_branch_url(url: &url::Url, version: Option<&str>) -> Option { let tags: HashMap = breezyshim::ui::with_silent_ui_factory( || -> Result, breezyshim::error::Error> { let branch = breezyshim::branch::open(url)?; branch.tags()?.get_tag_dict() }, ) .map_err(|e| { warn!("failed to probe breezy branch: {:?}", e); e }) .ok()?; let tag_names = tags.keys().map(|x| x.as_str()).collect::>(); if let Some(version) = version { Some(version_in_tags(version, tag_names.as_slice())) } else { Some(true) } } /// Probe an upstream branch URL for a repository /// /// This function checks if the specified URL is a valid repository URL /// and if the specified version is available in the repository. /// /// # Returns /// Some(true) if the URL is valid and the version is available, /// Some(false) if the URL is valid but the version is not available, /// None if the URL is not valid or if the probe failed. pub async fn probe_upstream_branch_url(url: &url::Url, version: Option<&str>) -> Option { if url.scheme() == "git+ssh" || url.scheme() == "ssh" || url.scheme() == "bzr+ssh" { // Let's not probe anything possibly non-public. 
return None; } if url.host() == Some(url::Host::Domain("github.com")) { probe_upstream_github_branch_url(url, version).await } else { probe_upstream_breezy_branch_url(url, version) } } /// Check if a repository URL is canonical pub async fn check_repository_url_canonical( mut url: url::Url, version: Option<&str>, ) -> std::result::Result { if url.host_str() == Some("github.com") { let mut segments = url.path_segments().unwrap().collect::>(); if segments.len() < 2 { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL with less than 2 path elements".to_string(), )); } if segments[0] == "sponsors" { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub sponsors URL".to_string(), )); } segments[1] = segments[1].trim_end_matches(".git"); let api_url = format!( "https://api.github.com/repos/{}/{}", segments[0], segments[1] ); url = match crate::load_json_url(&url::Url::parse(api_url.as_str()).unwrap(), None).await { Ok(data) => { if data["archived"].as_bool().unwrap_or(false) { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL is archived".to_string(), )); } if let Some(description) = data["description"].as_str() { if description.contains("DEPRECATED") { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL is deprecated".to_string(), )); } if description.starts_with("Moved to") { let url = url::Url::parse( description .trim_start_matches("Moved to ") .trim_end_matches('.'), ) .unwrap(); return Box::pin(check_repository_url_canonical(url, version)).await; } if description.contains("has moved") { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL has moved".to_string(), )); } if description.starts_with("Mirror of ") { let url = url::Url::parse( description .trim_start_matches("Mirror of ") .trim_end_matches('.'), ) .unwrap(); return Box::pin(check_repository_url_canonical(url, version)).await; } } if let Some(homepage) = data["homepage"].as_str() { if is_gitlab_site(homepage, None).await { return 
Err(crate::CanonicalizeError::InvalidUrl( url, format!("homepage is on GitLab: {}", homepage), )); } } // TODO(jelmer): Look at the contents of the repository; if it contains just a // single README file with < 10 lines, assume the worst. // return data['clone_url'] Ok(url::Url::parse(data["clone_url"].as_str().unwrap()).unwrap()) } Err(crate::HTTPJSONError::Error { status: 404, .. }) => { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL does not exist".to_string(), )) } Err(crate::HTTPJSONError::Error { status: 403, .. }) => { return Err(crate::CanonicalizeError::Unverifiable( url, "GitHub URL rate-limited".to_string(), )) } Err(e) => { return Err(crate::CanonicalizeError::Unverifiable( url, format!("GitHub URL failed to load: {:?}", e), )) } }?; } let is_valid = probe_upstream_branch_url(&url, version).await; if is_valid.is_none() { return Err(crate::CanonicalizeError::Unverifiable( url, "unable to probe".to_string(), )); } if is_valid.unwrap() { return Ok(url); } Err(crate::CanonicalizeError::InvalidUrl( url, "unable to successfully probe URL".to_string(), )) } /// Check if the specified hostname contains a GitLab site pub async fn is_gitlab_site(hostname: &str, net_access: Option) -> bool { if KNOWN_GITLAB_SITES.contains(&hostname) { return true; } if hostname.starts_with("gitlab.") { return true; } if net_access.unwrap_or(false) { probe_gitlab_host(hostname).await } else { false } } /// Probe a GitLab host to check if it is a valid GitLab instance pub async fn probe_gitlab_host(hostname: &str) -> bool { let url = format!("https://{}/api/v4/version", hostname); match crate::load_json_url(&url::Url::parse(url.as_str()).unwrap(), None).await { Ok(_data) => true, Err(crate::HTTPJSONError::Error { status: 401, response, .. 
}) => { if let Ok(data) = response.json::().await { if let Some(message) = data["message"].as_str() { if message == "401 Unauthorized" { true } else { debug!("failed to parse JSON response: {:?}", data); false } } else { debug!("failed to parse JSON response: {:?}", data); false } } else { debug!("failed to parse JSON response"); false } } Err(e) => { debug!("failed to probe GitLab host: {:?}", e); false } } } /// Guess a repository URL from a URL pub async fn guess_repo_from_url(url: &url::Url, net_access: Option) -> Option { let net_access = net_access.unwrap_or(false); let path_segments = url.path_segments().unwrap().collect::>(); match url.host_str()? { "github.com" => { if path_segments.len() < 2 { return None; } Some( with_path_segments(url, &path_segments[0..2]) .unwrap() .to_string(), ) } "travis-ci.org" => { if path_segments.len() < 2 { return None; } Some(format!( "https://github.com/{}/{}", path_segments[0], path_segments[1] )) } "coveralls.io" => { if path_segments.len() < 3 { return None; } if path_segments[0] != "r" { return None; } Some(format!( "https://github.com/{}/{}", path_segments[1], path_segments[2] )) } "launchpad.net" => Some( url::Url::parse(format!("https://code.launchpad.net/{}", path_segments[0]).as_str()) .unwrap() .to_string(), ), "git.savannah.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "git" { return None; } Some(url.to_string()) } "freedesktop.org" | "www.freedesktop.org" => { if path_segments.len() >= 2 && path_segments[0] == "software" { Some( url::Url::parse( format!("https://github.com/freedesktop/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } else if path_segments.len() >= 3 && path_segments[0..2] == ["wiki", "Software"] { Some( url::Url::parse( format!("https://github.com/freedesktop/{}", path_segments[2]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "download.gnome.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "sources" { return 
None; } Some( url::Url::parse( format!("https://gitlab.gnome.org/GNOME/{}.git", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } "download.kde.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "stable" && path_segments[0] != "unstable" { return None; } Some( url::Url::parse(format!("https://invent.kde.org/{}", path_segments[1]).as_str()) .unwrap() .to_string(), ) } "ftp.gnome.org" => { if path_segments.len() >= 4 && path_segments[0] == "pub" && path_segments[1] == "GNOME" && path_segments[2] == "sources" { Some( url::Url::parse( format!("https://gitlab.gnome.org/GNOME/{}.git", path_segments[3]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "sourceforge.net" => { if path_segments.len() >= 4 && path_segments[0] == "p" && path_segments[3] == "ci" { Some( url::Url::parse( format!( "https://sourceforge.net/p/{}/{}", path_segments[1], path_segments[2] ) .as_str(), ) .unwrap() .to_string(), ) } else { None } } "www.apache.org" => { if path_segments.len() >= 2 && path_segments[0] == "dist" { Some( url::Url::parse( format!("https://svn.apache.org/repos/asf/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "bitbucket.org" => { if path_segments.len() < 2 { return None; } Some( with_path_segments(url, &path_segments[0..2]) .unwrap() .to_string(), ) } "ftp.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "gnu" { return None; } Some( url::Url::parse( format!("https://git.savannah.gnu.org/git/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } "download.savannah.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "releases" { return None; } Some( url::Url::parse( format!("https://git.savannah.gnu.org/git/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } u if is_gitlab_site(u, Some(net_access)).await => { if path_segments.is_empty() { return None; } let proj_segments = if path_segments.contains(&"-") { 
path_segments[0..path_segments.iter().position(|s| s.contains('-')).unwrap()] .to_vec() } else if path_segments.contains(&"tags") { path_segments[0..path_segments.iter().position(|s| s == &"tags").unwrap()].to_vec() } else if path_segments.contains(&"blob") { path_segments[0..path_segments.iter().position(|s| s == &"blob").unwrap()].to_vec() } else { path_segments.to_vec() }; Some(with_path_segments(url, &proj_segments).unwrap().to_string()) } "git.php.net" => { if path_segments[0] == "repository" { Some(url.to_string()) } else if path_segments.is_empty() { let qs = url.query_pairs().collect::>(); qs.get("p") .map(|p| { url::Url::parse(format!("https://git.php.net/repository/?{}", p).as_str()) .unwrap() }) .map(|u| u.to_string()) } else { None } } u if KNOWN_HOSTING_SITES.contains(&u) => Some(url.to_string()), u if u.starts_with("svn.") => { // 'svn' subdomains are often used for hosting SVN repositories Some(url.to_string()) } _ => { if net_access { match check_repository_url_canonical(url.clone(), None).await { Ok(url) => Some(url.to_string()), Err(_) => { debug!("Failed to canonicalize URL: {}", url); None } } } else { None } } } } #[tokio::test] async fn test_guess_repo_url() { assert_eq!( Some("https://github.com/jelmer/blah".to_string()), guess_repo_from_url( &"https://github.com/jelmer/blah".parse().unwrap(), Some(false) ) .await ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), guess_repo_from_url( &"https://github.com/jelmer/blah/blob/README" .parse() .unwrap(), Some(false) ) .await ); assert_eq!( None, guess_repo_from_url(&"https://github.com/jelmer".parse().unwrap(), Some(false)).await ); assert_eq!( None, guess_repo_from_url(&"https://www.jelmer.uk/".parse().unwrap(), Some(false)).await ); assert_eq!( Some("http://code.launchpad.net/blah".to_string()), guess_repo_from_url( &"http://code.launchpad.net/blah".parse().unwrap(), Some(false) ) .await, ); assert_eq!( Some("https://code.launchpad.net/bzr".to_string()), guess_repo_from_url( 
&"http://launchpad.net/bzr/+download".parse().unwrap(), Some(false) ) .await, ); assert_eq!( Some("https://git.savannah.gnu.org/git/auctex.git".to_string()), guess_repo_from_url( &"https://git.savannah.gnu.org/git/auctex.git" .parse() .unwrap(), Some(false) ) .await, ); assert_eq!( None, guess_repo_from_url( &"https://git.savannah.gnu.org/blah/auctex.git" .parse() .unwrap(), Some(false) ) .await, ); assert_eq!( Some("https://bitbucket.org/fenics-project/dolfin".to_string()), guess_repo_from_url( &"https://bitbucket.org/fenics-project/dolfin/downloads/" .parse() .unwrap(), Some(false) ) .await, ); } /// Canonicalize a Git repository URL pub async fn canonical_git_repo_url(repo_url: &Url, net_access: Option) -> Option { if let Some(hostname) = repo_url.host_str() { if (is_gitlab_site(hostname, net_access).await || hostname == "github.com") && !repo_url.path().ends_with(".git") { let mut url = repo_url.clone(); url.set_path(&(url.path().to_owned() + ".git")); return Some(url); } } None } /// Determine the browse URL from a repo URL pub async fn browse_url_from_repo_url( location: &VcsLocation, net_access: Option, ) -> Option { if location.url.host_str() == Some("github.com") { let mut path = location .url .path_segments() .unwrap() .take(3) .collect::>() .join("/"); if path.ends_with(".git") { path = path[..path.len() - 4].to_string(); } if location.subpath.is_some() || location.branch.is_some() { path.push_str(&format!( "/tree/{}", location.branch.as_deref().unwrap_or("HEAD") )); } if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/{}", subpath_str)); } Some( Url::parse("https://github.com") .unwrap() .join(&path) .unwrap(), ) } else if location.url.host_str() == Some("gopkg.in") { let mut els = location .url .path_segments() .unwrap() .take(3) .collect::>(); if els.len() != 2 { return None; } if let Some(version) = els[2].strip_prefix(".v") { els[2] = ""; let mut path = els.join("/"); path.push_str(&format!("/tree/{}", version)); if 
let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/{}", subpath_str)); } Some( Url::parse("https://github.com") .unwrap() .join(&path) .unwrap(), ) } else { None } } else if location.url.host_str() == Some("code.launchpad.net") || location.url.host_str() == Some("launchpad.net") { let mut path = location.url.path().to_string(); if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/view/head:{}", subpath_str)); return Some( Url::parse(format!("https://bazaar.launchpad.net{}", path).as_str()).unwrap(), ); } else { return Some( Url::parse(format!("https://code.launchpad.net{}", path).as_str()).unwrap(), ); } } else if location.url.host_str() == Some("svn.apache.org") { let path_elements = location .url .path_segments() .map(|segments| segments.into_iter().collect::>()) .unwrap_or_default(); if path_elements.len() >= 2 && path_elements[0] == "repos" && path_elements[1] == "asf" { let mut path_elements = path_elements.into_iter().skip(1).collect::>(); path_elements[0] = "viewvc"; if let Some(subpath_str) = location.subpath.as_deref() { path_elements.push(subpath_str); } return Some( Url::parse(format!("https://svn.apache.org{}", path_elements.join("/")).as_str()) .unwrap(), ); } else { None } } else if location.url.host_str() == Some("git.savannah.gnu.org") || location.url.host_str() == Some("git.sv.gnu.org") { let mut path_elements = location.url.path_segments().unwrap().collect::>(); if location.url.scheme() == "https" && path_elements.first() == Some(&"git") { path_elements.remove(0); } path_elements.insert(0, "cgit"); if let Some(subpath_str) = location.subpath.as_deref() { path_elements.push("tree"); path_elements.push(subpath_str); } Some( Url::parse(format!("https://git.savannah.gnu.org{}", path_elements.join("/")).as_str()) .unwrap(), ) } else if location.url.host_str().is_some() && is_gitlab_site(location.url.host_str().unwrap(), net_access).await { let mut path = location.url.path().to_string(); if 
path.ends_with(".git") { path = path[..path.len() - 4].to_string(); } if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/-/blob/HEAD/{}", subpath_str)); } Some( Url::parse(format!("https://{}{}", location.url.host_str().unwrap(), path).as_str()) .unwrap(), ) } else { None } } /// Find a public repository URL from a given repository URL pub async fn find_public_repo_url(repo_url: &str, net_access: Option) -> Option { let parsed = match Url::parse(repo_url) { Ok(parsed) => parsed, Err(_) => { if repo_url.contains(':') { let re = regex!(r"^(?P[^@:/]+@)?(?P[^/:]+):(?P.*)$"); if let Some(captures) = re.captures(repo_url) { let host = captures.name("host").unwrap().as_str(); let path = captures.name("path").unwrap().as_str(); if host == "github.com" || is_gitlab_site(host, net_access).await { return Some(format!("https://{}/{}", host, path)); } } } return None; } }; let revised_url: Option; match parsed.host_str() { Some("github.com") => { if ["https", "http", "git"].contains(&parsed.scheme()) { return Some(repo_url.to_string()); } revised_url = Some( Url::parse("https://github.com") .unwrap() .join(parsed.path()) .unwrap() .to_string(), ); } Some(hostname) if is_gitlab_site(hostname, net_access).await => { if ["https", "http"].contains(&parsed.scheme()) { return Some(repo_url.to_string()); } if parsed.scheme() == "ssh" { revised_url = Some(format!( "https://{}{}", parsed.host_str().unwrap(), parsed.path(), )); } else { revised_url = None; } } Some("code.launchpad.net") | Some("bazaar.launchpad.net") | Some("git.launchpad.net") => { if parsed.scheme().starts_with("http") || parsed.scheme() == "lp" { return Some(repo_url.to_string()); } if ["ssh", "bzr+ssh"].contains(&parsed.scheme()) { revised_url = Some(format!( "https://{}{}", parsed.host_str().unwrap(), parsed.path() )); } else { revised_url = None; } } _ => revised_url = None, } revised_url } /// Fix up a Git URL to use the correct scheme pub fn fixup_rcp_style_git_repo_url(url: &str) 
-> Option { breezyshim::location::rcp_location_to_url(url).ok() } /// Try to open a branch from a URL pub fn try_open_branch( url: &url::Url, branch_name: Option<&str>, ) -> Option> { use breezyshim::branch::Branch; let old_ui_factory = breezyshim::ui::get_ui_factory(); breezyshim::ui::install_ui_factory(&breezyshim::ui::SilentUIFactory::new()); let controldir = match breezyshim::controldir::open(url, None) { Ok(c) => c, Err(_) => return None, }; let rev = match controldir.open_branch(branch_name) { Ok(b) => { b.last_revision(); Some(b) } Err(_) => None, }; breezyshim::ui::install_ui_factory(old_ui_factory.as_ref()); rev } /// Find a secure repository URL pub async fn find_secure_repo_url( mut url: url::Url, branch: Option<&str>, net_access: Option, ) -> Option { use breezyshim::branch::Branch; if SECURE_SCHEMES.contains(&url.scheme()) { return Some(url); } // Sites we know to be available over https if let Some(hostname) = url.host_str() { if is_gitlab_site(hostname, net_access).await || [ "github.com", "git.launchpad.net", "bazaar.launchpad.net", "code.launchpad.net", ] .contains(&hostname) { url = derive_with_scheme(&url, "https"); } } if url.scheme() == "lp" { url = derive_with_scheme(&url, "https"); url.set_host(Some("code.launchpad.net")).unwrap(); } if let Some(host) = url.host_str() { if ["git.savannah.gnu.org", "git.sv.gnu.org"].contains(&host) { if url.scheme() == "http" { url = derive_with_scheme(&url, "https"); } else { url = derive_with_scheme(&url, "https"); url.set_path(format!("/git{}", url.path()).as_str()); } } } else { return None; } if net_access.unwrap_or(true) { let secure_repo_url = derive_with_scheme(&url, "https"); let insecure_branch = try_open_branch(&url, branch); let secure_branch = try_open_branch(&secure_repo_url, branch); if let Some(secure_branch) = secure_branch { if insecure_branch.is_none() || secure_branch.last_revision() == insecure_branch.unwrap().last_revision() { url = secure_repo_url; } } } if 
SECURE_SCHEMES.contains(&url.scheme()) { Some(url) } else { // Can't find a secure URI :( None } } #[derive(Debug, Clone, PartialEq, Eq)] /// A VCS location pub struct VcsLocation { /// URL of the repository pub url: url::Url, /// Name of the branch pub branch: Option, /// Subpath within the repository pub subpath: Option, } impl VcsLocation { async fn from_str(url: &str) -> Result { let (url, branch, subpath) = split_vcs_url(url); let url = fixup_git_url(url.as_str()).await; Ok(VcsLocation { url: url.parse()?, branch, subpath, }) } } impl std::fmt::Display for VcsLocation { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", unsplit_vcs_url(self)) } } impl From for url::Url { fn from(v: VcsLocation) -> Self { v.url } } impl From for VcsLocation { fn from(url: url::Url) -> Self { VcsLocation { url, branch: None, subpath: None, } } } fn derive_with_scheme(url: &url::Url, scheme: &str) -> url::Url { let mut s = url.to_string(); s.replace_range(..url.scheme().len(), scheme); url::Url::parse(&s).unwrap() } fn fix_path_in_port(url: &str) -> Option { let (_, scheme, host, port, rest) = lazy_regex::regex_captures!(r"^([^:]+)://([^:]+):([^/]+)(/.*)$", url)?; if port.ends_with(']') { return None; } if let Ok(port) = port.parse::() { Some(format!("{}://{}:{}{}", scheme, host, port, rest)) } else { Some(format!("{}://{}/{}{}", scheme, host, port, rest)) } } async fn fix_gitlab_scheme(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if let Some(host) = url.host_str() { if is_gitlab_site(host, None).await { return Some(derive_with_scheme(&url, "https").to_string()); } } } None } fn fix_github_scheme(url: &str) -> Option { // GitHub no longer supports the git:// scheme if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("github.com") { return Some(derive_with_scheme(&url, "https").to_string()); } } None } fn fix_salsa_cgit_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == 
Some("salsa.debian.org") { if let Some(suffix) = url.path().strip_prefix("/cgit/") { let mut url = url.clone(); url.set_path(suffix); return Some(url.to_string()); } } } None } async fn fix_gitlab_tree_in_url(location: &VcsLocation) -> Option { if is_gitlab_site(location.url.host_str()?, None).await { let segments = location.url.path_segments().unwrap().collect::>(); if let Some(p) = segments.iter().position(|p| *p == "tree") { let branch = segments[(p + 1)..].join("/"); let path = segments[..p].join("/"); let mut url = location.url.clone(); url.set_path(path.as_str()); return Some(VcsLocation { url, branch: Some(branch), subpath: location.subpath.clone(), }); } } None } fn fix_double_slash(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if url.path().starts_with("//") { let path = url .path() .to_string() .strip_prefix("//") .unwrap() .to_string(); url.set_path(path.as_str()); return Some(url.to_string()); } } None } fn fix_extra_colon(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if url.path().starts_with(':') { let path = url .path() .to_string() .strip_prefix(':') .unwrap() .to_string(); url.set_path(&path); return Some(url.to_string()); } } None } fn drop_git_username(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if let Some(host) = url.host_str() { if !["github.com", "salsa.debian.org", "gitlab.com"].contains(&host) { return None; } } else { return None; } if !["git", "http", "https"].contains(&url.scheme()) { return None; } if url.username() == "git" { url.set_username("").unwrap(); return Some(url.to_string()); } } None } fn fix_branch_argument(location: &VcsLocation) -> Option { if location.url.host_str() == Some("github.com") { // TODO(jelmer): Handle gitlab sites too? 
let path_elements = location.url.path_segments().unwrap().collect::>(); if path_elements.len() > 2 && path_elements[2] == "tree" { let branch = path_elements[3..].join("/"); let path = path_elements[..2].join("/"); let mut url = location.url.clone(); url.set_path(path.as_str()); Some(VcsLocation { url, branch: Some(branch), subpath: location.subpath.clone(), }) } else { None } } else { None } } fn fix_git_gnome_org_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("git.gnome.org") { let mut path_segments = url.path_segments().unwrap().collect::>(); if path_segments.first().map(|p| *p == "browse") == Some(true) { path_segments.remove(0); } let mut url = derive_with_scheme(&url, "https"); url.set_host(Some("gitlab.gnome.org")).unwrap(); url.set_path(format!("GNOME/{}", path_segments.join("/")).as_str()); return Some(url.to_string()); } } None } fn fix_kde_anongit_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("anongit.kde.org") { let url = derive_with_scheme(&url, "https"); return Some(url.to_string()); } } None } fn fix_freedesktop_org_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("anongit.freedesktop.org") { let suffix = url.path().strip_prefix("/git/"); let mut url = derive_with_scheme(&url, "https"); if let Some(suffix) = suffix { url.set_path(suffix); } url.set_host(Some("gitlab.freedesktop.org")).unwrap(); return Some(url.to_string()); } } None } type AsyncLocationFixer = for<'a> fn( &'a VcsLocation, ) -> std::pin::Pin< Box> + Send + 'a>, >; const LOCATION_FIXERS: &[AsyncLocationFixer] = &[ |loc| Box::pin(async move { fix_gitlab_tree_in_url(loc).await }), |loc| Box::pin(async move { fix_branch_argument(loc) }), ]; /// Attempt to fix up broken Git URLs. 
pub async fn fixup_git_location(location: &VcsLocation) -> Cow<'_, VcsLocation> { let mut location = Cow::Borrowed(location); for cb in LOCATION_FIXERS { location = cb(&location).await.map_or(location, Cow::Owned); } location } type AsyncFixer = for<'a> fn( &'a str, ) -> std::pin::Pin< Box> + Send + 'a>, >; const URL_FIXERS: &[AsyncFixer] = &[ |url| Box::pin(async move { fix_path_in_port(url) }), |url| Box::pin(async move { fix_gitlab_scheme(url).await }), |url| Box::pin(async move { fix_github_scheme(url) }), |url| Box::pin(async move { fix_salsa_cgit_url(url) }), |url| Box::pin(async move { fix_double_slash(url) }), |url| Box::pin(async move { fix_extra_colon(url) }), |url| Box::pin(async move { drop_git_username(url) }), |url| Box::pin(async move { fix_freedesktop_org_url(url) }), |url| Box::pin(async move { fix_kde_anongit_url(url) }), |url| Box::pin(async move { fix_git_gnome_org_url(url) }), ]; /// Fixup a Git URL pub async fn fixup_git_url(url: &str) -> String { let mut url = url.to_string(); for cb in URL_FIXERS { url = cb(&url).await.unwrap_or(url); } url } /// Convert a CVS URL to a Breezy URL. pub fn convert_cvs_list_to_str(urls: &[&str]) -> Option { if urls[0].starts_with(":extssh:") || urls[0].starts_with(":pserver:") { let url = breezyshim::location::cvs_to_url(urls[0]); Some(format!("{}#{}", url, urls[1])) } else { None } } type AsyncSanitizer = for<'a> fn( &'a str, ) -> std::pin::Pin< Box> + Send + 'a>, >; const SANITIZERS: &[AsyncSanitizer] = &[ |url| Box::pin(async move { drop_vcs_in_scheme(&url.parse().ok()?) 
}), |url| { Box::pin(async move { let location = VcsLocation::from_str(url).await.ok()?; Some(fixup_git_location(&location).await.url.clone()) }) }, |url| Box::pin(async move { fixup_rcp_style_git_repo_url(url) }), |url| { Box::pin(async move { find_public_repo_url(url.to_string().as_str(), None) .await .and_then(|u| u.parse().ok()) }) }, |url| Box::pin(async move { canonical_git_repo_url(&url.parse().ok()?, None).await }), |url| Box::pin(async move { find_secure_repo_url(url.parse().ok()?, None, Some(false)).await }), ]; /// Sanitize a VCS URL pub async fn sanitize_url(url: &str) -> String { let mut url: Cow<'_, str> = Cow::Borrowed(url); for sanitizer in SANITIZERS { url = sanitizer(url.as_ref()) .await .map_or(url, |f| Cow::Owned(f.to_string())); } url.into_owned() } #[cfg(test)] mod tests { use super::fixup_git_url; async fn fixup_git_location(url: &str) -> String { let location = super::VcsLocation::from_str(url) .await .expect("Failed to parse URL in test"); super::fixup_git_location(&location).await.to_string() } #[test] fn test_plausible_url() { use super::plausible_url; assert!(!plausible_url("the")); assert!(!plausible_url("1")); assert!(plausible_url("git@foo:blah")); assert!(plausible_url("git+ssh://git@foo/blah")); assert!(plausible_url("https://foo/blah")); } #[tokio::test] async fn test_is_gitlab_site() { use super::is_gitlab_site; assert!(is_gitlab_site("gitlab.com", Some(false)).await); assert!(is_gitlab_site("gitlab.example.com", Some(false)).await); assert!(is_gitlab_site("salsa.debian.org", Some(false)).await); assert!(!is_gitlab_site("github.com", Some(false)).await); assert!(!is_gitlab_site("foo.example.com", Some(false)).await); } #[tokio::test] async fn test_canonicalize_github() { use super::canonical_git_repo_url; use url::Url; assert_eq!( Some( "https://github.com/jelmer/example.git" .parse::() .unwrap() ), canonical_git_repo_url( &"https://github.com/jelmer/example".parse::().unwrap(), Some(false) ) .await ); } #[tokio::test] async fn 
test_canonicalize_github_ssh() { use super::canonical_git_repo_url; use url::Url; assert_eq!( Some( "https://salsa.debian.org/jelmer/example.git" .parse::() .unwrap() ), canonical_git_repo_url( &"https://salsa.debian.org/jelmer/example" .parse::() .unwrap(), Some(false) ) .await ); assert_eq!( None, canonical_git_repo_url( &"https://salsa.debian.org/jelmer/example.git" .parse::() .unwrap(), Some(false) ) .await ); } #[tokio::test] async fn test_find_public_github() { use super::find_public_repo_url; assert_eq!( "https://github.com/jelmer/example", find_public_repo_url("ssh://git@github.com/jelmer/example", Some(false)) .await .unwrap() ); assert_eq!( Some("https://github.com/jelmer/example"), find_public_repo_url("https://github.com/jelmer/example", Some(false)) .await .as_deref() ); assert_eq!( "https://github.com/jelmer/example", find_public_repo_url("git@github.com:jelmer/example", Some(false)) .await .unwrap() .as_str() ); } #[tokio::test] async fn test_find_public_salsa() { use super::find_public_repo_url; assert_eq!( "https://salsa.debian.org/jelmer/example", find_public_repo_url("ssh://salsa.debian.org/jelmer/example", Some(false)) .await .unwrap() .as_str() ); assert_eq!( "https://salsa.debian.org/jelmer/example", find_public_repo_url("https://salsa.debian.org/jelmer/example", Some(false)) .await .unwrap() .as_str() ); } #[test] fn test_fixup_rcp_style() { use super::fixup_rcp_style_git_repo_url; use url::Url; assert_eq!( Some( "ssh://git@github.com/jelmer/example" .parse::() .unwrap() ), fixup_rcp_style_git_repo_url("git@github.com:jelmer/example") ); assert_eq!( Some("ssh://github.com/jelmer/example".parse::().unwrap()), fixup_rcp_style_git_repo_url("github.com:jelmer/example") ); } #[test] fn test_fixup_rcp_leave() { use super::fixup_rcp_style_git_repo_url; assert_eq!( None, fixup_rcp_style_git_repo_url("https://salsa.debian.org/jelmer/example") ); assert_eq!( None, fixup_rcp_style_git_repo_url("ssh://git@salsa.debian.org/jelmer/example") ); } 
#[tokio::test] async fn test_guess_repo_url_travis_ci_org() { use super::guess_repo_from_url; assert_eq!( Some("https://github.com/jelmer/dulwich"), guess_repo_from_url( &"https://travis-ci.org/jelmer/dulwich".parse().unwrap(), Some(false) ) .await .as_deref(), ); } #[tokio::test] async fn test_guess_repo_url_coveralls() { use super::guess_repo_from_url; assert_eq!( Some("https://github.com/jelmer/dulwich"), guess_repo_from_url( &"https://coveralls.io/r/jelmer/dulwich".parse().unwrap(), Some(false) ) .await .as_deref(), ); } #[tokio::test] async fn test_guess_repo_url_gitlab() { use super::guess_repo_from_url; assert_eq!( Some("https://gitlab.com/jelmer/dulwich"), guess_repo_from_url( &"https://gitlab.com/jelmer/dulwich".parse().unwrap(), Some(false) ) .await .as_deref(), ); assert_eq!( Some("https://gitlab.com/jelmer/dulwich"), guess_repo_from_url( &"https://gitlab.com/jelmer/dulwich/tags".parse().unwrap(), Some(false) ) .await .as_deref(), ); } #[tokio::test] async fn test_fixup_git_location() { use super::{fixup_git_location, VcsLocation}; assert_eq!( VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }, fixup_git_location(&VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }) .await .into_owned() ); } #[tokio::test] async fn test_browse_url_from_repo() { use super::browse_url_from_repo_url; assert_eq!( Some("https://github.com/jelmer/dulwich".parse().unwrap()), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }, Some(false) ) .await ); assert_eq!( Some("https://github.com/jelmer/dulwich".parse().unwrap()), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: None, subpath: None, }, Some(false) ) .await ); assert_eq!( Some( "https://github.com/jelmer/dulwich/tree/foo" .parse() .unwrap() ), 
browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: Some("foo".to_string()), subpath: None, }, Some(false) ) .await ); assert_eq!( Some( "https://github.com/jelmer/dulwich/tree/HEAD/foo" .parse() .unwrap() ), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: None, subpath: Some("foo".to_string()), }, Some(false) ) .await ); } #[test] fn test_fix_github_scheme() { use super::fix_github_scheme; assert_eq!( Some("https://github.com/jelmer/example"), fix_github_scheme("git://github.com/jelmer/example").as_deref() ); } #[test] fn test_fix_git_gnome_org_url() { use super::fix_git_gnome_org_url; assert_eq!( Some("https://gitlab.gnome.org/GNOME/example".to_string()), fix_git_gnome_org_url("https://git.gnome.org/browse/example") ); } #[tokio::test] async fn test_fixup() { assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("https://github.com:jelmer/dulwich").await, ); assert_eq!( "https://github.com/jelmer/dulwich -b blah", fixup_git_location("https://github.com:jelmer/dulwich -b blah").await ); assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("git://github.com/jelmer/dulwich").await, ); } #[tokio::test] async fn test_preserves() { assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("https://github.com/jelmer/dulwich").await ); } #[tokio::test] async fn test_salsa_not_https() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich", fixup_git_url("git://salsa.debian.org/jelmer/dulwich").await ); } #[tokio::test] async fn test_salsa_uses_cgit() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich", fixup_git_url("https://salsa.debian.org/cgit/jelmer/dulwich").await ); } #[tokio::test] async fn test_salsa_tree_branch() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich -b master", fixup_git_location("https://salsa.debian.org/jelmer/dulwich/tree/master").await ); } #[tokio::test] async fn 
test_strip_extra_slash() { assert_eq!( "https://salsa.debian.org/salve/auctex.git", fixup_git_url("https://salsa.debian.org//salve/auctex.git").await ); } #[tokio::test] async fn test_strip_extra_colon() { assert_eq!( "https://salsa.debian.org/mckinstry/lcov.git", fixup_git_url("https://salsa.debian.org:/mckinstry/lcov.git").await ); } #[tokio::test] async fn test_strip_username() { assert_eq!( "https://github.com/RPi-Distro/pgzero.git", fixup_git_url("git://git@github.com:RPi-Distro/pgzero.git").await ); assert_eq!( "https://salsa.debian.org/debian-astro-team/pyavm.git", fixup_git_url("https://git@salsa.debian.org:debian-astro-team/pyavm.git").await ); } #[tokio::test] async fn test_github_tree_url() { assert_eq!( "https://github.com/blah/blah -b master", fixup_git_location("https://github.com/blah/blah/tree/master").await ); } #[tokio::test] async fn test_freedesktop() { assert_eq!( "https://gitlab.freedesktop.org/xorg/xserver", fixup_git_url("git://anongit.freedesktop.org/xorg/xserver").await ); assert_eq!( "https://gitlab.freedesktop.org/xorg/lib/libSM", fixup_git_url("git://anongit.freedesktop.org/git/xorg/lib/libSM").await ); } #[tokio::test] async fn test_anongit() { assert_eq!( "https://anongit.kde.org/kdev-php.git", fixup_git_url("git://anongit.kde.org/kdev-php.git").await ); } #[tokio::test] async fn test_gnome() { assert_eq!( "https://gitlab.gnome.org/GNOME/alacarte", fixup_git_url("https://git.gnome.org/browse/alacarte").await ); } #[tokio::test] async fn test_cvs_url_parsing_error() { // Test that CVS URLs that aren't valid HTTP URLs return an error instead of panicking // This was the bug reported in upstream-ontologist-cvs-array-panic.md let cvs_url = ":extssh:_anoncvs@anoncvs.example.org:/cvs"; let result = super::VcsLocation::from_str(cvs_url).await; assert!(result.is_err(), "CVS URL should return an error, not panic"); } #[tokio::test] async fn test_sanitize_url_with_cvs() { // Test that sanitize_url handles CVS URLs gracefully without panicking 
let cvs_url = ":extssh:_anoncvs@anoncvs.example.org:/cvs"; let result = super::sanitize_url(cvs_url).await; // The URL should be returned as-is since none of the sanitizers can process it assert_eq!(result, cvs_url); } #[tokio::test] async fn test_sanitize_url_with_invalid_url() { // Test that other invalid URLs are also handled gracefully let invalid_url = "not a valid url at all"; let result = super::sanitize_url(invalid_url).await; // Should not panic, and should return the original string assert_eq!(result, invalid_url); } } upstream-ontologist-0.3.6/src/vcs_command.rs000064400000000000000000000151721046102023000173250ustar 00000000000000use crate::vcs; use crate::GuesserSettings; use log::warn; fn parse_command_bytes(command: &[u8]) -> Option> { if command.ends_with(b"\\") { warn!( "Ignoring command with line break: {}", String::from_utf8_lossy(command) ); return None; } let command_str = match String::from_utf8(command.to_vec()) { Ok(s) => s, Err(_) => { warn!( "Ignoring command with non-UTF-8: {}", String::from_utf8_lossy(command) ); return None; } }; let args: Vec<_> = shlex::split(command_str.as_str())? .into_iter() .filter(|arg| !arg.trim().is_empty()) .collect(); if args.is_empty() { None } else { Some(args) } } /// Extract the upstream repository URL from a command line that looks like /// `git clone `, `fossil clone `, `cvs -d co ` or /// `svn co `. pub fn url_from_vcs_command(command: &[u8]) -> Option { if let Some(url) = url_from_git_clone_command(command) { return Some(url); } if let Some(url) = url_from_fossil_clone_command(command) { return Some(url); } if let Some(url) = url_from_cvs_co_command(command) { return Some(url); } if let Some(url) = url_from_svn_co_command(command) { return Some(url); } None } /// Extract the upstream repository URL from a command line that looks like /// `git clone `. 
pub fn url_from_git_clone_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; if args.remove(0) != "git" || args.remove(0) != "clone" { return None; } let mut i = 0; while i < args.len() { if !args[i].starts_with('-') { i += 1; continue; } if args[i].contains('=') { args.remove(i); continue; } // arguments that take a parameter if args[i] == "-b" || args[i] == "--depth" || args[i] == "--branch" { args.remove(i); args.remove(i); continue; } args.remove(i); } let url = args .get(2) .cloned() .unwrap_or_else(|| args.first().cloned().unwrap_or_default()); if vcs::plausible_url(&url) { Some(url) } else { None } } #[test] fn test_url_from_git_clone_command() { assert_eq!( url_from_git_clone_command(b"git clone https://github.com/foo/bar foo"), Some("https://github.com/foo/bar".to_string()) ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command(b"git clone https://github.com/jelmer/blah"), ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command(b"git clone https://github.com/jelmer/blah target"), ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command(b"git clone -b foo https://github.com/jelmer/blah target"), ); assert_eq!(None, url_from_git_clone_command(b"git ls-tree")); } /// Get the upstream source from a command line that looks like /// `fossil clone `. 
pub fn url_from_fossil_clone_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; if args.remove(0) != "fossil" || args.remove(0) != "clone" { return None; } let mut i = 0; while i < args.len() { if !args[i].starts_with('-') { i += 1; continue; } if args[i].contains('=') { args.remove(i); continue; } args.remove(i); } let url = args .get(2) .cloned() .unwrap_or_else(|| args.first().cloned().unwrap_or_default()); if vcs::plausible_url(&url) { Some(url) } else { None } } #[test] fn test_url_from_fossil_clone_command() { assert_eq!( Some("https://example.com/repo/blah".to_string()), url_from_fossil_clone_command(b"fossil clone https://example.com/repo/blah blah.fossil"), ); } /// Get the upstream source from a command line that looks like /// `cvs -d co `. pub fn url_from_cvs_co_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; let i = 0; let mut cvsroot = None; let mut module = None; let mut command_seen = false; if args.remove(0) != "cvs" { return None; } while i < args.len() { if args[i] == "-d" { args.remove(i); cvsroot = Some(args.remove(i)); continue; } if args[i].starts_with("-d") { cvsroot = Some(args.remove(i)[2..].to_string()); continue; } if command_seen && !args[i].starts_with('-') { module = Some(args[i].clone()); } else if args[i] == "co" || args[i] == "checkout" { command_seen = true; } args.remove(i); } if let Some(cvsroot) = cvsroot { let url = breezyshim::location::cvs_to_url(&cvsroot); if let Some(module) = module { return Some(url.join(module.as_str()).unwrap().to_string()); } return Some(url.to_string()); } None } /// Get the upstream source from a command line that looks like /// `svn co `. 
pub fn url_from_svn_co_command(command: &[u8]) -> Option { let args = parse_command_bytes(command)?; if args[0] != "svn" || args[1] != "co" { return None; } let url_schemes = ["svn+ssh", "http", "https", "svn"]; args.into_iter().find(|arg| { url_schemes .iter() .any(|scheme| arg.starts_with(&format!("{}://", scheme))) }) } /// Guess upstream data from a Makefile or other file that contains a /// command to get the source code. pub fn guess_from_get_orig_source( path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, crate::ProviderError> { let text = std::fs::read(path)?; let mut result = Vec::new(); for line in text.split(|b| *b == b'\n') { if let Some(url) = url_from_vcs_command(line) { let certainty = if url.contains('$') { crate::Certainty::Possible } else { crate::Certainty::Likely }; result.push(crate::UpstreamDatumWithMetadata { datum: crate::UpstreamDatum::Repository(url), certainty: Some(certainty), origin: Some(path.into()), }); } } Ok(result) } upstream-ontologist-0.3.6/testdata/cabal/blah.cabal000064400000000000000000000010361046102023000204360ustar 00000000000000Name: TestPackage Version: 0.0 Cabal-Version: >= 1.2 License: BSD3 Author: Angela Author Synopsis: Small package with two programs Build-Type: Simple Bug-Reports: https://github.com/example/blah/issues Executable program1 Build-Depends: HUnit Main-Is: Main.hs Hs-Source-Dirs: prog1 Executable program2 Main-Is: Main.hs Build-Depends: HUnit Hs-Source-Dirs: prog2 Other-Modules: Utils source-repository head type: git location: https://github.com/example/blah upstream-ontologist-0.3.6/testdata/cabal/debian/control000064400000000000000000000004211046102023000213660ustar 00000000000000Source: haskell-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~) Package: libghc6-haskell-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah 
upstream-ontologist-0.3.6/testdata/cabal/expected.yaml000064400000000000000000000006031046102023000212300ustar 00000000000000Author: - !Person name: Angela Author Bug-Database: https://github.com/example/blah/issues Bug-Submit: https://github.com/example/blah/issues/new Homepage: https://github.com/example/blah Name: TestPackage Repository: https://github.com/example/blah.git Repository-Browse: https://github.com/example/blah Summary: Small package with two programs Description: blah blah License: BSD3 upstream-ontologist-0.3.6/testdata/composer.json/composer.json000064400000000000000000000034241046102023000230260ustar 00000000000000{ "name": "laravel/laravel", "type": "project", "description": "The Laravel Framework.", "keywords": ["framework", "laravel"], "license": "MIT", "require": { "php": "^8.1", "guzzlehttp/guzzle": "^7.2", "laravel/framework": "^10.0", "laravel/sanctum": "^3.2", "laravel/tinker": "^2.8" }, "require-dev": { "fakerphp/faker": "^1.9.1", "laravel/pint": "^1.0", "laravel/sail": "^1.18", "mockery/mockery": "^1.4.4", "nunomaduro/collision": "^7.0", "phpunit/phpunit": "^10.0", "spatie/laravel-ignition": "^2.0" }, "autoload": { "psr-4": { "App\\": "app/", "Database\\Factories\\": "database/factories/", "Database\\Seeders\\": "database/seeders/" } }, "autoload-dev": { "psr-4": { "Tests\\": "tests/" } }, "scripts": { "post-autoload-dump": [ "Illuminate\\Foundation\\ComposerScripts::postAutoloadDump", "@php artisan package:discover --ansi" ], "post-update-cmd": [ "@php artisan vendor:publish --tag=laravel-assets --ansi --force" ], "post-root-package-install": [ "@php -r \"file_exists('.env') || copy('.env.example', '.env');\"" ], "post-create-project-cmd": [ "@php artisan key:generate --ansi" ] }, "extra": { "laravel": { "dont-discover": [] } }, "config": { "optimize-autoloader": true, "preferred-install": "dist", "sort-packages": true, "allow-plugins": { "pestphp/pest-plugin": true } }, "minimum-stability": "stable", "prefer-stable": true } 
upstream-ontologist-0.3.6/testdata/composer.json/expected.yaml000064400000000000000000000001451046102023000227660ustar 00000000000000Name: laravel/laravel Keywords: ['framework', 'laravel'] Summary: The Laravel Framework License: MIT upstream-ontologist-0.3.6/testdata/configure/configure000075500000000000000000000006451046102023000213770ustar 00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for GNU Autoconf 2.69. # # Report bugs to . # # # Identity of this package. PACKAGE_NAME='GNU Autoconf' PACKAGE_TARNAME='autoconf' PACKAGE_VERSION='2.69' PACKAGE_STRING='GNU Autoconf 2.69' PACKAGE_BUGREPORT='bug-autoconf@gnu.org' PACKAGE_URL='http://www.gnu.org/software/autoconf/' ... upstream-ontologist-0.3.6/testdata/configure/expected.yaml000064400000000000000000000002121046102023000221430ustar 00000000000000{"Bug-Submit": "bug-autoconf@gnu.org", "Homepage": "http://www.gnu.org/software/autoconf/", "Name": "GNU Autoconf", "Version": "2.69"} upstream-ontologist-0.3.6/testdata/copyright-meta/debian/copyright000064400000000000000000000023051046102023000236130ustar 00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: Blah Upstream-Contact: An upstream contact Source: https://www.some-homepage/ X-Source: https://www.another-homepage/ X-Source-Downloaded-From: https://github.com/example/example/releases X-Upstream-Bugs: https://github.com/example/example/issues Files-Excluded: doc/manual.pdf Files: * Copyright: 2018 Somebody License: GPL-2+ License: GPL-2+ This package is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. . 
This package is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. . You should have received a copy of the GNU General Public License along with this package. If not, see . . On Debian systems, the complete text of the GNU General Public License version 2 can be found in "/usr/share/common-licenses/GPL-2". upstream-ontologist-0.3.6/testdata/copyright-meta/expected.yaml000064400000000000000000000007021046102023000231220ustar 00000000000000{"Bug-Database": "https://github.com/example/example/issues", "Bug-Submit": "https://github.com/example/example/issues/new", "Contact": "An upstream contact ", "Download": "https://github.com/example/example/releases", "Homepage": "https://github.com/example/example", "Name": "Blah", "Repository": "https://github.com/example/example.git", "Repository-Browse": "https://github.com/example/example", "License": "GPL-2+"} upstream-ontologist-0.3.6/testdata/copyright-more-on-line/debian/copyright000064400000000000000000000020271046102023000251670ustar 00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: Blah Upstream-Contact: An upstream contact Source: https://github.com/example/blah, modified to do blah. Files: * Copyright: 2018 Somebody License: GPL-2+ License: GPL-2+ This package is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. . This package is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. . 
You should have received a copy of the GNU General Public License along with this package. If not, see . . On Debian systems, the complete text of the GNU General Public License version 2 can be found in "/usr/share/common-licenses/GPL-2". upstream-ontologist-0.3.6/testdata/copyright-more-on-line/expected.yaml000064400000000000000000000005671046102023000245060ustar 00000000000000{"Bug-Database": "https://github.com/example/blah/issues", "Bug-Submit": "https://github.com/example/blah/issues/new", "Contact": "An upstream contact ", "Homepage": "https://github.com/example/blah", "Name": "Blah", "Repository": "https://github.com/example/blah.git", "Repository-Browse": "https://github.com/example/blah", "License": "GPL-2+"} upstream-ontologist-0.3.6/testdata/doap/blah.doap000064400000000000000000000070231046102023000202020ustar 00000000000000 blah blah 2006-06-11 blah is a swiss army knife for project maintainers and developers. Lorem ipsum. python Joe Maintainer 0.2.4 trunk Pacito 2007-05-20 Added RSS 2.0 feed generation from .doap release entries using Genshi or Cheetah templates. Added support for CHANGE_LOG_EMAIL_ADDRESS environment variable to blah changelog prepare. Added parsing of wiki attribute of a .DOAP project. Implemented "blah doap search" to search Google or Yahoo for your project's home page. Added support for multiple doap files to "blah doap" Added code to check the user's distribution and offer hints on how to install dependencies. 
0.2.3 trunk Ketnet 2007-04-17 0.2.2 trunk Airlines 2007-03-25 0.2.1 trunk Ambulance 2007-02-04 0.2.0 trunk Waffle Flop 2006-12-17 upstream-ontologist-0.3.6/testdata/doap/expected.yaml000064400000000000000000000007241046102023000211150ustar 00000000000000Bug-Database: http://example.com/blah/trac/newticket Contact: Joe Maintainer Homepage: http://example.com/blah/trac/ Name: blah Repository: http://example.com/blah/svn/trunk/ Repository-Browse: http://example.com/blah/trac/browser/ Description: "\n Lorem ipsum.\n " Download: http://example.com/projects/blah/ Maintainer: !Person name: Joe Maintainer Summary: "\nblah is a swiss army knife for project maintainers and developers" Wiki: http://example.com/blah/trac/ upstream-ontologist-0.3.6/testdata/gemspec/expected.yaml000064400000000000000000000006751046102023000216220ustar 00000000000000Name: jekyll License: MIT Summary: A simple, blog aware, static site generator Description: Jekyll is a simple, blog aware, static site generator. Author: - !Person name: Tom Preston-Werner Homepage: http://github.com/jekyll/jekyll Repository: https://github.com/jekyll/jekyll.git Repository-Browse: https://github.com/jekyll/jekyll Bug-Database: https://github.com/jekyll/jekyll/issues Bug-Submit: https://github.com/jekyll/jekyll/issues/new upstream-ontologist-0.3.6/testdata/gemspec/jekyll.gemspec000064400000000000000000000051701046102023000217670ustar 00000000000000# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'jekyll/version' Gem::Specification.new do |s| s.specification_version = 2 if s.respond_to? :specification_version= s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.rubygems_version = '2.2.2' s.required_ruby_version = '>= 1.9.3' s.name = 'jekyll' s.version = Jekyll::VERSION s.license = 'MIT' s.summary = "A simple, blog aware, static site generator." 
s.description = "Jekyll is a simple, blog aware, static site generator." s.authors = ["Tom Preston-Werner"] s.email = 'tom@mojombo.com' s.homepage = 'http://github.com/jekyll/jekyll' s.files = `git ls-files`.split($/) s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) } s.test_files = s.files.grep(%r{^(test|spec|features)/}) s.require_paths = ["lib"] s.rdoc_options = ["--charset=UTF-8"] s.extra_rdoc_files = %w[README.markdown LICENSE] s.add_runtime_dependency('liquid', "~> 2.5.5") s.add_runtime_dependency('classifier', "~> 1.3") s.add_runtime_dependency('listen', "~> 2.5") s.add_runtime_dependency('kramdown', "~> 1.3") s.add_runtime_dependency('pygments.rb', "~> 0.5.0") s.add_runtime_dependency('mercenary', "~> 0.3.1") s.add_runtime_dependency('safe_yaml', "~> 1.0") s.add_runtime_dependency('colorator', "~> 0.1") s.add_runtime_dependency('redcarpet', "~> 3.1") s.add_runtime_dependency('toml', '~> 0.1.0') s.add_runtime_dependency('jekyll-coffeescript', '~> 1.0') s.add_runtime_dependency('jekyll-sass-converter', '~> 1.0') s.add_development_dependency('rake', "~> 10.1") s.add_development_dependency('rdoc', "~> 3.11") s.add_development_dependency('redgreen', "~> 1.2") s.add_development_dependency('shoulda', "~> 3.5") s.add_development_dependency('rr', "~> 1.1") s.add_development_dependency('cucumber', "1.3.11") s.add_development_dependency('RedCloth', "~> 4.2") s.add_development_dependency('maruku', "0.7.0") s.add_development_dependency('rdiscount', "~> 1.6") s.add_development_dependency('launchy', "~> 2.3") s.add_development_dependency('simplecov', "~> 0.7") s.add_development_dependency('simplecov-gem-adapter', "~> 1.0.1") s.add_development_dependency('coveralls', "~> 0.7.0") s.add_development_dependency('mime-types', "~> 1.5") s.add_development_dependency('activesupport', '~> 3.2.13') s.add_development_dependency('jekyll_test_plugin') s.add_development_dependency('jekyll_test_plugin_malicious') s.add_development_dependency('rouge', '~> 1.3') end 
upstream-ontologist-0.3.6/testdata/get-orig-source/debian/get-orig-source.sh000064400000000000000000000011471046102023000253110ustar 00000000000000#!/bin/sh set -eux repack_version="$1" version="${repack_version%+repack*}" tag="v$(echo "$version" | tr '~' '.')" tmpdir=$(mktemp -d -t exampl.get-orig-source.XXXXXX) orig_dir="exampl-${version}+repack.orig" git clone -b "$tag" --depth 1 https://example.com/scm/project.git "$tmpdir/${orig_dir}" rm -rf "$tmpdir"/*.orig/src/tls/ # free, but appears to be an unused code example from gnutls export TAR_OPTIONS='--owner root --group root --mode a+rX --format ustar' tar -cJ --wildcards --exclude '.git*' -C "$tmpdir/" "${orig_dir}" \ > "../exampl_${version}+repack.orig.tar.xz" rm -rf "$tmpdir" # vim:ts=4 sw=4 et upstream-ontologist-0.3.6/testdata/get-orig-source/expected.yaml000064400000000000000000000001211046102023000231740ustar 00000000000000{"Name": "get-orig-source", "Repository": "https://example.com/scm/project.git"} upstream-ontologist-0.3.6/testdata/go/debian/control000064400000000000000000000006321046102023000207350ustar 00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any XS-Go-Import-Path: github.com/blah/blah Testsuite: autopkgtest-pkg-go Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.3.6/testdata/go/expected.yaml000064400000000000000000000005721046102023000206000ustar 00000000000000{"Bug-Database": "https://github.com/blah/blah/issues", "Bug-Submit": "https://github.com/blah/blah/issues/new", "Homepage": "https://github.com/blah/blah", "Name": "blah", "Go-Import-Path": "github.com/blah/blah", "Repository": "https://github.com/blah/blah.git", "Repository-Browse": "https://github.com/blah/blah", "Description": "blah blah", "Summary": "blah blah"} 
upstream-ontologist-0.3.6/testdata/homepage/debian/control000064400000000000000000000005771046102023000221250ustar 00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any Homepage: https://github.com/j-keck/arping Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.3.6/testdata/homepage/expected.yaml000064400000000000000000000005451046102023000217600ustar 00000000000000{"Bug-Database": "https://github.com/j-keck/arping/issues", "Bug-Submit": "https://github.com/j-keck/arping/issues/new", "Homepage": "https://github.com/j-keck/arping", "Name": "arping", "Repository": "https://github.com/j-keck/arping.git", "Repository-Browse": "https://github.com/j-keck/arping", "Description": "blah blah", "Summary": "blah blah"} upstream-ontologist-0.3.6/testdata/js/expected.yaml000064400000000000000000000011311046102023000205770ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Homepage: http://www.jacklmoore.com/autosize Demo: http://www.jacklmoore.com/autosize Name: autosize Keywords: - textarea - form - ui Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.3.6/testdata/js/package.json000064400000000000000000000017061046102023000204100ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": 
{ "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.3.6/testdata/js2/expected.yaml000064400000000000000000000011311046102023000206610ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Homepage: http://www.jacklmoore.com/autosize Demo: http://www.jacklmoore.com/autosize Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Keywords: - textarea - form - ui Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.3.6/testdata/js2/package.json000064400000000000000000000016421046102023000204710ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": "http://github.com/jackmoore/autosize.git", "dependencies": 
{}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.3.6/testdata/js3/expected.yaml000064400000000000000000000011311046102023000206620ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Keywords: - textarea - form - ui Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.3.6/testdata/js3/package.json000064400000000000000000000016201046102023000204660ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": "jackmoore/autosize.git", "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } 
upstream-ontologist-0.3.6/testdata/line-interrupted/DESCRIPTION000064400000000000000000000010261046102023000225010ustar 00000000000000Package: tsne Type: Package Title: T-Distributed Stochastic Neighbor Embedding for R (t-SNE) Version: 0.1-3 Date: 2016-06-04 Author: Justin Donaldson Maintainer: Justin Donaldson Description: A "pure R" implementation of the t-SNE algorithm. License: GPL LazyLoad: yes NeedsCompilation: no URL: https://github.com/jdonaldson/rtsne/ BugReports: https://github.com/jdonaldson/rtsne/issues Packaged: 2016-07-15 15:40:42 UTC; jdonaldson Repository: CRAN Date/Publication: 2016-07-15 20:02:16 upstream-ontologist-0.3.6/testdata/line-interrupted/debian/upstream/metadata000064400000000000000000000000651046102023000255620ustar 00000000000000Registry: - Name: conda:conda-forge Entry: r-tsneupstream-ontologist-0.3.6/testdata/line-interrupted/expected.yaml000064400000000000000000000011031046102023000234540ustar 00000000000000Archive: CRAN Bug-Database: https://github.com/jdonaldson/rtsne/issues Bug-Submit: https://github.com/jdonaldson/rtsne/issues/new Contact: Justin Donaldson Homepage: https://github.com/jdonaldson/rtsne/ Name: tsne Repository: https://github.com/jdonaldson/rtsne.git Repository-Browse: https://github.com/jdonaldson/rtsne Description: A "pure R" implementation of the t-SNE algorithm. 
License: GPL Maintainer: !Person name: Justin Donaldson email: jdonaldson@gmail.com Summary: T-Distributed Stochastic Neighbor Embedding for R (t-SNE) Version: 0.1-3 upstream-ontologist-0.3.6/testdata/machine-copyright/debian/copyright000064400000000000000000000003061046102023000242700ustar 00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: autosize Upstream-Contact: Jelmer Vernooij Source: https://salsa.debian.org/jelmer/blah upstream-ontologist-0.3.6/testdata/machine-copyright/expected.yaml000064400000000000000000000012061046102023000236000ustar 00000000000000Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Contact: Jelmer Vernooij Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Keywords: - textarea - form - ui License: MIT Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.3.6/testdata/machine-copyright/package.json000064400000000000000000000017061046102023000234060ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", 
"babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.3.6/testdata/meta.json/META.json000064400000000000000000000301451046102023000210240ustar 00000000000000{ "abstract" : "parse and validate simple name/value option pairs", "author" : [ "Somebody " ], "dynamic_config" : 0, "license" : [ "perl_5" ], "meta-spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : 2 }, "name" : "Some-Blah", "prereqs" : { "configure" : { "requires" : { "ExtUtils::MakeMaker" : "0" }, "suggests" : { "JSON::PP" : "2.27300" } }, "develop" : { "requires" : { "Test::More" : "0", "Test::Pod" : "1.41" } }, "runtime" : { "requires" : { "List::Util" : "0", "Params::Util" : "0", "Sub::Install" : "0.921", "strict" : "0", "warnings" : "0" } }, "test" : { "recommends" : { "CPAN::Meta" : "2.120900" }, "requires" : { "ExtUtils::MakeMaker" : "0", "File::Spec" : "0", "Test::More" : "0.96" } } }, "release_status" : "stable", "resources" : { "bugtracker" : { "web" : "https://github.com/blah/Blie/issues" }, "homepage" : "https://github.com/blah/Blie", "repository" : { "type" : "git", "url" : "https://github.com/blah/Blie.git", "web" : "https://github.com/blah/Blie" } }, "version" : "0.110", "x_Dist_Zilla" : { "perl" : { "version" : "5.023008" }, "plugins" : [ { "class" : "Dist::Zilla::Plugin::Git::GatherDir", "config" : { "Dist::Zilla::Plugin::GatherDir" : { "exclude_filename" : [], "exclude_match" : [], "follow_symlinks" : 0, "include_dotfiles" : 0, "prefix" : "", "prune_directory" : [], "root" : "." 
}, "Dist::Zilla::Plugin::Git::GatherDir" : { "include_untracked" : 0 } }, "name" : "@RJBS/Git::GatherDir", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::CheckPrereqsIndexed", "name" : "@RJBS/CheckPrereqsIndexed", "version" : "0.017" }, { "class" : "Dist::Zilla::Plugin::CheckExtraTests", "name" : "@RJBS/CheckExtraTests", "version" : "0.028" }, { "class" : "Dist::Zilla::Plugin::PromptIfStale", "config" : { "Dist::Zilla::Plugin::PromptIfStale" : { "check_all_plugins" : 0, "check_all_prereqs" : 0, "modules" : [ "Dist::Zilla::PluginBundle::RJBS" ], "phase" : "build", "skip" : [] } }, "name" : "@RJBS/RJBS-Outdated", "version" : "0.047" }, { "class" : "Dist::Zilla::Plugin::PromptIfStale", "config" : { "Dist::Zilla::Plugin::PromptIfStale" : { "check_all_plugins" : 1, "check_all_prereqs" : 0, "modules" : [], "phase" : "release", "skip" : [] } }, "name" : "@RJBS/CPAN-Outdated", "version" : "0.047" }, { "class" : "Dist::Zilla::Plugin::PruneCruft", "name" : "@RJBS/@Filter/PruneCruft", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ManifestSkip", "name" : "@RJBS/@Filter/ManifestSkip", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaYAML", "name" : "@RJBS/@Filter/MetaYAML", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::License", "name" : "@RJBS/@Filter/License", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Readme", "name" : "@RJBS/@Filter/Readme", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ExecDir", "name" : "@RJBS/@Filter/ExecDir", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ShareDir", "name" : "@RJBS/@Filter/ShareDir", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Manifest", "name" : "@RJBS/@Filter/Manifest", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::TestRelease", "name" : "@RJBS/@Filter/TestRelease", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ConfirmRelease", "name" : "@RJBS/@Filter/ConfirmRelease", "version" : "5.043" }, { "class" : 
"Dist::Zilla::Plugin::UploadToCPAN", "name" : "@RJBS/@Filter/UploadToCPAN", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MakeMaker", "config" : { "Dist::Zilla::Role::TestRunner" : { "default_jobs" : 9 } }, "name" : "@RJBS/MakeMaker", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::AutoPrereqs", "name" : "@RJBS/AutoPrereqs", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Git::NextVersion", "config" : { "Dist::Zilla::Plugin::Git::NextVersion" : { "first_version" : "0.001", "version_by_branch" : 0, "version_regexp" : "(?^:^([0-9]+\\.[0-9]+)$)" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/Git::NextVersion", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::PkgVersion", "name" : "@RJBS/PkgVersion", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaConfig", "name" : "@RJBS/MetaConfig", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaJSON", "name" : "@RJBS/MetaJSON", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::NextRelease", "name" : "@RJBS/NextRelease", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Test::ChangesHasContent", "name" : "@RJBS/Test::ChangesHasContent", "version" : "0.008" }, { "class" : "Dist::Zilla::Plugin::PodSyntaxTests", "name" : "@RJBS/PodSyntaxTests", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Test::ReportPrereqs", "name" : "@RJBS/Test::ReportPrereqs", "version" : "0.024" }, { "class" : "Dist::Zilla::Plugin::Prereqs", "config" : { "Dist::Zilla::Plugin::Prereqs" : { "phase" : "test", "type" : "requires" } }, "name" : "@RJBS/TestMoreWithSubtests", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::PodWeaver", "config" : { "Dist::Zilla::Plugin::PodWeaver" : { "config_plugins" : [ "@RJBS" ], "finder" : [ ":InstallModules", ":ExecFiles" ], "plugins" : [ { "class" : "Pod::Weaver::Plugin::EnsurePod5", "name" : "@CorePrep/EnsurePod5", "version" : "4.012" } ] } }, "name" : "@RJBS/PodWeaver", "version" : "4.006" }, { "class" : 
"Dist::Zilla::Plugin::GithubMeta", "name" : "@RJBS/GithubMeta", "version" : "0.54" }, { "class" : "Dist::Zilla::Plugin::Git::Check", "config" : { "Dist::Zilla::Plugin::Git::Check" : { "untracked_files" : "die" }, "Dist::Zilla::Role::Git::DirtyFiles" : { "allow_dirty" : [ "Changes", "dist.ini" ], "allow_dirty_match" : [], "changelog" : "Changes" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/@Git/Check", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Commit", "config" : { "Dist::Zilla::Plugin::Git::Commit" : { "add_files_in" : [], "commit_msg" : "v%v%n%n%c" }, "Dist::Zilla::Role::Git::DirtyFiles" : { "allow_dirty" : [ "Changes", "dist.ini" ], "allow_dirty_match" : [], "changelog" : "Changes" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." }, "Dist::Zilla::Role::Git::StringFormatter" : { "time_zone" : "local" } }, "name" : "@RJBS/@Git/Commit", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Tag", "config" : { "Dist::Zilla::Plugin::Git::Tag" : { "branch" : null, "changelog" : "Changes", "signed" : 0, "tag" : "0.110", "tag_format" : "%v", "tag_message" : "v%v" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." }, "Dist::Zilla::Role::Git::StringFormatter" : { "time_zone" : "local" } }, "name" : "@RJBS/@Git/Tag", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Push", "config" : { "Dist::Zilla::Plugin::Git::Push" : { "push_to" : [ "origin :", "github :" ], "remotes_must_exist" : 0 }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/@Git/Push", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Contributors", "config" : { "Dist::Zilla::Plugin::Git::Contributors" : { "include_authors" : 0, "include_releaser" : 1, "order_by" : "name", "paths" : [ "." 
] } }, "name" : "@RJBS/Git::Contributors", "version" : "0.020" }, { "class" : "Dist::Zilla::Plugin::FinderCode", "name" : ":NoFiles", "version" : "5.043" } ], "zilla" : { "class" : "Dist::Zilla::Dist::Builder", "config" : { "is_trial" : "0" }, "version" : "5.043" } }, "x_contributors" : [ "Contributor 1 ", "Contributor 2 " ] } upstream-ontologist-0.3.6/testdata/meta.json/expected.yaml000064400000000000000000000005641046102023000220720ustar 00000000000000{"Bug-Database": "https://github.com/blah/Blie/issues", "Bug-Submit": "https://github.com/blah/Blie/issues/new", "Homepage": "https://github.com/blah/Blie", "Name": "Some-Blah", "Repository": "https://github.com/blah/Blie.git", "Repository-Browse": "https://github.com/blah/Blie", "Summary": "parse and validate simple name/value option pairs", "Version": "0.110"} upstream-ontologist-0.3.6/testdata/meta.yml/META.yml000064400000000000000000000122001046102023000204740ustar 00000000000000--- abstract: 'do things' author: - 'Somebody ' build_requires: ExtUtils::MakeMaker: '0' File::Spec: '0' Test::More: '0.96' configure_requires: ExtUtils::MakeMaker: '0' dynamic_config: 0 generated_by: 'Dist::Zilla version 5.043, CPAN::Meta::Converter version 2.150005' license: perl meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: '1.4' name: Blah-Blieh requires: List::Util: '0' Params::Util: '0' Sub::Install: '0.921' strict: '0' warnings: '0' resources: bugtracker: https://github.com/example/Blah/issues homepage: https://github.com/example/Blah repository: https://github.com/example/Blah.git version: '0.110' x_Dist_Zilla: perl: version: '5.023008' plugins: - class: Dist::Zilla::Plugin::Git::GatherDir config: Dist::Zilla::Plugin::GatherDir: exclude_filename: [] exclude_match: [] follow_symlinks: 0 include_dotfiles: 0 prefix: '' prune_directory: [] root: . 
Dist::Zilla::Plugin::Git::GatherDir: include_untracked: 0 name: '@RJBS/Git::GatherDir' version: '2.036' - class: Dist::Zilla::Plugin::PromptIfStale config: Dist::Zilla::Plugin::PromptIfStale: check_all_plugins: 1 check_all_prereqs: 0 modules: [] phase: release skip: [] name: '@RJBS/CPAN-Outdated' version: '0.047' - class: Dist::Zilla::Plugin::MakeMaker config: Dist::Zilla::Role::TestRunner: default_jobs: 9 name: '@RJBS/MakeMaker' version: '5.043' - class: Dist::Zilla::Plugin::AutoPrereqs name: '@RJBS/AutoPrereqs' version: '5.043' - class: Dist::Zilla::Plugin::Prereqs config: Dist::Zilla::Plugin::Prereqs: phase: test type: requires name: '@RJBS/TestMoreWithSubtests' version: '5.043' - class: Dist::Zilla::Plugin::PodWeaver config: Dist::Zilla::Plugin::PodWeaver: config_plugins: - '@RJBS' finder: - ':InstallModules' - ':ExecFiles' plugins: - class: Pod::Weaver::Section::Legal name: '@RJBS/Legal' version: '4.012' - class: Pod::Weaver::Plugin::Transformer name: '@RJBS/List' version: '4.012' name: '@RJBS/PodWeaver' version: '4.006' - class: Dist::Zilla::Plugin::GithubMeta name: '@RJBS/GithubMeta' version: '0.54' - class: Dist::Zilla::Plugin::Git::Check config: Dist::Zilla::Plugin::Git::Check: untracked_files: die Dist::Zilla::Role::Git::DirtyFiles: allow_dirty: - Changes - dist.ini allow_dirty_match: [] changelog: Changes Dist::Zilla::Role::Git::Repo: repo_root: . name: '@RJBS/@Git/Check' version: '2.036' - class: Dist::Zilla::Plugin::Git::Commit config: Dist::Zilla::Plugin::Git::Commit: add_files_in: [] commit_msg: v%v%n%n%c Dist::Zilla::Role::Git::DirtyFiles: allow_dirty: - Changes - dist.ini allow_dirty_match: [] changelog: Changes Dist::Zilla::Role::Git::Repo: repo_root: . 
Dist::Zilla::Role::Git::StringFormatter: time_zone: local name: '@RJBS/@Git/Commit' version: '2.036' - class: Dist::Zilla::Plugin::Git::Tag config: Dist::Zilla::Plugin::Git::Tag: branch: ~ changelog: Changes signed: 0 tag: '0.110' tag_format: '%v' tag_message: v%v Dist::Zilla::Role::Git::Repo: repo_root: . Dist::Zilla::Role::Git::StringFormatter: time_zone: local name: '@RJBS/@Git/Tag' version: '2.036' - class: Dist::Zilla::Plugin::Git::Push config: Dist::Zilla::Plugin::Git::Push: push_to: - 'origin :' - 'github :' remotes_must_exist: 0 Dist::Zilla::Role::Git::Repo: repo_root: . name: '@RJBS/@Git/Push' version: '2.036' - class: Dist::Zilla::Plugin::Git::Contributors config: Dist::Zilla::Plugin::Git::Contributors: include_authors: 0 include_releaser: 1 order_by: name paths: - . name: '@RJBS/Git::Contributors' version: '0.020' - class: Dist::Zilla::Plugin::FinderCode name: ':ShareFiles' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':MainModule' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':AllFiles' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':NoFiles' version: '5.043' zilla: class: Dist::Zilla::Dist::Builder config: is_trial: '0' version: '5.043' x_contributors: - 'Contributor 1 ' - 'Contributor 2 ' upstream-ontologist-0.3.6/testdata/meta.yml/expected.yaml000064400000000000000000000005271046102023000217210ustar 00000000000000{"Bug-Database": "https://github.com/example/Blah/issues", "Bug-Submit": "https://github.com/example/Blah/issues/new", "Homepage": "https://github.com/example/Blah", "Name": "Blah-Blieh", "Repository": "https://github.com/example/Blah.git", "Repository-Browse": "https://github.com/example/Blah", "License": "perl", "Version": "0.110"} upstream-ontologist-0.3.6/testdata/metadata-json-cvs-array/expected.yaml000064400000000000000000000001051046102023000246170ustar 00000000000000Name: yep Repository: cvs+ssh://_anoncvs@anoncvs.example.org/cvs#yep 
upstream-ontologist-0.3.6/testdata/metadata-json-cvs-array/metadata.json000064400000000000000000000001421046102023000246060ustar 00000000000000{ "name": "yep", "source": [ ":extssh:_anoncvs@anoncvs.example.org:/cvs", "yep" ] } upstream-ontologist-0.3.6/testdata/metadata.json/expected.yaml000064400000000000000000000006521046102023000227220ustar 00000000000000Name: puppet-nginx Version: 4.3.1-rc0 Author: - !Person name: Vox Pupuli Summary: Puppet NGINX management module License: MIT Repository: https://github.com/voxpupuli/puppet-nginx.git Homepage: http://github.com/voxpupuli/puppet-nginx Bug-Database: https://github.com/voxpupuli/puppet-nginx/issues Repository-Browse: https://github.com/voxpupuli/puppet-nginx Bug-Submit: https://github.com/voxpupuli/puppet-nginx/issues/new upstream-ontologist-0.3.6/testdata/metadata.json/metadata.json000064400000000000000000000032311046102023000227040ustar 00000000000000{ "name": "puppet-nginx", "version": "4.3.1-rc0", "author": "Vox Pupuli", "summary": "Puppet NGINX management module", "license": "MIT", "source": "https://github.com/voxpupuli/puppet-nginx.git", "project_page": "http://github.com/voxpupuli/puppet-nginx", "issues_url": "https://github.com/voxpupuli/puppet-nginx/issues", "dependencies": [ { "name": "puppetlabs/concat", "version_requirement": ">= 4.1.0 < 8.0.0" }, { "name": "puppetlabs/stdlib", "version_requirement": ">= 5.0.0 < 9.0.0" } ], "requirements": [ { "name": "puppet", "version_requirement": ">= 6.1.0 < 8.0.0" } ], "operatingsystem_support": [ { "operatingsystem": "Debian", "operatingsystemrelease": [ "10", "11" ] }, { "operatingsystem": "OpenBSD" }, { "operatingsystem": "RedHat", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "CentOS", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "VirtuozzoLinux", "operatingsystemrelease": [ "6", "7" ] }, { "operatingsystem": "SLES" }, { "operatingsystem": "Solaris" }, { "operatingsystem": "AIX" }, { "operatingsystem": "FreeBSD" }, { 
"operatingsystem": "DragonFly" }, { "operatingsystem": "NetBSD" }, { "operatingsystem": "Archlinux" }, { "operatingsystem": "Ubuntu", "operatingsystemrelease": [ "18.04", "20.04", "22.04" ] } ] } upstream-ontologist-0.3.6/testdata/native/configure000075500000000000000000000006451046102023000207040ustar 00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for GNU Autoconf 2.69. # # Report bugs to . # # # Identity of this package. PACKAGE_NAME='GNU Autoconf' PACKAGE_TARNAME='autoconf' PACKAGE_VERSION='2.69' PACKAGE_STRING='GNU Autoconf 2.69' PACKAGE_BUGREPORT='bug-autoconf@gnu.org' PACKAGE_URL='http://www.gnu.org/software/autoconf/' ... upstream-ontologist-0.3.6/testdata/native/expected.yaml000064400000000000000000000002121046102023000214500ustar 00000000000000{"Bug-Submit": "bug-autoconf@gnu.org", "Homepage": "http://www.gnu.org/software/autoconf/", "Name": "GNU Autoconf", "Version": "2.69"} upstream-ontologist-0.3.6/testdata/override/debian/source/lintian-overrides000064400000000000000000000000571046102023000254260ustar 00000000000000blah source: upstream-metadata-file-is-missing upstream-ontologist-0.3.6/testdata/override/expected.yaml000064400000000000000000000011311046102023000220020ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Name: autosize Keywords: - textarea - form - ui Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.3.6/testdata/override/package.json000064400000000000000000000017061046102023000216130ustar 
00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.3.6/testdata/package.json/expected.yaml000064400000000000000000000000441046102023000225300ustar 00000000000000Name: react-fixtures Version: 0.1.0 upstream-ontologist-0.3.6/testdata/package.json/package.json000064400000000000000000000026441046102023000223410ustar 00000000000000{ "name": "react-fixtures", "version": "0.1.0", "private": true, "devDependencies": { "react-scripts": "^1.0.11" }, "dependencies": { "@babel/standalone": "^7.0.0", "art": "^0.10.3", "classnames": "^2.2.5", "codemirror": "^5.40.0", "core-js": "^2.4.1", "jest-diff": "^29.4.1", "prop-types": "^15.6.0", "query-string": "^4.2.3", "react": "^15.4.1", "react-dom": "^15.4.1", "semver": "^5.5.0" }, "scripts": { "start": "react-scripts start", "prestart": "cp ../../build/oss-stable/scheduler/umd/scheduler-unstable_mock.development.js ../../build/oss-stable/scheduler/umd/scheduler-unstable_mock.production.min.js ../../build/oss-stable/react/umd/react.development.js ../../build/oss-stable/react-dom/umd/react-dom.development.js ../../build/oss-stable/react/umd/react.production.min.js 
../../build/oss-stable/react-dom/umd/react-dom.production.min.js ../../build/oss-stable/react-dom/umd/react-dom-server.browser.development.js ../../build/oss-stable/react-dom/umd/react-dom-server.browser.production.min.js ../../build/oss-stable/react-dom/umd/react-dom-test-utils.development.js ../../build/oss-stable/react-dom/umd/react-dom-test-utils.production.min.js public/ && cp -a ../../build/oss-stable/. node_modules", "build": "react-scripts build && cp build/index.html build/200.html", "test": "react-scripts test --env=jsdom", "eject": "react-scripts eject" } } upstream-ontologist-0.3.6/testdata/package.json2/expected.yaml000064400000000000000000000007141046102023000226160ustar 00000000000000Name: autosize Version: 4.0.2 Demo: http://www.jacklmoore.com/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Homepage: http://www.jacklmoore.com/autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new upstream-ontologist-0.3.6/testdata/package.json2/package.json000064400000000000000000000006361046102023000224220ustar 00000000000000{ "name": "autosize", "version": "4.0.2", "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" } } upstream-ontologist-0.3.6/testdata/package.xml/expected.yaml000064400000000000000000000007131046102023000223620ustar 00000000000000Author: - !Person name: Author 1 email: author1@example.com - !Person name: Author 2 email: author2@example.com Bug-Database: https://github.com/example/tracker/issues Bug-Submit: 
https://github.com/example/tracker/issues/new Homepage: https://github.com/example/repo Name: blah Repository: https://github.com/example/repo.git Repository-Browse: https://github.com/example/repo Description: "\n This package does something\n " License: BSD upstream-ontologist-0.3.6/testdata/package.xml/package.xml000064400000000000000000000014241046102023000220120ustar 00000000000000 blah 1.12.4 This package does something Author 1 Author 2 Maintainer 1 Other maintainer BSD http://website.example.com/ https://github.com/example/repo https://github.com/example/tracker/issues catkin curl boost python-rospkg upstream-ontologist-0.3.6/testdata/package.xml2/expected.yaml000064400000000000000000000012101046102023000224350ustar 00000000000000Name: phalcon Summary: Phalcon is a full stack PHP framework offering low resource consumption and high performance Description: |2- Phalcon is an open source full stack framework for PHP, written as a C-extension. Phalcon is optimized for high performance. Its unique architecture allows the framework to always be memory resident, offering its functionality whenever its needed, without expensive file stats and file reads that traditional PHP frameworks employ. Version: 5.1.4 License: BSD 3-Clause License Maintainer: !Person name: Anton Vasiliev email: anton@phalcon.io Contact: Anton Vasiliev upstream-ontologist-0.3.6/testdata/package.xml2/package.xml000064400000000000000000000050421046102023000220740ustar 00000000000000 phalcon pecl.php.net Phalcon is a full stack PHP framework offering low resource consumption and high performance. Phalcon is an open source full stack framework for PHP, written as a C-extension. Phalcon is optimized for high performance. Its unique architecture allows the framework to always be memory resident, offering its functionality whenever its needed, without expensive file stats and file reads that traditional PHP frameworks employ. 
Anton Vasiliev jeckerson anton@phalcon.io yes Nikolaos Dimopoulos niden nikos@phalcon.io yes 2023-01-10 5.1.4 5.1.4 stable stable BSD 3-Clause License Full changelog can be found at: https://github.com/phalcon/cphalcon/blob/master/CHANGELOG-5.0.md ### Fixed - Fixed `Phalcon\Acl\Adapter\Memory::isAllowed` to not use the deprecated `ReflectionType::getClass` [#16255](https://github.com/phalcon/cphalcon/issues/16255) 7.4.1 8.1.99 1.10 phalcon upstream-ontologist-0.3.6/testdata/package.yaml/expected.yaml000064400000000000000000000010301046102023000225150ustar 00000000000000Name: css-text Version: 0.1.2.2 Summary: CSS parser and renderer License: MIT Homepage: https://github.com/yesodweb/css-text Bug-Submit: https://github.com/yesodweb/css-text/issues/new Bug-Database: https://github.com/yesodweb/css-text/issues Repository: https://github.com/yesodweb/css-text.git Repository-Browse: https://github.com/yesodweb/css-text Contact: Greg Weber Maintainer: !Person name: Greg Weber email: greg@gregweber.info Author: - !Person name: Michael Snoyman email: michael@snoyman.com upstream-ontologist-0.3.6/testdata/package.yaml/package.yaml000064400000000000000000000014441046102023000223200ustar 00000000000000name: css-text version: 0.1.2.2 synopsis: CSS parser and renderer. description: Please see the README and generated docs at category: Web, Yesod author: Michael Snoyman maintainer: Michael Snoyman , Greg Weber license: MIT github: yesodweb/css-text.git stability: Stable extra-source-files: - README.md - ChangeLog.md dependencies: - base >=4 && <5 - text >=0.11 - attoparsec >=0.10.2.0 library: source-dirs: src ghc-options: -Wall when: - condition: ! 
'!(impl(ghc >=8.0))' dependencies: - semigroups >=0.16.1 tests: runtests: main: runtests.hs source-dirs: test dependencies: - hspec >=1.3 - QuickCheck - css-text upstream-ontologist-0.3.6/testdata/perl/dist.ini000064400000000000000000000014361046102023000201140ustar 00000000000000name = PerlIO-eol author = Shlomi Fish license = Perl_5 copyright_holder = Audrey Tang copyright_year = 2004 [@Filter] -bundle = @Basic -remove = MakeMaker -remove = ExtraTests -remove = License -remove = Readme [AutoPrereqs] [MakeMaker::Awesome] WriteMakefile_arg = 'OBJECT' => 'eol.o' [MetaJSON] [MetaProvides::Package] [MetaResources] bugtracker.web = https://rt.cpan.org/Public/Dist/Display.html?Name=PerlIO-eol bugtracker.mailto = bug-perlio-eol@rt.cpan.org repository.url = https://github.com/shlomif/PerlIO-eol.git repository.web = https://github.com/shlomif/PerlIO-eol repository.type = git [PodCoverageTests] [PodSyntaxTests] [PruneCruft] [RewriteVersion] [RunExtraTests] [Test::CPAN::Changes] [Test::Compile] fake_home = 1 [Test::Kwalitee] [Test::TrailingSpace] upstream-ontologist-0.3.6/testdata/perl/expected.yaml000064400000000000000000000005221046102023000211300ustar 00000000000000{"Bug-Database": "https://rt.cpan.org/Public/Dist/Display.html?Name=PerlIO-eol", "Homepage": "https://github.com/shlomif/PerlIO-eol", "Name": "PerlIO-eol", "Repository": "https://github.com/shlomif/PerlIO-eol.git", "Repository-Browse": "https://github.com/shlomif/PerlIO-eol", "Copyright": "2004 Audrey Tang", "License": "Perl_5"} upstream-ontologist-0.3.6/testdata/perl-parsingerror/dist.ini000064400000000000000000000002611046102023000226220ustar 00000000000000name = IO-Blah-Blah author = Somebody license = Perl_5 [@Author::ETHER] :version = 0.097 [Prereqs] perl = 5.008 upstream-ontologist-0.3.6/testdata/perl-parsingerror/expected.yaml000064400000000000000000000000361046102023000236430ustar 00000000000000{"Name": "perl-parsingerror"} 
upstream-ontologist-0.3.6/testdata/pkg-info/PKG-INFO000064400000000000000000000054521046102023000203170ustar 00000000000000Metadata-Version: 2.1 Name: swh.loader.git Version: 2.1.1.dev2+g0002d5a.d20230125 Summary: Software Heritage git loader Home-page: https://forge.softwareheritage.org/diffusion/DLDG/ Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-git Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-git/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-loader-git ============== The Software Heritage Git Loader is a tool and a library to walk a local Git repository and inject into the SWH dataset all contained files that weren't known before. The main entry points are: - :class:`swh.loader.git.loader.GitLoader` for the main loader which can ingest either local or remote git repository's contents. This is the main implementation deployed in production. - :class:`swh.loader.git.from_disk.GitLoaderFromDisk` which ingests only local git clone repository. - :class:`swh.loader.git.loader.GitLoaderFromArchive` which ingests a git repository wrapped in an archive. License ------- This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. See top-level LICENSE file for the full text of the GNU General Public License along with this program. Dependencies ------------ ### Runtime - python3 - python3-dulwich - python3-retrying - python3-swh.core - python3-swh.model - python3-swh.storage - python3-swh.scheduler ### Test - python3-nose Requirements ------------ - implementation language, Python3 - coding guidelines: conform to PEP8 - Git access: via dulwich CLI Run ---------- You can run the loader from a remote origin (*loader*) or from an origin on disk (*from_disk*) directly by calling: ``` swh loader -C run git ``` or "git_disk". ## Configuration sample /tmp/git.yml: ``` storage: cls: remote args: url: http://localhost:5002/ ``` upstream-ontologist-0.3.6/testdata/pkg-info/expected.yaml000064400000000000000000000021371046102023000217040ustar 00000000000000Name: swh.loader.git Version: 2.1.1.dev2+g0002d5a.d20230125 Summary: Software Heritage git loader Homepage: https://forge.softwareheritage.org/diffusion/DLDG/ Author: - !Person name: Software Heritage developers email: swh-devel@inria.fr Bug-Database: https://forge.softwareheritage.org/maniphest Funding: https://www.softwareheritage.org/donate Repository: https://forge.softwareheritage.org/source/swh-loader-git Documentation: https://docs.softwareheritage.org/devel/swh-loader-git/ Description: "The Software Heritage Git Loader is a tool and a library to walk a local\nGit repository and inject into the SWH dataset all contained files that\nweren't known before.\n\nThe main entry points are:\n\n* \n:class:swh.loader.git.loader.GitLoader for the main loader which can ingest either\nlocal or remote git repository's contents. 
This is the main implementation deployed in\nproduction.\n\n* \n:class:swh.loader.git.from_disk.GitLoaderFromDisk which ingests only local git clone\nrepository.\n\n* \n:class:swh.loader.git.loader.GitLoaderFromArchive which ingests a git repository\nwrapped in an archive.\n\n" upstream-ontologist-0.3.6/testdata/poetry/expected.yaml000064400000000000000000000011621046102023000215110ustar 00000000000000Name: gi-docgen Version: '2021.1' Summary: Documentation tool for GObject-based libraries Author: - !Person name: Emmanuele Bassi email: ebassi@gnome.org License: GPL-3.0-or-later AND Apache-2.0 AND CC0-1.0 Homepage: https://gitlab.gnome.org/GNOME/gi-docgen Documentation: https://gnome.pages.gitlab.gnome.org/gi-docgen/ Keywords: - documentation - introspection - gobject - gtk Bug-Database: https://gitlab.gnome.org/GNOME/gi-docgen/issues Repository: https://gitlab.gnome.org/GNOME/gi-docgen.git Repository-Browse: https://gitlab.gnome.org/GNOME/gi-docgen Bug-Submit: https://gitlab.gnome.org/GNOME/gi-docgen/issues/new upstream-ontologist-0.3.6/testdata/poetry/pyproject.toml000064400000000000000000000033021046102023000217360ustar 00000000000000# SPDX-FileCopyrightText: 2021 GNOME Foundation # # SPDX-License-Identifier: Apache-2.0 OR GPL-3.0-or-later [tool.poetry] name = "gi-docgen" packages = [ { include = "gidocgen" }, ] version = "2021.1" description = "Documentation tool for GObject-based libraries" authors = ["Emmanuele Bassi "] license = "GPL-3.0-or-later AND Apache-2.0 AND CC0-1.0" readme = "README.md" homepage = "https://gitlab.gnome.org/GNOME/gi-docgen" documentation = "https://gnome.pages.gitlab.gnome.org/gi-docgen/" keywords = ["documentation","introspection","gobject","gtk"] classifiers = [ "Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: English", "Operating System 
:: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: BSD", "Operating System :: POSIX :: Linux", "Topic :: Desktop Environment :: Gnome", "Topic :: Software Development :: Documentation" ] [tool.poetry.urls] "Bug Tracker" = "https://gitlab.gnome.org/GNOME/gi-docgen/issues" [tool.poetry.dependencies] python = "^3.6" Markdown = "^3" MarkupSafe = "^1" Pygments = "^2" Jinja2 = "^2" toml = "^0" typogrify = "^2" [tool.poetry.dev-dependencies] coverage = "^5" green = "^3" mypy = "0.812" flake8 = "^3" black = {version = "^20.8b1", allow-prereleases = true} [tool.poetry.scripts] gi-docgen = "gidocgen.__main__:main" [tool.coverage.report] show_missing = true exclude_lines = [ "pragma: no cover", "if False" ] [build-system] requires = ["setuptools","wheel"] upstream-ontologist-0.3.6/testdata/pom/debian/control000064400000000000000000000000151046102023000211160ustar 00000000000000Source: blah upstream-ontologist-0.3.6/testdata/pom/expected.yaml000064400000000000000000000005771046102023000207730ustar 00000000000000{"Bug-Database": "https://github.com/example/blah/issues", "Bug-Submit": "https://github.com/example/blah/issues/new", "Homepage": "http://www.example.com", "License": "GNU Lesser General Public License, Version 2.1", "Name": "libblah", "Repository": "https://github.com/example/blah.git", "Repository-Browse": "https://github.com/example/blah", "Summary": "Bla lah lah lah"} upstream-ontologist-0.3.6/testdata/pom/pom.xml000064400000000000000000000062471046102023000176230ustar 00000000000000 4.0.0 com.example libblah jar ${version} libblah http://www.example.com Bla lah lah lah. 
GNU Lesser General Public License, Version 2.1 http://www.gnu.org/licenses/lgpl-2.1.txt Joe Example joe@example.com Org1 http://www.example.com/org1 scm:git:https://github.com/example/blah.git scm:git:git@github.com/example/blah.git https://github.com/example/blah ossrh https://oss.sonatype.org/content/repositories/snapshots java org.apache.maven.plugins maven-compiler-plugin 3.1 org.apache.maven.plugins maven-release-plugin 2.4.1 org.apache.maven.plugins maven-source-plugin attach-sources jar org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.sonatype.plugins nexus-staging-maven-plugin 1.6.2 true ossrh https://oss.sonatype.org/ true org.apache.maven.plugins maven-gpg-plugin 1.5 sign-artifacts verify sign upstream-ontologist-0.3.6/testdata/pubspec.yml/expected.yaml000064400000000000000000000007021046102023000224270ustar 00000000000000Name: dart Description: You don't need to be worried just because you have to support multiple screens Version: 2.1.53 Homepage: https://github.com/DisplayKit/responsive_styles Repository: https://github.com/DisplayKit/responsive_styles.git Repository-Browse: https://github.com/DisplayKit/responsive_styles Bug-Database: https://github.com/DisplayKit/responsive_styles/issues Bug-Submit: https://github.com/DisplayKit/responsive_styles/issues/new upstream-ontologist-0.3.6/testdata/pubspec.yml/pubspec.yml000064400000000000000000000006021046102023000221250ustar 00000000000000name: dart description: You don't need to be worried just because you have to support multiple screens version: 2.1.53 homepage: https://github.com/DisplayKit/responsive_styles environment: sdk: ">=2.17.5 <3.0.0" flutter: ">=1.17.0" dependencies: flutter: sdk: flutter mockito: ^5.2.0 dev_dependencies: flutter_test: sdk: flutter flutter_lints: ^2.0.0 flutter: upstream-ontologist-0.3.6/testdata/python/blah/__init__.py000064400000000000000000000000001046102023000220300ustar 
00000000000000upstream-ontologist-0.3.6/testdata/python/debian/control000064400000000000000000000005241046102023000216510ustar 00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.3.6/testdata/python/expected.yaml000064400000000000000000000007141046102023000215120ustar 00000000000000{"Bug-Database": "https://salsa.debian.org/jelmer/lintian-brush/issues", "Bug-Submit": "https://salsa.debian.org/jelmer/lintian-brush/issues/new", "Description": "blah blah", "Homepage": "https://salsa.debian.org/jelmer/lintian-brush", "Name": "blah", "Repository": "https://salsa.debian.org/jelmer/lintian-brush.git", "Repository-Browse": "https://salsa.debian.org/jelmer/lintian-brush", "Summary": "Automatic lintian issue fixer", "Version": "0.16"} upstream-ontologist-0.3.6/testdata/python/setup.py000075500000000000000000000005011046102023000205340ustar 00000000000000#!/usr/bin/python3 from setuptools import setup setup( name="blah", version="0.16", packages=["blah"], url="https://salsa.debian.org/jelmer/lintian-brush", description="Automatic lintian issue fixer", project_urls={ "Repository": "https://salsa.debian.org/jelmer/lintian-brush", }, ) upstream-ontologist-0.3.6/testdata/r-description/DESCRIPTION000064400000000000000000000056251046102023000220020ustar 00000000000000Package: readxl Title: Read Excel Files Version: 1.3.1 Authors@R: c(person(given = "Hadley", family = "Wickham", role = "aut", email = "hadley@rstudio.com", comment = c(ORCID = "0000-0003-4757-117X")), person(given = "Jennifer", family = "Bryan", role = c("aut", "cre"), email = "jenny@rstudio.com", comment = c(ORCID = "0000-0002-6983-2759")), person(given = "RStudio", role = c("cph", "fnd"), comment = "Copyright holder of all R code and all C/C++ code 
without explicit copyright attribution"), person(given = "Marcin", family = "Kalicinski", role = c("ctb", "cph"), comment = "Author of included RapidXML code"), person(given = "Komarov Valery", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Christophe Leitienne", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Bob Colbert", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "David Hoerl", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Evan Miller", role = c("ctb", "cph"), comment = "Author of included libxls code")) Description: Import excel files into R. Supports '.xls' via the embedded 'libxls' C library and '.xlsx' via the embedded 'RapidXML' C++ library . Works on Windows, Mac and Linux without external dependencies. License: GPL-3 URL: https://readxl.tidyverse.org, https://github.com/tidyverse/readxl BugReports: https://github.com/tidyverse/readxl/issues Imports: cellranger, Rcpp (>= 0.12.18), tibble (>= 1.3.1), utils Suggests: covr, knitr, rmarkdown, rprojroot (>= 1.1), testthat LinkingTo: progress, Rcpp VignetteBuilder: knitr Encoding: UTF-8 LazyData: true Note: libxls-SHA cef1393 RoxygenNote: 6.1.1 NeedsCompilation: yes Packaged: 2019-03-13 16:01:23 UTC; jenny Author: Hadley Wickham [aut] (), Jennifer Bryan [aut, cre] (), RStudio [cph, fnd] (Copyright holder of all R code and all C/C++ code without explicit copyright attribution), Marcin Kalicinski [ctb, cph] (Author of included RapidXML code), Komarov Valery [ctb, cph] (Author of included libxls code), Christophe Leitienne [ctb, cph] (Author of included libxls code), Bob Colbert [ctb, cph] (Author of included libxls code), David Hoerl [ctb, cph] (Author of included libxls code), Evan Miller [ctb, cph] (Author of included libxls code) Maintainer: Jennifer Bryan Repository: CRAN Date/Publication: 2019-03-13 16:30:02 UTC 
upstream-ontologist-0.3.6/testdata/r-description/expected.yaml000064400000000000000000000013601046102023000227510ustar 00000000000000Archive: CRAN Bug-Database: https://github.com/tidyverse/readxl/issues Bug-Submit: https://github.com/tidyverse/readxl/issues/new Contact: Jennifer Bryan Homepage: https://github.com/tidyverse/readxl Name: readxl Repository: https://github.com/tidyverse/readxl.git Repository-Browse: https://github.com/tidyverse/readxl Description: |- Import excel files into R. Supports '.xls' via the embedded 'libxls' C library and '.xlsx' via the embedded 'RapidXML' C++ library . Works on Windows, Mac and Linux without external dependencies. License: GPL-3 Maintainer: !Person name: Jennifer Bryan email: jenny@rstudio.com Summary: Read Excel Files Version: 1.3.1 upstream-ontologist-0.3.6/testdata/readme-command/README000064400000000000000000000002611046102023000212320ustar 00000000000000This is a project. You can clone it by running: git clone https://github.com/blah/blah blah Please report bugs at: https://github.com/OpenPrinting/cups-filters/issues upstream-ontologist-0.3.6/testdata/readme-command/expected.yaml000064400000000000000000000007431046102023000230440ustar 00000000000000Bug-Database: https://github.com/OpenPrinting/cups-filters/issues Bug-Submit: https://github.com/OpenPrinting/cups-filters/issues/new Homepage: https://github.com/blah/blah Name: readme-command Repository: https://github.com/blah/blah.git Repository-Browse: https://github.com/blah/blah Description: > This is a project. You can clone it by running: git clone https://github.com/blah/blah blah Please report bugs at: https://github.com/OpenPrinting/cups-filters/issues upstream-ontologist-0.3.6/testdata/readme-other/README000064400000000000000000000001621046102023000207350ustar 00000000000000This is a project. 
One of the dependencies is blah, which you can install from: https://github.com/blah/blah.git upstream-ontologist-0.3.6/testdata/readme-other/expected.yaml000064400000000000000000000006521046102023000225460ustar 00000000000000{"Bug-Database": "https://github.com/blah/blah/issues", "Bug-Submit": "https://github.com/blah/blah/issues/new", "Homepage": "https://github.com/blah/blah", "Name": "readme-other", "Repository": "https://github.com/blah/blah.git", "Repository-Browse": "https://github.com/blah/blah", "Description": "This is a project. One of the dependencies is blah, which you can install from:\n\nhttps://github.com/blah/blah.git\n"} upstream-ontologist-0.3.6/testdata/security.md/SECURITY.md000064400000000000000000000000461046102023000215410ustar 00000000000000Please send email to blah@example.com upstream-ontologist-0.3.6/testdata/security.md/debian/control000064400000000000000000000000151046102023000225710ustar 00000000000000Source: blah upstream-ontologist-0.3.6/testdata/security.md/debian/upstream/metadata000064400000000000000000000001471046102023000245370ustar 00000000000000--- Repository: https://github.com/example/blah.git Repository-Browse: https://github.com/example/blah upstream-ontologist-0.3.6/testdata/security.md/expected.yaml000064400000000000000000000000671046102023000224400ustar 00000000000000{"Name": "security.md", "Security-MD": "SECURITY.md"} upstream-ontologist-0.3.6/testdata/setup.py1/README.md000064400000000000000000000000561046102023000206320ustar 00000000000000This is a python project that does something. 
upstream-ontologist-0.3.6/testdata/setup.py1/expected.yaml000064400000000000000000000012201046102023000220320ustar 00000000000000Author: - !Person name: Jelmer Vernooij email: jelmer@jelmer.uk Bug-Database: https://github.com/jelmer/upstream-ontologist/issues Bug-Submit: https://github.com/jelmer/upstream-ontologist/issues/new Contact: Jelmer Vernooij Description: "This is a python project that does something.\n" Homepage: https://github.com/jelmer/upstream-ontologist Maintainer: !Person name: Jelmer Vernooij email: jelmer@jelmer.uk Name: upstream-ontologist Repository: https://github.com/jelmer/upstream-ontologist.git Repository-Browse: https://github.com/jelmer/upstream-ontologist Summary: tracking of upstream project metadata Version: 0.1.35 upstream-ontologist-0.3.6/testdata/setup.py1/setup.cfg000064400000000000000000000024261046102023000211770ustar 00000000000000[metadata] name = upstream-ontologist version = 0.1.35 author = Jelmer Vernooij author_email = jelmer@jelmer.uk maintainer = Jelmer Vernooij maintainer_email = jelmer@jelmer.uk url = https://github.com/jelmer/upstream-ontologist description = tracking of upstream project metadata long_description = file:README.md long_description_content_type = text/markdown project_urls = Repository=https://github.com/jelmer/upstream-ontologist.git [options] python_requires = >= 3.7 packages = upstream_ontologist upstream_ontologist.debian install_requires = python_debian typing_extensions;python_version<="3.7" ruamel.yaml # Ideally this would be an optional dependency breezy>=3.3.0 tests_require = breezy>=3.3.0 [options.entry_points] console_scripts = guess-upstream-metadata=upstream_ontologist.__main__:main autodoap=upstream_ontologist.doap:main autocodemeta=upstream_ontologist.codemeta:main [options.extras_require] cargo = tomlkit debian_changelog = python-debianbts httplib2>=0.7.8 python_debian debian_watch = debmutate[watch]>=0.59 debian_rules = debmutate pyproject = tomlkit homepage = bs4 readme = docutils lxml 
bs4 markdown pygments setup.cfg = setuptools [options.package_data] upstream_ontologist = py.typed upstream-ontologist-0.3.6/testdata/setup.py1/setup.py000075500000000000000000000001731046102023000210700ustar 00000000000000#!/usr/bin/python3 from setuptools import setup setup(data_files=[("share/man/man1", ["man/guess-upstream-metadata.1"])]) upstream-ontologist-0.3.6/testdata/travis.yml/.travis.yml000064400000000000000000000010011046102023000217130ustar 00000000000000language: go go_import_path: github.com/ethereum/go-ethereum sudo: false jobs: allow_failures: - stage: build os: osx go: 1.17.x env: - azure-osx - azure-ios - cocoapods-ios include: # This builder only tests code linters on latest version of Go - stage: lint os: linux dist: bionic go: 1.19.x env: - lint git: submodules: false # avoid cloning ethereum/tests script: - go run build/ci.go lint upstream-ontologist-0.3.6/testdata/travis.yml/expected.yaml000064400000000000000000000001011046102023000222670ustar 00000000000000Name: travis.yml Go-Import-Path: github.com/ethereum/go-ethereum upstream-ontologist-0.3.6/testdata/watch/debian/watch000064400000000000000000000002261046102023000210630ustar 00000000000000version=4 opts=repack,compression=xz,dversionmangle=s/\+ds//,repacksuffix=+ds \ https://github.com/example/example/releases .*/Toric-(\d\S*)\.tar\.gz upstream-ontologist-0.3.6/testdata/watch/expected.yaml000064400000000000000000000004721046102023000213000ustar 00000000000000{"Bug-Database": "https://github.com/example/example/issues", "Bug-Submit": "https://github.com/example/example/issues/new", "Homepage": "https://github.com/example/example", "Name": "example", "Repository": "https://github.com/example/example.git", "Repository-Browse": "https://github.com/example/example"} upstream-ontologist-0.3.6/testdata/watch-git/debian/watch000064400000000000000000000002411046102023000216410ustar 00000000000000version=3 opts="mode=git, gitmode=shallow, pgpmode=gittag" \ 
https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git \ refs/tags/(.*) debian upstream-ontologist-0.3.6/testdata/watch-git/expected.yaml000064400000000000000000000001641046102023000220570ustar 00000000000000{"Name": "watch-git", "Repository": "https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git"} upstream-ontologist-0.3.6/testdata/watch2/debian/watch000064400000000000000000000002641046102023000211470ustar 00000000000000version=4 opts=repack,compression=xz,dversionmangle=s/\+ds//,repacksuffix=+ds \ https://github.com/example/example-cat/tags \ (?:.*?/)?v?(\d[\d.]*)\.tar\.gz debian uupdate upstream-ontologist-0.3.6/testdata/watch2/expected.yaml000064400000000000000000000005221046102023000213560ustar 00000000000000{"Bug-Database": "https://github.com/example/example-cat/issues", "Bug-Submit": "https://github.com/example/example-cat/issues/new", "Homepage": "https://github.com/example/example-cat", "Name": "example-cat", "Repository": "https://github.com/example/example-cat.git", "Repository-Browse": "https://github.com/example/example-cat"}