cargo-test-support-0.7.3/.cargo_vcs_info.json0000644000000001670000000000100146310ustar { "git": { "sha1": "99624be96e9d213b0e9b1e36451271f24e4a41d8" }, "path_in_vcs": "crates/cargo-test-support" }cargo-test-support-0.7.3/Cargo.lock0000644000001374710000000000100126150ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "anstream" version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-lossy" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "934ff8719effd2023a48cf63e69536c1c3ced9d3895068f6f5cc9a4ff845e59b" dependencies = [ "anstyle", ] [[package]] name = "anstyle-parse" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ "windows-sys 0.59.0", ] 
[[package]] name = "anstyle-svg" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3607949e9f6de49ea4bafe12f5e4fd73613ebf24795e48587302a8cc0e4bb35" dependencies = [ "anstream", "anstyle", "anstyle-lossy", "html-escape", "unicode-width", ] [[package]] name = "anstyle-wincon" version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", "once_cell", "windows-sys 0.59.0", ] [[package]] name = "anyhow" version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" [[package]] name = "base16ct" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" [[package]] name = "base64ct" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bitflags" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "bstr" version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ "memchr", "serde", ] [[package]] name = "bumpalo" version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "cargo-test-macro" 
version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41d5f58450c2ff589e2b50b2c2ac13b9a9d3b92bbec0b888ae7e7dbef715f89a" [[package]] name = "cargo-test-support" version = "0.7.3" dependencies = [ "anstream", "anstyle", "anyhow", "cargo-test-macro", "cargo-util", "crates-io", "filetime", "flate2", "git2", "glob", "itertools", "pasetors", "regex", "serde", "serde_json", "snapbox", "tar", "time", "toml", "url", "walkdir", "windows-sys 0.59.0", ] [[package]] name = "cargo-util" version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d767bc85f367f6483a6072430b56f5c0d6ee7636751a21a800526d0711753d76" dependencies = [ "anyhow", "core-foundation", "filetime", "hex", "ignore", "jobserver", "libc", "miow", "same-file", "sha2", "shell-escape", "tempfile", "tracing", "walkdir", "windows-sys 0.59.0", ] [[package]] name = "cc" version = "1.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" dependencies = [ "jobserver", "libc", "shlex", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "colorchoice" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "const-oid" version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "content_inspector" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7bda66e858c683005a53a9a60c69a4aca7eeaa45d124526e389f7aec8e62f38" dependencies = [ "memchr", ] [[package]] name = "core-foundation" version = "0.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crates-io" version = "0.40.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c15b946f2bbd53f5be858ed02fcacfeb3646f3ca67b24defc276a01edd10de6" dependencies = [ "curl", "percent-encoding", "serde", "serde_json", "thiserror", "url", ] [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-deque" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto-bigint" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" 
dependencies = [ "generic-array", "rand_core", "subtle", "zeroize", ] [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "ct-codecs" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b916ba8ce9e4182696896f015e8a5ae6081b305f74690baa8465e35f5a142ea4" [[package]] name = "curl" version = "0.4.47" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9fb4d13a1be2b58f14d60adba57c9834b78c62fd86c3e76a148f732686e9265" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", "socket2", "windows-sys 0.52.0", ] [[package]] name = "curl-sys" version = "0.4.80+curl-8.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55f7df2eac63200c3ab25bde3b2268ef2ee56af3d238e76d61f01c3c49bff734" dependencies = [ "cc", "libc", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", "windows-sys 0.52.0", ] [[package]] name = "der" version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", "pem-rfc7468", "zeroize", ] [[package]] name = "deranged" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", "serde", ] [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "const-oid", "crypto-common", "subtle", ] [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "dunce" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "ecdsa" version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest", "elliptic-curve", "rfc6979", "signature", "spki", ] [[package]] name = "ed25519-compact" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9b3460f44bea8cd47f45a0c70892f1eff856d97cd55358b2f73f663789f6190" dependencies = [ "getrandom 0.2.15", ] [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "elliptic-curve" version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", "digest", "ff", "generic-array", "group", "hkdf", "pem-rfc7468", "pkcs8", "rand_core", "sec1", "subtle", "zeroize", ] [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys 0.59.0", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" 
[[package]] name = "ff" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ "rand_core", "subtle", ] [[package]] name = "fiat-crypto" version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "filetime" version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.59.0", ] [[package]] name = "flate2" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" dependencies = [ "crc32fast", "libz-sys", "miniz_oxide", ] [[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", "zeroize", ] [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] [[package]] name = "getrandom" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" dependencies = [ "cfg-if", "js-sys", "libc", "wasi 0.13.3+wasi-0.2.2", "wasm-bindgen", 
"windows-targets 0.52.6", ] [[package]] name = "git2" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fda788993cc341f69012feba8bf45c0ba4f3291fcc08e214b4d5a7332d88aff" dependencies = [ "bitflags", "libc", "libgit2-sys", "log", "openssl-probe", "openssl-sys", "url", ] [[package]] name = "glob" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "globset" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" dependencies = [ "aho-corasick", "bstr", "log", "regex-automata", "regex-syntax", ] [[package]] name = "group" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", "rand_core", "subtle", ] [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hkdf" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] [[package]] name = "hmac" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ "digest", ] [[package]] name = "html-escape" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476" dependencies = [ "utf8-width", ] [[package]] name = "icu_collections" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" dependencies = [ "displaydoc", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_locid" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ "displaydoc", "litemap", "tinystr", "writeable", "zerovec", ] [[package]] name = "icu_locid_transform" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ "displaydoc", "icu_locid", "icu_locid_transform_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_locid_transform_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" [[package]] name = "icu_normalizer" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" dependencies = [ "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "utf16_iter", "utf8_iter", "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" [[package]] name = "icu_properties" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" dependencies = [ "displaydoc", "icu_collections", "icu_locid_transform", 
"icu_properties_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_properties_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" [[package]] name = "icu_provider" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" dependencies = [ "displaydoc", "icu_locid", "icu_provider_macros", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_provider_macros" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "idna" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ "idna_adapter", "smallvec", "utf8_iter", ] [[package]] name = "idna_adapter" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ "icu_normalizer", "icu_properties", ] [[package]] name = "ignore" version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", "regex-automata", "same-file", "walkdir", "winapi-util", ] [[package]] name = "indexmap" version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "is_terminal_polyfill" version = "1.70.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jobserver" version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] name = "libc" version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libgit2-sys" version = "0.18.0+1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1a117465e7e1597e8febea8bb0c410f1c7fb93b1e1cddf34363f8390367ffec" dependencies = [ "cc", "libc", "libssh2-sys", "libz-sys", "openssl-sys", "pkg-config", ] [[package]] name = "libredox" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags", "libc", "redox_syscall", ] [[package]] name = "libssh2-sys" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "220e4f05ad4a218192533b300327f5150e809b54c4ec83b5a1d91833601811b9" dependencies = [ "cc", "libc", "libz-sys", "openssl-sys", "pkg-config", 
"vcpkg", ] [[package]] name = "libz-sys" version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "linux-raw-sys" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "log" version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "miniz_oxide" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] [[package]] name = "miow" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044" dependencies = [ "windows-sys 0.48.0", ] [[package]] name = "normalize-line-endings" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "once_cell" version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" version = "0.9.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "orion" version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd806049e71da4c4a7880466b37afdc5a4c5b35a398b0d4fd9ff5d278d3b4db9" dependencies = [ "fiat-crypto", "subtle", "zeroize", ] [[package]] name = "p384" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6" dependencies = [ "ecdsa", "elliptic-curve", "primeorder", "sha2", ] [[package]] name = "pasetors" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c54944fa25a6e7c9c5b3315f118d360cc00d555cf53bb2b2fdf32dd31c71b729" dependencies = [ "ct-codecs", "ed25519-compact", "getrandom 0.3.1", "orion", "p384", "rand_core", "regex", "serde", "serde_json", "sha2", "subtle", "time", "zeroize", ] [[package]] name = "pem-rfc7468" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" dependencies = [ "base64ct", ] [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project-lite" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" 
[[package]] name = "pkcs8" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", "spki", ] [[package]] name = "pkg-config" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "powerfmt" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "primeorder" version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro2" version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.15", ] [[package]] name = "redox_syscall" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ "bitflags", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", 
"regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rfc6979" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ "hmac", "subtle", ] [[package]] name = "rustix" version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", "windows-sys 0.59.0", ] [[package]] name = "ryu" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "schannel" version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "sec1" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct", "der", "generic-array", "pkcs8", "subtle", "zeroize", ] [[package]] name = "serde" version = "1.0.218" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "serde_spanned" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] [[package]] name = "sha2" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "shell-escape" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signature" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", "rand_core", ] [[package]] name = "similar" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" [[package]] name = "smallvec" version = "1.13.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "snapbox" version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96dcfc4581e3355d70ac2ee14cfdf81dce3d85c85f1ed9e2c1d3013f53b3436b" dependencies = [ "anstream", "anstyle", "anstyle-svg", "content_inspector", "dunce", "filetime", "normalize-line-endings", "regex", "serde", "serde_json", "similar", "snapbox-macros", "tempfile", "walkdir", ] [[package]] name = "snapbox-macros" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af" dependencies = [ "anstream", ] [[package]] name = "socket2" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "spki" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", ] [[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "synstructure" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tar" version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" dependencies = [ "filetime", "libc", ] [[package]] name = "tempfile" version = "3.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", "fastrand", "getrandom 0.3.1", "once_cell", "rustix", "windows-sys 0.59.0", ] [[package]] name = "thiserror" version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "time" version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", "num-conv", "powerfmt", "serde", "time-core", "time-macros", ] [[package]] name = "time-core" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", ] [[package]] name = "tinystr" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ "displaydoc", "zerovec", ] [[package]] name = "toml" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit", ] [[package]] name = "toml_datetime" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", "winnow", ] [[package]] name = "tracing" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-core", ] [[package]] name = "tracing-core" version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", ] [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicode-ident" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] name = "unicode-width" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "url" version = "2.5.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] [[package]] name = "utf16_iter" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" [[package]] name = "utf8-width" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" [[package]] name = "utf8_iter" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" version = "0.13.3+wasi-0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasm-bindgen" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" dependencies = [ "unicode-ident", ] [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ "windows-targets 0.48.5", ] [[package]] name = "windows-sys" version = "0.52.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-targets" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm 0.48.5", "windows_aarch64_msvc 0.48.5", "windows_i686_gnu 0.48.5", "windows_i686_msvc 0.48.5", "windows_x86_64_gnu 0.48.5", "windows_x86_64_gnullvm 0.48.5", "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" 
version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" dependencies = [ "memchr", ] [[package]] name = "wit-bindgen-rt" version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" dependencies = [ "bitflags", ] [[package]] name = "write16" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" [[package]] name = "writeable" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "yoke" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", "yoke-derive", "zerofrom", ] [[package]] name = "yoke-derive" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zerofrom" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" version = 
"0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" [[package]] name = "zerovec" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" dependencies = [ "yoke", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", "syn", ] cargo-test-support-0.7.3/Cargo.toml0000644000000050130000000000100126220ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.85" name = "cargo-test-support" version = "0.7.3" build = "build.rs" autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Testing framework for Cargo's testsuite." 
homepage = "https://github.com/rust-lang/cargo" readme = "README.md" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" [lib] name = "cargo_test_support" path = "src/lib.rs" [dependencies.anstream] version = "0.6.18" [dependencies.anstyle] version = "1.0.10" [dependencies.anyhow] version = "1.0.95" [dependencies.cargo-test-macro] version = "0.4.2" [dependencies.cargo-util] version = "0.2.20" [dependencies.crates-io] version = "0.40.10" [dependencies.filetime] version = "0.2.25" [dependencies.flate2] version = "1.0.35" features = ["zlib"] default-features = false [dependencies.git2] version = "0.20.0" [dependencies.glob] version = "0.3.2" [dependencies.itertools] version = "0.14.0" [dependencies.pasetors] version = "0.7.2" features = [ "v3", "paserk", "std", "serde", ] [dependencies.regex] version = "1.11.1" [dependencies.serde] version = "1.0.217" features = ["derive"] [dependencies.serde_json] version = "1.0.138" [dependencies.snapbox] version = "0.6.21" features = [ "diff", "dir", "term-svg", "regex", "json", ] [dependencies.tar] version = "0.4.43" default-features = false [dependencies.time] version = "0.3.37" features = [ "parsing", "formatting", "serde", ] [dependencies.toml] version = "0.8.20" [dependencies.url] version = "2.5.4" [dependencies.walkdir] version = "2.5.0" [target."cfg(windows)".dependencies.windows-sys] version = "0.59" features = ["Win32_Storage_FileSystem"] [lints.clippy] dbg_macro = "warn" disallowed_methods = "warn" print_stderr = "warn" print_stdout = "warn" self_named_module_files = "warn" [lints.clippy.all] level = "allow" priority = -2 [lints.clippy.correctness] level = "warn" priority = -1 [lints.rust] rust_2018_idioms = "warn" [lints.rustdoc] private_intra_doc_links = "allow" cargo-test-support-0.7.3/Cargo.toml.orig000064400000000000000000000016731046102023000163130ustar 00000000000000[package] name = "cargo-test-support" version = "0.7.3" edition.workspace = true rust-version = "1.85" # MSRV:1 
license.workspace = true homepage.workspace = true repository.workspace = true description = "Testing framework for Cargo's testsuite." [dependencies] anstream.workspace = true anstyle.workspace = true anyhow.workspace = true cargo-test-macro.workspace = true cargo-util.workspace = true crates-io.workspace = true filetime.workspace = true flate2.workspace = true git2.workspace = true glob.workspace = true itertools.workspace = true pasetors.workspace = true regex.workspace = true serde = { workspace = true, features = ["derive"] } serde_json.workspace = true snapbox.workspace = true tar.workspace = true time.workspace = true toml.workspace = true url.workspace = true walkdir.workspace = true [target.'cfg(windows)'.dependencies] windows-sys = { workspace = true, features = ["Win32_Storage_FileSystem"] } [lints] workspace = true cargo-test-support-0.7.3/LICENSE-APACHE000064400000000000000000000251541046102023000153500ustar 00000000000000 Apache License Version 2.0, January 2004 https://www.apache.org/licenses/LICENSE-2.0 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. cargo-test-support-0.7.3/LICENSE-MIT000064400000000000000000000017771046102023000150650ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. cargo-test-support-0.7.3/README.md000064400000000000000000000003001046102023000146650ustar 00000000000000> This crate is maintained by the Cargo team, primarily for use by Cargo > and not intended for external use. This > crate may make major changes to its APIs or be deprecated without warning. 
cargo-test-support-0.7.3/build.rs000064400000000000000000000004251046102023000150630ustar 00000000000000#![allow(clippy::disallowed_methods)] fn main() { println!("cargo:rustc-check-cfg=cfg(emulate_second_only_system)"); println!( "cargo:rustc-env=NATIVE_ARCH={}", std::env::var("TARGET").unwrap() ); println!("cargo:rerun-if-changed=build.rs"); } cargo-test-support-0.7.3/containers/apache/Dockerfile000064400000000000000000000014131046102023000207740ustar 00000000000000FROM httpd:2.4-alpine RUN apk add --no-cache git git-daemon openssl COPY bar /repos/bar WORKDIR /repos/bar RUN git config --global user.email "testuser@example.com" &&\ git config --global user.name "Test User" &&\ git config --system --add safe.directory '*' &&\ git init -b master . &&\ git add Cargo.toml src &&\ git commit -m "Initial commit" &&\ cd .. &&\ git clone --bare bar bar.git &&\ rm -rf bar WORKDIR / EXPOSE 443 WORKDIR /usr/local/apache2/conf COPY httpd-cargo.conf . RUN cat httpd-cargo.conf >> httpd.conf RUN openssl req -x509 -nodes -days 3650 -newkey rsa:2048 \ -keyout server.key -out server.crt \ -subj "/emailAddress=webmaster@example.com/C=US/ST=California/L=San Francisco/O=Rust/OU=Cargo/CN=127.0.0.1" WORKDIR / cargo-test-support-0.7.3/containers/apache/httpd-cargo.conf000064400000000000000000000005461046102023000220730ustar 00000000000000SetEnv GIT_PROJECT_ROOT /repos SetEnv GIT_HTTP_EXPORT_ALL ScriptAlias /repos /usr/libexec/git-core/git-http-backend/ LoadModule cgid_module modules/mod_cgid.so Require all granted Include conf/extra/httpd-ssl.conf LoadModule ssl_module modules/mod_ssl.so LoadModule socache_shmcb_module modules/mod_socache_shmcb.so cargo-test-support-0.7.3/containers/sshd/Dockerfile000064400000000000000000000014551046102023000205220ustar 00000000000000FROM alpine:3.21 RUN apk add --no-cache openssh git RUN ssh-keygen -A RUN addgroup -S testuser && adduser -S testuser -G testuser -s /bin/ash # NOTE: Ideally the password should be set to *, but I am uncertain how to do # 
that in alpine. It shouldn't matter since PermitEmptyPasswords is "no". RUN passwd -u testuser RUN mkdir /repos && chown testuser /repos COPY --chown=testuser:testuser bar /repos/bar USER testuser WORKDIR /repos/bar RUN git config --global user.email "testuser@example.com" &&\ git config --global user.name "Test User" &&\ git init -b master . &&\ git add Cargo.toml src &&\ git commit -m "Initial commit" &&\ cd .. &&\ git clone --bare bar bar.git &&\ rm -rf bar WORKDIR / USER root EXPOSE 22 ENTRYPOINT ["/usr/sbin/sshd", "-D", "-E", "/var/log/auth.log"] cargo-test-support-0.7.3/src/compare.rs000064400000000000000000000537771046102023000162230ustar 00000000000000//! Routines for comparing and diffing output. //! //! # Deprecated comparisons //! //! Cargo's tests are in transition from internal-only pattern and normalization routines used in //! asserts like [`crate::Execs::with_stdout_contains`] to [`assert_e2e`] and [`assert_ui`]. //! //! ## Patterns //! //! Many of these functions support special markup to assist with comparing //! text that may vary or is otherwise uninteresting for the test at hand. The //! supported patterns are: //! //! - `[..]` is a wildcard that matches 0 or more characters on the same line //! (similar to `.*` in a regex). It is non-greedy. //! - `[EXE]` optionally adds `.exe` on Windows (empty string on other //! platforms). //! - `[ROOT]` is the path to the test directory's root. //! - `[CWD]` is the working directory of the process that was run. //! - There is a wide range of substitutions (such as `[COMPILING]` or //! `[WARNING]`) to match cargo's "status" output and allows you to ignore //! the alignment. See the source of `substitute_macros` for a complete list //! of substitutions. //! - `[DIRTY-MSVC]` (only when the line starts with it) would be replaced by //! `[DIRTY]` when `cfg(target_env = "msvc")` or the line will be ignored otherwise. //! Tests that work around [issue 7358](https://github.com/rust-lang/cargo/issues/7358) //! 
can use this to avoid duplicating the `with_stderr` call like: //! `if cfg!(target_env = "msvc") {e.with_stderr("...[DIRTY]...");} else {e.with_stderr("...");}`. //! //! ## Normalization //! //! In addition to the patterns described above, the strings are normalized //! in such a way to avoid unwanted differences. The normalizations are: //! //! - Raw tab characters are converted to the string ``. This is helpful //! so that raw tabs do not need to be written in the expected string, and //! to avoid confusion of tabs vs spaces. //! - Backslashes are converted to forward slashes to deal with Windows paths. //! This helps so that all tests can be written assuming forward slashes. //! Other heuristics are applied to try to ensure Windows-style paths aren't //! a problem. //! - Carriage returns are removed, which can help when running on Windows. use crate::cross_compile::try_alternate; use crate::paths; use crate::rustc_host; use anyhow::{bail, Result}; use snapbox::Data; use snapbox::IntoData; use std::fmt; use std::path::Path; use std::path::PathBuf; use std::str; /// This makes it easier to write regex replacements that are guaranteed to only /// get compiled once macro_rules! regex { ($re:literal $(,)?) 
=> {{ static RE: std::sync::OnceLock = std::sync::OnceLock::new(); RE.get_or_init(|| regex::Regex::new($re).unwrap()) }}; } /// Assertion policy for UI tests /// /// This emphasizes showing as much content as possible at the cost of more brittleness /// /// # Snapshots /// /// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable: /// /// - `skip`: do not run the tests /// - `ignore`: run the tests but ignore their failure /// - `verify`: run the tests /// - `overwrite`: update the snapshots based on the output of the tests /// /// # Patterns /// /// - `[..]` is a character wildcard, stopping at line breaks /// - `\n...\n` is a multi-line wildcard /// - `[EXE]` matches the exe suffix for the current platform /// - `[ROOT]` matches [`paths::root()`][crate::paths::root] /// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL /// /// # Normalization /// /// In addition to the patterns described above, text is normalized /// in such a way to avoid unwanted differences. The normalizations are: /// /// - Backslashes are converted to forward slashes to deal with Windows paths. /// This helps so that all tests can be written assuming forward slashes. /// Other heuristics are applied to try to ensure Windows-style paths aren't /// a problem. /// - Carriage returns are removed, which can help when running on Windows. 
/// /// # Example /// /// ```no_run /// # use cargo_test_support::compare::assert_e2e; /// # use cargo_test_support::file; /// # let p = cargo_test_support::project().build(); /// # let stdout = ""; /// assert_e2e().eq(stdout, file!["stderr.term.svg"]); /// ``` /// ```console /// $ SNAPSHOTS=overwrite cargo test /// ``` pub fn assert_ui() -> snapbox::Assert { let mut subs = snapbox::Redactions::new(); subs.extend(MIN_LITERAL_REDACTIONS.into_iter().cloned()) .unwrap(); add_test_support_redactions(&mut subs); add_regex_redactions(&mut subs); snapbox::Assert::new() .action_env(snapbox::assert::DEFAULT_ACTION_ENV) .redact_with(subs) } /// Assertion policy for functional end-to-end tests /// /// This emphasizes showing as much content as possible at the cost of more brittleness /// /// # Snapshots /// /// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable: /// /// - `skip`: do not run the tests /// - `ignore`: run the tests but ignore their failure /// - `verify`: run the tests /// - `overwrite`: update the snapshots based on the output of the tests /// /// # Patterns /// /// - `[..]` is a character wildcard, stopping at line breaks /// - `\n...\n` is a multi-line wildcard /// - `[EXE]` matches the exe suffix for the current platform /// - `[ROOT]` matches [`paths::root()`][crate::paths::root] /// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL /// /// # Normalization /// /// In addition to the patterns described above, text is normalized /// in such a way to avoid unwanted differences. The normalizations are: /// /// - Backslashes are converted to forward slashes to deal with Windows paths. /// This helps so that all tests can be written assuming forward slashes. /// Other heuristics are applied to try to ensure Windows-style paths aren't /// a problem. /// - Carriage returns are removed, which can help when running on Windows. 
/// /// # Example /// /// ```no_run /// # use cargo_test_support::compare::assert_e2e; /// # use cargo_test_support::str; /// # let p = cargo_test_support::project().build(); /// assert_e2e().eq(p.read_lockfile(), str![]); /// ``` /// ```console /// $ SNAPSHOTS=overwrite cargo test /// ``` pub fn assert_e2e() -> snapbox::Assert { let mut subs = snapbox::Redactions::new(); subs.extend(MIN_LITERAL_REDACTIONS.into_iter().cloned()) .unwrap(); subs.extend(E2E_LITERAL_REDACTIONS.into_iter().cloned()) .unwrap(); add_test_support_redactions(&mut subs); add_regex_redactions(&mut subs); snapbox::Assert::new() .action_env(snapbox::assert::DEFAULT_ACTION_ENV) .redact_with(subs) } fn add_test_support_redactions(subs: &mut snapbox::Redactions) { let root = paths::root(); // Use `from_file_path` instead of `from_dir_path` so the trailing slash is // put in the users output, rather than hidden in the variable let root_url = url::Url::from_file_path(&root).unwrap().to_string(); subs.insert("[ROOT]", root).unwrap(); subs.insert("[ROOTURL]", root_url).unwrap(); subs.insert("[HOST_TARGET]", rustc_host()).unwrap(); if let Some(alt_target) = try_alternate() { subs.insert("[ALT_TARGET]", alt_target).unwrap(); } } fn add_regex_redactions(subs: &mut snapbox::Redactions) { // For e2e tests subs.insert( "[ELAPSED]", regex!(r"\[FINISHED\].*in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), ) .unwrap(); // for UI tests subs.insert( "[ELAPSED]", regex!(r"Finished.*in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), ) .unwrap(); // output from libtest subs.insert( "[ELAPSED]", regex!(r"; finished in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), ) .unwrap(); subs.insert( "[FILE_NUM]", regex!(r"\[(REMOVED|SUMMARY)\] (?[1-9][0-9]*) files"), ) .unwrap(); subs.insert( "[FILE_SIZE]", regex!(r"(?[0-9]+(\.[0-9]+)?([a-zA-Z]i)?)B\s"), ) .unwrap(); subs.insert( "[HASH]", regex!(r"home/\.cargo/registry/(cache|index|src)/-(?[a-z0-9]+)"), ) .unwrap(); subs.insert( "[HASH]", regex!(r"\.cargo/target/(?[0-9a-f]{2}/[0-9a-f]{14})"), ) .unwrap(); 
subs.insert("[HASH]", regex!(r"/[a-z0-9\-_]+-(?[0-9a-f]{16})")) .unwrap(); subs.insert( "[AVG_ELAPSED]", regex!(r"(?[0-9]+(\.[0-9]+)?) ns/iter"), ) .unwrap(); subs.insert( "[JITTER]", regex!(r"ns/iter \(\+/- (?[0-9]+(\.[0-9]+)?)\)"), ) .unwrap(); // Following 3 subs redact: // "1719325877.527949100s, 61549498ns after last build at 1719325877.466399602s" // "1719503592.218193216s, 1h 1s after last build at 1719499991.982681034s" // into "[DIRTY_REASON_NEW_TIME], [DIRTY_REASON_DIFF] after last build at [DIRTY_REASON_OLD_TIME]" subs.insert( "[TIME_DIFF_AFTER_LAST_BUILD]", regex!(r"(?[0-9]+(\.[0-9]+)?s, (\s?[0-9]+(\.[0-9]+)?(s|ns|h))+ after last build at [0-9]+(\.[0-9]+)?s)"), ) .unwrap(); } static MIN_LITERAL_REDACTIONS: &[(&str, &str)] = &[ ("[EXE]", std::env::consts::EXE_SUFFIX), ("[BROKEN_PIPE]", "Broken pipe (os error 32)"), ("[BROKEN_PIPE]", "The pipe is being closed. (os error 232)"), // Unix message for an entity was not found ("[NOT_FOUND]", "No such file or directory (os error 2)"), // Windows message for an entity was not found ( "[NOT_FOUND]", "The system cannot find the file specified. (os error 2)", ), ( "[NOT_FOUND]", "The system cannot find the path specified. (os error 3)", ), ("[NOT_FOUND]", "Access is denied. 
(os error 5)"), ("[NOT_FOUND]", "program not found"), // Unix message for exit status ("[EXIT_STATUS]", "exit status"), // Windows message for exit status ("[EXIT_STATUS]", "exit code"), ]; static E2E_LITERAL_REDACTIONS: &[(&str, &str)] = &[ ("[RUNNING]", " Running"), ("[COMPILING]", " Compiling"), ("[CHECKING]", " Checking"), ("[COMPLETED]", " Completed"), ("[CREATED]", " Created"), ("[CREATING]", " Creating"), ("[CREDENTIAL]", " Credential"), ("[DOWNGRADING]", " Downgrading"), ("[FINISHED]", " Finished"), ("[ERROR]", "error:"), ("[WARNING]", "warning:"), ("[NOTE]", "note:"), ("[HELP]", "help:"), ("[DOCUMENTING]", " Documenting"), ("[SCRAPING]", " Scraping"), ("[FRESH]", " Fresh"), ("[DIRTY]", " Dirty"), ("[LOCKING]", " Locking"), ("[UPDATING]", " Updating"), ("[UPGRADING]", " Upgrading"), ("[ADDING]", " Adding"), ("[REMOVING]", " Removing"), ("[REMOVED]", " Removed"), ("[UNCHANGED]", " Unchanged"), ("[DOCTEST]", " Doc-tests"), ("[PACKAGING]", " Packaging"), ("[PACKAGED]", " Packaged"), ("[DOWNLOADING]", " Downloading"), ("[DOWNLOADED]", " Downloaded"), ("[UPLOADING]", " Uploading"), ("[UPLOADED]", " Uploaded"), ("[VERIFYING]", " Verifying"), ("[ARCHIVING]", " Archiving"), ("[INSTALLING]", " Installing"), ("[REPLACING]", " Replacing"), ("[UNPACKING]", " Unpacking"), ("[SUMMARY]", " Summary"), ("[FIXED]", " Fixed"), ("[FIXING]", " Fixing"), ("[IGNORED]", " Ignored"), ("[INSTALLED]", " Installed"), ("[REPLACED]", " Replaced"), ("[BUILDING]", " Building"), ("[LOGIN]", " Login"), ("[LOGOUT]", " Logout"), ("[YANK]", " Yank"), ("[OWNER]", " Owner"), ("[MIGRATING]", " Migrating"), ("[EXECUTABLE]", " Executable"), ("[SKIPPING]", " Skipping"), ("[WAITING]", " Waiting"), ("[PUBLISHED]", " Published"), ("[BLOCKING]", " Blocking"), ("[GENERATED]", " Generated"), ("[OPENING]", " Opening"), ]; /// Checks that the given string contains the given contiguous lines /// somewhere. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. 
pub(crate) fn match_contains( expected: &str, actual: &str, redactions: &snapbox::Redactions, ) -> Result<()> { let expected = normalize_expected(expected, redactions); let actual = normalize_actual(actual, redactions); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); let a: Vec<_> = actual.lines().collect(); if e.len() == 0 { bail!("expected length must not be zero"); } for window in a.windows(e.len()) { if e == window { return Ok(()); } } bail!( "expected to find:\n\ {}\n\n\ did not find in output:\n\ {}", expected, actual ); } /// Checks that the given string does not contain the given contiguous lines /// anywhere. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub(crate) fn match_does_not_contain( expected: &str, actual: &str, redactions: &snapbox::Redactions, ) -> Result<()> { if match_contains(expected, actual, redactions).is_ok() { bail!( "expected not to find:\n\ {}\n\n\ but found in output:\n\ {}", expected, actual ); } else { Ok(()) } } /// Checks that the given string has a line that contains the given patterns, /// and that line also does not contain the `without` patterns. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. /// /// See [`crate::Execs::with_stderr_line_without`] for an example and cautions /// against using. 
pub(crate) fn match_with_without( actual: &str, with: &[String], without: &[String], redactions: &snapbox::Redactions, ) -> Result<()> { let actual = normalize_actual(actual, redactions); let norm = |s: &String| format!("[..]{}[..]", normalize_expected(s, redactions)); let with: Vec<_> = with.iter().map(norm).collect(); let without: Vec<_> = without.iter().map(norm).collect(); let with_wild: Vec<_> = with.iter().map(|w| WildStr::new(w)).collect(); let without_wild: Vec<_> = without.iter().map(|w| WildStr::new(w)).collect(); let matches: Vec<_> = actual .lines() .filter(|line| with_wild.iter().all(|with| with == line)) .filter(|line| !without_wild.iter().any(|without| without == line)) .collect(); match matches.len() { 0 => bail!( "Could not find expected line in output.\n\ With contents: {:?}\n\ Without contents: {:?}\n\ Actual stderr:\n\ {}\n", with, without, actual ), 1 => Ok(()), _ => bail!( "Found multiple matching lines, but only expected one.\n\ With contents: {:?}\n\ Without contents: {:?}\n\ Matching lines:\n\ {}\n", with, without, itertools::join(matches, "\n") ), } } /// Normalizes the output so that it can be compared against the expected value. fn normalize_actual(content: &str, redactions: &snapbox::Redactions) -> String { use snapbox::filter::Filter as _; let content = snapbox::filter::FilterPaths.filter(content.into_data()); let content = snapbox::filter::FilterNewlines.filter(content); let content = content.render().expect("came in as a String"); let content = redactions.redact(&content); content } /// Normalizes the expected string so that it can be compared against the actual output. 
fn normalize_expected(content: &str, redactions: &snapbox::Redactions) -> String { use snapbox::filter::Filter as _; let content = snapbox::filter::FilterPaths.filter(content.into_data()); let content = snapbox::filter::FilterNewlines.filter(content); // Remove any conditionally absent redactions like `[EXE]` let content = content.render().expect("came in as a String"); let content = redactions.clear_unused(&content); content.into_owned() } /// A single line string that supports `[..]` wildcard matching. struct WildStr<'a> { has_meta: bool, line: &'a str, } impl<'a> WildStr<'a> { fn new(line: &'a str) -> WildStr<'a> { WildStr { has_meta: line.contains("[..]"), line, } } } impl PartialEq<&str> for WildStr<'_> { fn eq(&self, other: &&str) -> bool { if self.has_meta { meta_cmp(self.line, other) } else { self.line == *other } } } fn meta_cmp(a: &str, mut b: &str) -> bool { for (i, part) in a.split("[..]").enumerate() { match b.find(part) { Some(j) => { if i == 0 && j != 0 { return false; } b = &b[j + part.len()..]; } None => return false, } } b.is_empty() || a.ends_with("[..]") } impl fmt::Display for WildStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.line) } } impl fmt::Debug for WildStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.line) } } pub struct InMemoryDir { files: Vec<(PathBuf, Data)>, } impl InMemoryDir { pub fn paths(&self) -> impl Iterator { self.files.iter().map(|(p, _)| p.as_path()) } #[track_caller] pub fn assert_contains(&self, expected: &Self) { use std::fmt::Write as _; let assert = assert_e2e(); let mut errs = String::new(); for (path, expected_data) in &expected.files { let actual_data = self .files .iter() .find_map(|(p, d)| (path == p).then(|| d.clone())) .unwrap_or_else(|| Data::new()); if let Err(err) = assert.try_eq(Some(&path.display()), actual_data, expected_data.clone()) { let _ = write!(&mut errs, "{err}"); } } if !errs.is_empty() { panic!("{errs}") } } } 
impl FromIterator<(P, D)> for InMemoryDir where P: Into, D: IntoData, { fn from_iter>(files: I) -> Self { let files = files .into_iter() .map(|(p, d)| (p.into(), d.into_data())) .collect(); Self { files } } } impl From<[(P, D); N]> for InMemoryDir where P: Into, D: IntoData, { fn from(files: [(P, D); N]) -> Self { let files = files .into_iter() .map(|(p, d)| (p.into(), d.into_data())) .collect(); Self { files } } } impl From> for InMemoryDir where P: Into, D: IntoData, { fn from(files: std::collections::HashMap) -> Self { let files = files .into_iter() .map(|(p, d)| (p.into(), d.into_data())) .collect(); Self { files } } } impl From> for InMemoryDir where P: Into, D: IntoData, { fn from(files: std::collections::BTreeMap) -> Self { let files = files .into_iter() .map(|(p, d)| (p.into(), d.into_data())) .collect(); Self { files } } } impl From<()> for InMemoryDir { fn from(_files: ()) -> Self { let files = Vec::new(); Self { files } } } /// Create an `impl _ for InMemoryDir` for a generic tuple /// /// Must pass in names for each tuple parameter for /// - internal variable name /// - `Path` type /// - `Data` type macro_rules! impl_from_tuple_for_inmemorydir { ($($var:ident $path:ident $data:ident),+) => { impl<$($path: Into, $data: IntoData),+> From<($(($path, $data)),+ ,)> for InMemoryDir { fn from(files: ($(($path, $data)),+,)) -> Self { let ($($var),+ ,) = files; let files = [$(($var.0.into(), $var.1.into_data())),+]; files.into() } } }; } /// Extend `impl_from_tuple_for_inmemorydir` to generate for the specified tuple and all smaller /// tuples macro_rules! 
impl_from_tuples_for_inmemorydir { ($var1:ident $path1:ident $data1:ident, $($var:ident $path:ident $data:ident),+) => { impl_from_tuples_for_inmemorydir!(__impl $var1 $path1 $data1; $($var $path $data),+); }; (__impl $($var:ident $path:ident $data:ident),+; $var1:ident $path1:ident $data1:ident $(,$var2:ident $path2:ident $data2:ident)*) => { impl_from_tuple_for_inmemorydir!($($var $path $data),+); impl_from_tuples_for_inmemorydir!(__impl $($var $path $data),+, $var1 $path1 $data1; $($var2 $path2 $data2),*); }; (__impl $($var:ident $path:ident $data:ident),+;) => { impl_from_tuple_for_inmemorydir!($($var $path $data),+); } } // Generate for tuples of size `1..=7` impl_from_tuples_for_inmemorydir!( s1 P1 D1, s2 P2 D2, s3 P3 D3, s4 P4 D4, s5 P5 D5, s6 P6 D6, s7 P7 D7 ); #[cfg(test)] mod test { use snapbox::assert_data_eq; use snapbox::prelude::*; use snapbox::str; use super::*; #[test] fn wild_str_cmp() { for (a, b) in &[ ("a b", "a b"), ("a[..]b", "a b"), ("a[..]", "a b"), ("[..]", "a b"), ("[..]b", "a b"), ] { assert_eq!(WildStr::new(a), b); } for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] { assert_ne!(WildStr::new(a), b); } } #[test] fn redact_elapsed_time() { let mut subs = snapbox::Redactions::new(); add_regex_redactions(&mut subs); assert_data_eq!( subs.redact("[FINISHED] `release` profile [optimized] target(s) in 5.5s"), str!["[FINISHED] `release` profile [optimized] target(s) in [ELAPSED]s"].raw() ); assert_data_eq!( subs.redact("[FINISHED] `release` profile [optimized] target(s) in 1m 05s"), str!["[FINISHED] `release` profile [optimized] target(s) in [ELAPSED]s"].raw() ); } } cargo-test-support-0.7.3/src/containers.rs000064400000000000000000000217611046102023000167260ustar 00000000000000//! Support for testing using Docker containers. //! //! The [`Container`] type is a builder for configuring a container to run. //! After you call `launch`, you can use the [`ContainerHandle`] to interact //! with the running container. //! //! 
Tests using containers must use `#[cargo_test(container_test)]` to disable //! them unless the `CARGO_CONTAINER_TESTS` environment variable is set. use cargo_util::ProcessBuilder; use std::collections::HashMap; use std::io::Read; use std::path::PathBuf; use std::process::Command; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Mutex; use tar::Header; /// A builder for configuring a container to run. pub struct Container { /// The host directory that forms the basis of the Docker image. build_context: PathBuf, /// Files to copy over to the image. files: Vec, } /// A handle to a running container. /// /// You can use this to interact with the container. pub struct ContainerHandle { /// The name of the container. name: String, /// The IP address of the container. /// /// NOTE: This is currently unused, but may be useful so I left it in. /// This can only be used on Linux. macOS and Windows docker doesn't allow /// direct connection to the container. pub ip_address: String, /// Port mappings of `container_port` to `host_port` for ports exposed via EXPOSE. pub port_mappings: HashMap, } impl Container { pub fn new(context_dir: &str) -> Container { assert!(std::env::var_os("CARGO_CONTAINER_TESTS").is_some()); let mut build_context = PathBuf::from(env!("CARGO_MANIFEST_DIR")); build_context.push("containers"); build_context.push(context_dir); Container { build_context, files: Vec::new(), } } /// Adds a file to be copied into the container. pub fn file(mut self, file: MkFile) -> Self { self.files.push(file); self } /// Starts the container. 
pub fn launch(mut self) -> ContainerHandle { static NEXT_ID: AtomicUsize = AtomicUsize::new(0); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); let name = format!("cargo_test_{id}"); remove_if_exists(&name); self.create_container(&name); self.copy_files(&name); self.start_container(&name); let info = self.container_inspect(&name); let ip_address = if cfg!(target_os = "linux") { info[0]["NetworkSettings"]["IPAddress"] .as_str() .unwrap() .to_string() } else { // macOS and Windows can't make direct connections to the // container. It only works through exposed ports or mapped ports. "127.0.0.1".to_string() }; let port_mappings = self.port_mappings(&info); self.wait_till_ready(&port_mappings); ContainerHandle { name, ip_address, port_mappings, } } fn create_container(&self, name: &str) { static BUILD_LOCK: Mutex<()> = Mutex::new(()); let image_base = self.build_context.file_name().unwrap(); let image_name = format!("cargo-test-{}", image_base.to_str().unwrap()); let _lock = BUILD_LOCK .lock() .map_err(|_| panic!("previous docker build failed, unable to run test")); ProcessBuilder::new("docker") .args(&["build", "--tag", image_name.as_str()]) .arg(&self.build_context) .exec_with_output() .unwrap(); ProcessBuilder::new("docker") .args(&[ "container", "create", "--publish-all", "--rm", "--name", name, ]) .arg(image_name) .exec_with_output() .unwrap(); } fn copy_files(&mut self, name: &str) { if self.files.is_empty() { return; } let mut ar = tar::Builder::new(Vec::new()); ar.sparse(false); let files = std::mem::replace(&mut self.files, Vec::new()); for mut file in files { ar.append_data(&mut file.header, &file.path, file.contents.as_slice()) .unwrap(); } let ar = ar.into_inner().unwrap(); ProcessBuilder::new("docker") .args(&["cp", "-"]) .arg(format!("{name}:/")) .stdin(ar) .exec_with_output() .unwrap(); } fn start_container(&self, name: &str) { ProcessBuilder::new("docker") .args(&["container", "start"]) .arg(name) .exec_with_output() .unwrap(); } fn 
container_inspect(&self, name: &str) -> serde_json::Value { let output = ProcessBuilder::new("docker") .args(&["inspect", name]) .exec_with_output() .unwrap(); serde_json::from_slice(&output.stdout).unwrap() } /// Returns the mapping of container_port->host_port for ports that were /// exposed with EXPOSE. fn port_mappings(&self, info: &serde_json::Value) -> HashMap { info[0]["NetworkSettings"]["Ports"] .as_object() .unwrap() .iter() .map(|(key, value)| { let key = key .strip_suffix("/tcp") .expect("expected TCP only ports") .parse() .unwrap(); let values = value.as_array().unwrap(); let value = values .iter() .find(|value| value["HostIp"].as_str().unwrap() == "0.0.0.0") .expect("expected localhost IP"); let host_port = value["HostPort"].as_str().unwrap().parse().unwrap(); (key, host_port) }) .collect() } fn wait_till_ready(&self, port_mappings: &HashMap) { for port in port_mappings.values() { let mut ok = false; for _ in 0..30 { match std::net::TcpStream::connect(format!("127.0.0.1:{port}")) { Ok(_) => { ok = true; break; } Err(e) => { if e.kind() != std::io::ErrorKind::ConnectionRefused { panic!("unexpected localhost connection error: {e:?}"); } std::thread::sleep(std::time::Duration::new(1, 0)); } } } if !ok { panic!("no listener on localhost port {port}"); } } } } impl ContainerHandle { /// Executes a program inside a running container. pub fn exec(&self, args: &[&str]) -> std::process::Output { ProcessBuilder::new("docker") .args(&["container", "exec", &self.name]) .args(args) .exec_with_output() .unwrap() } /// Returns the contents of a file inside the container. 
pub fn read_file(&self, path: &str) -> String { let output = ProcessBuilder::new("docker") .args(&["cp", &format!("{}:{}", self.name, path), "-"]) .exec_with_output() .unwrap(); let mut ar = tar::Archive::new(output.stdout.as_slice()); let mut entry = ar.entries().unwrap().next().unwrap().unwrap(); let mut contents = String::new(); entry.read_to_string(&mut contents).unwrap(); contents } } impl Drop for ContainerHandle { fn drop(&mut self) { // To help with debugging, this will keep the container alive. if std::env::var_os("CARGO_CONTAINER_TEST_KEEP").is_some() { return; } remove_if_exists(&self.name); } } fn remove_if_exists(name: &str) { if let Err(e) = Command::new("docker") .args(&["container", "rm", "--force", name]) .output() { panic!("failed to run docker: {e}"); } } /// Builder for configuring a file to copy into a container. pub struct MkFile { path: String, contents: Vec, header: Header, } impl MkFile { /// Defines a file to add to the container. /// /// This should be passed to `Container::file`. /// /// The path is the path inside the container to create the file. pub fn path(path: &str) -> MkFile { MkFile { path: path.to_string(), contents: Vec::new(), header: Header::new_gnu(), } } pub fn contents(mut self, contents: impl Into>) -> Self { self.contents = contents.into(); self.header.set_size(self.contents.len() as u64); self } pub fn mode(mut self, mode: u32) -> Self { self.header.set_mode(mode); self } pub fn uid(mut self, uid: u64) -> Self { self.header.set_uid(uid); self } pub fn gid(mut self, gid: u64) -> Self { self.header.set_gid(gid); self } } cargo-test-support-0.7.3/src/cross_compile.rs000064400000000000000000000233621046102023000174210ustar 00000000000000//! Support for cross-compile tests with the `--target` flag. //! //! Note that cross-testing is very limited. You need to install the //! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa). //! //! 
Set `CFG_DISABLE_CROSS_TESTS=1` environment variable to disable these tests //! if you are unable to use the alternate target. Unfortunately 32-bit //! support on macOS is going away, so macOS users are out of luck. //! //! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI. use crate::{basic_manifest, main_file, project}; use cargo_util::ProcessError; use std::env; use std::fmt::Write; use std::process::{Command, Output}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Once; /// Whether or not the resulting cross binaries can run on the host. static CAN_RUN_ON_HOST: AtomicBool = AtomicBool::new(false); pub fn disabled() -> bool { // First, disable if requested. match env::var("CFG_DISABLE_CROSS_TESTS") { Ok(ref s) if *s == "1" => return true, _ => {} } // Cross tests are only tested to work on macos, linux, and MSVC windows. if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) { return true; } // It's not particularly common to have a cross-compilation setup, so // try to detect that before we fail a bunch of tests through no fault // of the user. 
static CAN_BUILD_CROSS_TESTS: AtomicBool = AtomicBool::new(false); static CHECK: Once = Once::new(); let cross_target = alternate(); let run_cross_test = || -> anyhow::Result { let p = project() .at("cross_test") .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0")) .file("src/main.rs", &main_file(r#""testing!""#, &[])) .build(); let build_result = p .cargo("build --target") .arg(&cross_target) .exec_with_output(); if build_result.is_ok() { CAN_BUILD_CROSS_TESTS.store(true, Ordering::SeqCst); } let result = p .cargo("run --target") .arg(&cross_target) .exec_with_output(); if result.is_ok() { CAN_RUN_ON_HOST.store(true, Ordering::SeqCst); } build_result }; CHECK.call_once(|| { drop(run_cross_test()); }); if CAN_BUILD_CROSS_TESTS.load(Ordering::SeqCst) { // We were able to compile a simple project, so the user has the // necessary `std::` bits installed. Therefore, tests should not // be disabled. return false; } // We can't compile a simple cross project. We want to warn the user // by failing a single test and having the remainder of the cross tests // pass. We don't use `std::sync::Once` here because panicking inside its // `call_once` method would poison the `Once` instance, which is not what // we want. static HAVE_WARNED: AtomicBool = AtomicBool::new(false); if HAVE_WARNED.swap(true, Ordering::SeqCst) { // We are some other test and somebody else is handling the warning. // Just disable the current test. return true; } // We are responsible for warning the user, which we do by panicking. let mut message = format!( " Cannot cross compile to {}. This failure can be safely ignored. If you would prefer to not see this failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\". Alternatively, you can install the necessary libraries to enable cross compilation tests. Cross compilation tests depend on your host platform. 
", cross_target ); if cfg!(target_os = "linux") { message.push_str( " Linux cross tests target i686-unknown-linux-gnu, which requires the ability to build and run 32-bit targets. This requires the 32-bit libraries to be installed. For example, on Ubuntu, run `sudo apt install gcc-multilib` to install the necessary libraries. ", ); } else if cfg!(all(target_os = "macos", target_arch = "aarch64")) { message.push_str( " macOS on aarch64 cross tests to target x86_64-apple-darwin. This should be natively supported via Xcode, nothing additional besides the rustup target should be needed. ", ); } else if cfg!(target_os = "macos") { message.push_str( " macOS on x86_64 cross tests to target x86_64-apple-ios, which requires the iOS SDK to be installed. This should be included with Xcode automatically. If you are using the Xcode command line tools, you'll need to install the full Xcode app (from the Apple App Store), and switch to it with this command: sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer Some cross-tests want to *run* the executables on the host. These tests will be ignored if this is not possible. On macOS, this means you need an iOS simulator installed to run these tests. To install a simulator, open Xcode, go to preferences > Components, and download the latest iOS simulator. ", ); } else if cfg!(target_os = "windows") { message.push_str( " Windows cross tests target i686-pc-windows-msvc, which requires the ability to build and run 32-bit targets. This should work automatically if you have properly installed Visual Studio build tools. ", ); } else { // The check at the top should prevent this. panic!("platform should have been skipped"); } let rustup_available = Command::new("rustup").output().is_ok(); if rustup_available { write!( message, " Make sure that the appropriate `rustc` target is installed with rustup: rustup target add {} ", cross_target ) .unwrap(); } else { write!( message, " rustup does not appear to be installed. 
Make sure that the appropriate `rustc` target is installed for the target `{}`. ", cross_target ) .unwrap(); } // Show the actual error message. match run_cross_test() { Ok(_) => message.push_str("\nUh oh, second run succeeded?\n"), Err(err) => match err.downcast_ref::() { Some(proc_err) => write!(message, "\nTest error: {}\n", proc_err).unwrap(), None => write!(message, "\nUnexpected non-process error: {}\n", err).unwrap(), }, } panic!("{}", message); } /// The arch triple of the test-running host. pub fn native() -> &'static str { env!("NATIVE_ARCH") } pub fn native_arch() -> &'static str { match native() .split("-") .next() .expect("Target triple has unexpected format") { "x86_64" => "x86_64", "aarch64" => "aarch64", "i686" => "x86", _ => panic!("This test should be gated on cross_compile::disabled."), } } /// The alternate target-triple to build with. /// /// Only use this function on tests that check `cross_compile::disabled`. pub fn alternate() -> &'static str { try_alternate().expect("This test should be gated on cross_compile::disabled.") } /// A possible alternate target-triple to build with. pub(crate) fn try_alternate() -> Option<&'static str> { if cfg!(all(target_os = "macos", target_arch = "aarch64")) { Some("x86_64-apple-darwin") } else if cfg!(target_os = "macos") { Some("x86_64-apple-ios") } else if cfg!(target_os = "linux") { Some("i686-unknown-linux-gnu") } else if cfg!(all(target_os = "windows", target_env = "msvc")) { Some("i686-pc-windows-msvc") } else if cfg!(all(target_os = "windows", target_env = "gnu")) { Some("i686-pc-windows-gnu") } else { None } } pub fn alternate_arch() -> &'static str { if cfg!(target_os = "macos") { "x86_64" } else { "x86" } } /// A target-triple that is neither the host nor the target. /// /// Rustc may not work with it and it's alright, apart from being a /// valid target triple it is supposed to be used only as a /// placeholder for targets that should not be considered. 
pub fn unused() -> &'static str { "wasm32-unknown-unknown" } /// Whether or not the host can run cross-compiled executables. pub fn can_run_on_host() -> bool { if disabled() { return false; } // macos is currently configured to cross compile to x86_64-apple-ios // which requires a simulator to run. Azure's CI image appears to have the // SDK installed, but are not configured to launch iOS images with a // simulator. if cfg!(target_os = "macos") { if CAN_RUN_ON_HOST.load(Ordering::SeqCst) { return true; } else { println!("Note: Cannot run on host, skipping."); return false; } } else { assert!(CAN_RUN_ON_HOST.load(Ordering::SeqCst)); return true; } } /// Check if the given target has been installed. /// /// Generally [`disabled`] should be used to check if cross-compilation is allowed. /// And [`alternate`] to get the cross target. /// /// You should only use this as a last resort to skip tests, /// because it doesn't report skipped tests as ignored. pub fn requires_target_installed(target: &str) -> bool { let has_target = std::process::Command::new("rustup") .args(["target", "list", "--installed"]) .output() .ok() .map(|output| { String::from_utf8(output.stdout) .map(|stdout| stdout.contains(target)) .unwrap_or_default() }) .unwrap_or_default(); if !has_target { let msg = format!("to run this test, run `rustup target add {target} --toolchain `",); if cargo_util::is_ci() { panic!("{msg}"); } else { eprintln!("{msg}"); } } has_target } cargo-test-support-0.7.3/src/git.rs000064400000000000000000000170231046102023000153400ustar 00000000000000//! # Git Testing Support //! //! ## Creating a git dependency //! [`new()`] is an easy way to create a new git repository containing a //! project that you can then use as a dependency. It will automatically add all //! the files you specify in the project and commit them to the repository. //! //! ### Example: //! //! ```no_run //! # use cargo_test_support::project; //! # use cargo_test_support::basic_manifest; //! 
# use cargo_test_support::git; //! let git_project = git::new("dep1", |project| { //! project //! .file("Cargo.toml", &basic_manifest("dep1", "1.0.0")) //! .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#) //! }); //! //! // Use the `url()` method to get the file url to the new repository. //! let p = project() //! .file("Cargo.toml", &format!(r#" //! [package] //! name = "a" //! version = "1.0.0" //! //! [dependencies] //! dep1 = {{ git = '{}' }} //! "#, git_project.url())) //! .file("src/lib.rs", "extern crate dep1;") //! .build(); //! ``` //! //! ## Manually creating repositories //! //! [`repo()`] can be used to create a [`RepoBuilder`] which provides a way of //! adding files to a blank repository and committing them. //! //! If you want to then manipulate the repository (such as adding new files or //! tags), you can use `git2::Repository::open()` to open the repository and then //! use some of the helper functions in this file to interact with the repository. use crate::{paths::CargoPathExt, project, Project, ProjectBuilder, SymlinkBuilder}; use std::fs; use std::path::{Path, PathBuf}; use std::sync::Once; use url::Url; /// Manually construct a [`Repository`] /// /// See also [`new`], [`repo`] #[must_use] pub struct RepoBuilder { repo: git2::Repository, files: Vec, } /// See [`new`] pub struct Repository(git2::Repository); /// Create a [`RepoBuilder`] to build a new git repository. /// /// Call [`RepoBuilder::build()`] to finalize and create the repository. pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) } impl RepoBuilder { pub fn init(p: &Path) -> RepoBuilder { t!(fs::create_dir_all(p.parent().unwrap())); let repo = init(p); RepoBuilder { repo, files: Vec::new(), } } /// Add a file to the repository. 
pub fn file(self, path: &str, contents: &str) -> RepoBuilder { let mut me = self.nocommit_file(path, contents); me.files.push(PathBuf::from(path)); me } /// Create a symlink to a directory pub fn nocommit_symlink_dir>(self, dst: T, src: T) -> Self { let workdir = self.repo.workdir().unwrap(); SymlinkBuilder::new_dir(workdir.join(dst), workdir.join(src)).mk(); self } /// Add a file that will be left in the working directory, but not added /// to the repository. pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder { let dst = self.repo.workdir().unwrap().join(path); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, contents)); self } /// Create the repository and commit the new files. pub fn build(self) -> Repository { { let mut index = t!(self.repo.index()); for file in self.files.iter() { t!(index.add_path(file)); } t!(index.write()); let id = t!(index.write_tree()); let tree = t!(self.repo.find_tree(id)); let sig = t!(self.repo.signature()); t!(self .repo .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[])); } let RepoBuilder { repo, .. } = self; Repository(repo) } } impl Repository { pub fn root(&self) -> &Path { self.0.workdir().unwrap() } pub fn url(&self) -> Url { self.0.workdir().unwrap().to_url() } pub fn revparse_head(&self) -> String { self.0 .revparse_single("HEAD") .expect("revparse HEAD") .id() .to_string() } } /// *(`git2`)* Initialize a new repository at the given path. 
pub fn init(path: &Path) -> git2::Repository { default_search_path(); let repo = t!(git2::Repository::init(path)); default_repo_cfg(&repo); repo } fn default_search_path() { use crate::paths::global_root; use git2::{opts::set_search_path, ConfigLevel}; static INIT: Once = Once::new(); INIT.call_once(|| unsafe { let path = global_root().join("blank_git_search_path"); t!(set_search_path(ConfigLevel::System, &path)); t!(set_search_path(ConfigLevel::Global, &path)); t!(set_search_path(ConfigLevel::XDG, &path)); t!(set_search_path(ConfigLevel::ProgramData, &path)); }) } fn default_repo_cfg(repo: &git2::Repository) { let mut cfg = t!(repo.config()); t!(cfg.set_str("user.email", "foo@bar.com")); t!(cfg.set_str("user.name", "Foo Bar")); } /// Create a new [`Project`] in a git [`Repository`] pub fn new(name: &str, callback: F) -> Project where F: FnOnce(ProjectBuilder) -> ProjectBuilder, { new_repo(name, callback).0 } /// Create a new [`Project`] with access to the [`Repository`] pub fn new_repo(name: &str, callback: F) -> (Project, git2::Repository) where F: FnOnce(ProjectBuilder) -> ProjectBuilder, { let mut git_project = project().at(name); git_project = callback(git_project); let git_project = git_project.build(); let repo = init(&git_project.root()); add(&repo); commit(&repo); (git_project, repo) } /// *(`git2`)* Add all files in the working directory to the git index pub fn add(repo: &git2::Repository) { let mut index = t!(repo.index()); t!(index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)); t!(index.write()); } /// *(`git2`)* Add a git submodule to the repository pub fn add_submodule<'a>( repo: &'a git2::Repository, url: &str, path: &Path, ) -> git2::Submodule<'a> { let path = path.to_str().unwrap().replace(r"\", "/"); let mut s = t!(repo.submodule(url, Path::new(&path), false)); let subrepo = t!(s.open()); default_repo_cfg(&subrepo); t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")); let mut origin = 
t!(subrepo.find_remote("origin")); t!(origin.fetch(&Vec::::new(), None, None)); t!(subrepo.checkout_head(None)); t!(s.add_finalize()); s } /// *(`git2`)* Commit changes to the git repository pub fn commit(repo: &git2::Repository) -> git2::Oid { let tree_id = t!(t!(repo.index()).write_tree()); let sig = t!(repo.signature()); let mut parents = Vec::new(); if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) { parents.push(t!(repo.find_commit(parent))) } let parents = parents.iter().collect::>(); t!(repo.commit( Some("HEAD"), &sig, &sig, "test", &t!(repo.find_tree(tree_id)), &parents )) } /// *(`git2`)* Create a new tag in the git repository pub fn tag(repo: &git2::Repository, name: &str) { let head = repo.head().unwrap().target().unwrap(); t!(repo.tag( name, &t!(repo.find_object(head, None)), &t!(repo.signature()), "make a new tag", false )); } /// Returns true if gitoxide is globally activated. /// /// That way, tests that normally use `git2` can transparently use `gitoxide`. pub fn cargo_uses_gitoxide() -> bool { std::env::var_os("__CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2").map_or(false, |value| value == "1") } cargo-test-support-0.7.3/src/install.rs000064400000000000000000000016241046102023000162230ustar 00000000000000//! Helpers for testing `cargo install` use std::env::consts::EXE_SUFFIX; use std::path::Path; /// Used by `cargo install` tests to assert an executable binary /// has been installed. 
Example usage: /// ```no_run /// use cargo_test_support::install::assert_has_installed_exe; /// use cargo_test_support::paths; /// /// assert_has_installed_exe(paths::cargo_home(), "foo"); /// ``` #[track_caller] pub fn assert_has_installed_exe>(path: P, name: &'static str) { assert!(check_has_installed_exe(path, name)); } #[track_caller] pub fn assert_has_not_installed_exe>(path: P, name: &'static str) { assert!(!check_has_installed_exe(path, name)); } fn check_has_installed_exe>(path: P, name: &'static str) -> bool { path.as_ref().join("bin").join(exe(name)).is_file() } /// `$name$EXE` pub fn exe(name: &str) -> String { format!("{}{}", name, EXE_SUFFIX) } cargo-test-support-0.7.3/src/lib.rs000064400000000000000000001517301046102023000153270ustar 00000000000000//! # Cargo test support. //! //! See for a guide on writing tests. //! //! There are two places you can find API documentation //! //! - : //! targeted at external tool developers testing cargo-related code //! - Released with every rustc release //! - : //! targeted at cargo contributors //! - Updated on each update of the `cargo` submodule in `rust-lang/rust` //! //! > This crate is maintained by the Cargo team, primarily for use by Cargo //! > and not intended for external use. This //! > crate may make major changes to its APIs or be deprecated without warning. //! //! # Example //! //! ```rust,no_run //! use cargo_test_support::prelude::*; //! use cargo_test_support::str; //! use cargo_test_support::project; //! //! #[cargo_test] //! fn some_test() { //! let p = project() //! .file("src/main.rs", r#"fn main() { println!("hi!"); }"#) //! .build(); //! //! p.cargo("run --bin foo") //! .with_stderr_data(str![[r#" //! [COMPILING] foo [..] //! [FINISHED] [..] //! [RUNNING] `target/debug/foo` //! "#]]) //! .with_stdout_data(str![["hi!"]]) //! .run(); //! } //! 
``` #![allow(clippy::disallowed_methods)] #![allow(clippy::print_stderr)] #![allow(clippy::print_stdout)] use std::env; use std::ffi::OsStr; use std::fmt::Write; use std::fs; use std::os; use std::path::{Path, PathBuf}; use std::process::{Command, Output}; use std::sync::OnceLock; use std::thread::JoinHandle; use std::time::{self, Duration}; use anyhow::{bail, Result}; use cargo_util::{is_ci, ProcessError}; use snapbox::IntoData as _; use url::Url; use self::paths::CargoPathExt; /// Unwrap a `Result` with a useful panic message /// /// # Example /// /// ```rust /// use cargo_test_support::t; /// t!(std::fs::read_to_string("Cargo.toml")); /// ``` #[macro_export] macro_rules! t { ($e:expr) => { match $e { Ok(e) => e, Err(e) => $crate::panic_error(&format!("failed running {}", stringify!($e)), e), } }; } pub use cargo_util::ProcessBuilder; pub use snapbox::file; pub use snapbox::str; pub use snapbox::utils::current_dir; /// `panic!`, reporting the specified error , see also [`t!`] #[track_caller] pub fn panic_error(what: &str, err: impl Into) -> ! { let err = err.into(); pe(what, err); #[track_caller] fn pe(what: &str, err: anyhow::Error) -> ! 
{ let mut result = format!("{}\nerror: {}", what, err); for cause in err.chain().skip(1) { let _ = writeln!(result, "\nCaused by:"); let _ = write!(result, "{}", cause); } panic!("\n{}", result); } } pub use cargo_test_macro::cargo_test; pub mod compare; pub mod containers; pub mod cross_compile; pub mod git; pub mod install; pub mod paths; pub mod publish; pub mod registry; pub mod tools; pub mod prelude { pub use crate::cargo_test; pub use crate::paths::CargoPathExt; pub use crate::ArgLineCommandExt; pub use crate::CargoCommandExt; pub use crate::ChannelChangerCommandExt; pub use crate::TestEnvCommandExt; pub use snapbox::IntoData; } /* * * ===== Builders ===== * */ #[derive(PartialEq, Clone)] struct FileBuilder { path: PathBuf, body: String, executable: bool, } impl FileBuilder { pub fn new(path: PathBuf, body: &str, executable: bool) -> FileBuilder { FileBuilder { path, body: body.to_string(), executable: executable, } } fn mk(&mut self) { if self.executable { let mut path = self.path.clone().into_os_string(); write!(path, "{}", env::consts::EXE_SUFFIX).unwrap(); self.path = path.into(); } self.dirname().mkdir_p(); fs::write(&self.path, &self.body) .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e)); #[cfg(unix)] if self.executable { use std::os::unix::fs::PermissionsExt; let mut perms = fs::metadata(&self.path).unwrap().permissions(); let mode = perms.mode(); perms.set_mode(mode | 0o111); fs::set_permissions(&self.path, perms).unwrap(); } } fn dirname(&self) -> &Path { self.path.parent().unwrap() } } #[derive(PartialEq, Clone)] struct SymlinkBuilder { dst: PathBuf, src: PathBuf, src_is_dir: bool, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst, src, src_is_dir: false, } } pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst, src, src_is_dir: true, } } #[cfg(unix)] fn mk(&self) { self.dirname().mkdir_p(); t!(os::unix::fs::symlink(&self.dst, 
&self.src)); } #[cfg(windows)] fn mk(&mut self) { self.dirname().mkdir_p(); if self.src_is_dir { t!(os::windows::fs::symlink_dir(&self.dst, &self.src)); } else { if let Some(ext) = self.dst.extension() { if ext == env::consts::EXE_EXTENSION { self.src.set_extension(ext); } } t!(os::windows::fs::symlink_file(&self.dst, &self.src)); } } fn dirname(&self) -> &Path { self.src.parent().unwrap() } } /// A cargo project to run tests against. /// /// See [`ProjectBuilder`] or [`Project::from_template`] to get started. pub struct Project { root: PathBuf, } /// Create a project to run tests against /// /// - Creates a [`basic_manifest`] if one isn't supplied /// /// To get started, see: /// - [`project`] /// - [`project_in`] /// - [`project_in_home`] /// - [`Project::from_template`] #[must_use] pub struct ProjectBuilder { root: Project, files: Vec, symlinks: Vec, no_manifest: bool, } impl ProjectBuilder { /// Root of the project /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.root() } /// Project's debug dir /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.root.target_debug_dir() } /// Create project in `root` pub fn new(root: PathBuf) -> ProjectBuilder { ProjectBuilder { root: Project { root }, files: vec![], symlinks: vec![], no_manifest: false, } } /// Create project, relative to [`paths::root`] pub fn at>(mut self, path: P) -> Self { self.root = Project { root: paths::root().join(path), }; self } /// Adds a file to the project. pub fn file>(mut self, path: B, body: &str) -> Self { self._file(path.as_ref(), body, false); self } /// Adds an executable file to the project. pub fn executable>(mut self, path: B, body: &str) -> Self { self._file(path.as_ref(), body, true); self } fn _file(&mut self, path: &Path, body: &str, executable: bool) { self.files.push(FileBuilder::new( self.root.root().join(path), body, executable, )); } /// Adds a symlink to a file to the project. 
pub fn symlink(mut self, dst: impl AsRef, src: impl AsRef) -> Self { self.symlinks.push(SymlinkBuilder::new( self.root.root().join(dst), self.root.root().join(src), )); self } /// Create a symlink to a directory pub fn symlink_dir(mut self, dst: impl AsRef, src: impl AsRef) -> Self { self.symlinks.push(SymlinkBuilder::new_dir( self.root.root().join(dst), self.root.root().join(src), )); self } pub fn no_manifest(mut self) -> Self { self.no_manifest = true; self } /// Creates the project. pub fn build(mut self) -> Project { // First, clean the directory if it already exists self.rm_root(); // Create the empty directory self.root.root().mkdir_p(); let manifest_path = self.root.root().join("Cargo.toml"); if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) { self._file( Path::new("Cargo.toml"), &basic_manifest("foo", "0.0.1"), false, ) } let past = time::SystemTime::now() - Duration::new(1, 0); let ftime = filetime::FileTime::from_system_time(past); for file in self.files.iter_mut() { file.mk(); if is_coarse_mtime() { // Place the entire project 1 second in the past to ensure // that if cargo is called multiple times, the 2nd call will // see targets as "fresh". Without this, if cargo finishes in // under 1 second, the second call will see the mtime of // source == mtime of output and consider it dirty. filetime::set_file_times(&file.path, ftime, ftime).unwrap(); } } for symlink in self.symlinks.iter_mut() { symlink.mk(); } let ProjectBuilder { root, .. 
} = self; root } fn rm_root(&self) { self.root.root().rm_rf() } } impl Project { /// Copy the test project from a fixed state pub fn from_template(template_path: impl AsRef) -> Self { let root = paths::root(); let project_root = root.join("case"); snapbox::dir::copy_template(template_path.as_ref(), &project_root).unwrap(); Self { root: project_root } } /// Root of the project /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.clone() } /// Project's target dir /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target` pub fn build_dir(&self) -> PathBuf { self.root().join("target") } /// Project's debug dir /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.build_dir().join("debug") } /// File url for root /// /// ex: `file://$CARGO_TARGET_TMPDIR/cit/t0/foo` pub fn url(&self) -> Url { use paths::CargoPathExt; self.root().to_url() } /// Path to an example built as a library. /// /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug/examples/libex.rlib` pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf { self.target_debug_dir() .join("examples") .join(paths::get_lib_filename(name, kind)) } /// Path to a dynamic library. /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.dylib` pub fn dylib(&self, name: &str) -> PathBuf { self.target_debug_dir().join(format!( "{}{name}{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX )) } /// Path to a debug binary. /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug/foo` pub fn bin(&self, b: &str) -> PathBuf { self.build_dir() .join("debug") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } /// Path to a release binary. 
/// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/release/foo` pub fn release_bin(&self, b: &str) -> PathBuf { self.build_dir() .join("release") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } /// Path to a debug binary for a specific target triple. /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/i686-apple-darwin/debug/foo` pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { self.build_dir().join(target).join("debug").join(&format!( "{}{}", b, env::consts::EXE_SUFFIX )) } /// Returns an iterator of paths within [`Project::root`] matching the glob pattern pub fn glob>(&self, pattern: P) -> glob::Paths { let pattern = self.root().join(pattern); glob::glob(pattern.to_str().expect("failed to convert pattern to str")) .expect("failed to glob") } /// Overwrite a file with new content /// // # Example: /// /// ```no_run /// # let p = cargo_test_support::project().build(); /// p.change_file("src/lib.rs", "fn new_fn() {}"); /// ``` pub fn change_file(&self, path: impl AsRef, body: &str) { FileBuilder::new(self.root().join(path), body, false).mk() } /// Creates a `ProcessBuilder` to run a program in the project /// and wrap it in an Execs to assert on the execution. /// /// # Example: /// /// ```no_run /// # use cargo_test_support::str; /// # let p = cargo_test_support::project().build(); /// p.process(&p.bin("foo")) /// .with_stdout_data(str!["bar\n"]) /// .run(); /// ``` pub fn process>(&self, program: T) -> Execs { let mut p = process(program); p.cwd(self.root()); execs().with_process_builder(p) } /// Creates a `ProcessBuilder` to run cargo. /// /// Arguments can be separated by spaces. /// /// For `cargo run`, see [`Project::rename_run`]. 
/// /// # Example: /// /// ```no_run /// # let p = cargo_test_support::project().build(); /// p.cargo("build --bin foo").run(); /// ``` pub fn cargo(&self, cmd: &str) -> Execs { let cargo = cargo_exe(); let mut execs = self.process(&cargo); if let Some(ref mut p) = execs.process_builder { p.env("CARGO", cargo); p.arg_line(cmd); } execs } /// Safely run a process after `cargo build`. /// /// Windows has a problem where a process cannot be reliably /// be replaced, removed, or renamed immediately after executing it. /// The action may fail (with errors like Access is denied), or /// it may succeed, but future attempts to use the same filename /// will fail with "Already Exists". /// /// If you have a test that needs to do `cargo run` multiple /// times, you should instead use `cargo build` and use this /// method to run the executable. Each time you call this, /// use a new name for `dst`. /// See rust-lang/cargo#5481. pub fn rename_run(&self, src: &str, dst: &str) -> Execs { let src = self.bin(src); let dst = self.bin(dst); fs::rename(&src, &dst) .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e)); self.process(dst) } /// Returns the contents of `Cargo.lock`. pub fn read_lockfile(&self) -> String { self.read_file("Cargo.lock") } /// Returns the contents of a path in the project root pub fn read_file(&self, path: impl AsRef) -> String { let full = self.root().join(path); fs::read_to_string(&full) .unwrap_or_else(|e| panic!("could not read file {}: {}", full.display(), e)) } /// Modifies `Cargo.toml` to remove all commented lines. 
pub fn uncomment_root_manifest(&self) { let contents = self.read_file("Cargo.toml").replace("#", ""); fs::write(self.root().join("Cargo.toml"), contents).unwrap(); } pub fn symlink(&self, src: impl AsRef, dst: impl AsRef) { let src = self.root().join(src.as_ref()); let dst = self.root().join(dst.as_ref()); #[cfg(unix)] { if let Err(e) = os::unix::fs::symlink(&src, &dst) { panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e); } } #[cfg(windows)] { if src.is_dir() { if let Err(e) = os::windows::fs::symlink_dir(&src, &dst) { panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e); } } else { if let Err(e) = os::windows::fs::symlink_file(&src, &dst) { panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e); } } } } } /// Generates a project layout, see [`ProjectBuilder`] pub fn project() -> ProjectBuilder { ProjectBuilder::new(paths::root().join("foo")) } /// Generates a project layout in given directory, see [`ProjectBuilder`] pub fn project_in(dir: impl AsRef) -> ProjectBuilder { ProjectBuilder::new(paths::root().join(dir).join("foo")) } /// Generates a project layout inside our fake home dir, see [`ProjectBuilder`] pub fn project_in_home(name: impl AsRef) -> ProjectBuilder { ProjectBuilder::new(paths::home().join(name)) } // === Helpers === /// Generate a `main.rs` printing the specified text /// /// ```rust /// # use cargo_test_support::main_file; /// # mod dep { /// # fn bar() -> &'static str { /// # "world" /// # } /// # } /// main_file( /// r#""hello {}", dep::bar()"#, /// &[] /// ); /// ``` pub fn main_file(println: &str, externed_deps: &[&str]) -> String { let mut buf = String::new(); for dep in externed_deps.iter() { buf.push_str(&format!("extern crate {};\n", dep)); } buf.push_str("fn main() { println!("); buf.push_str(println); buf.push_str("); }\n"); buf } /// Path to the cargo binary pub fn cargo_exe() -> PathBuf { snapbox::cmd::cargo_bin("cargo") } /// This is the raw output from the process. 
/// /// This is similar to `std::process::Output`, however the `status` is /// translated to the raw `code`. This is necessary because `ProcessError` /// does not have access to the raw `ExitStatus` because `ProcessError` needs /// to be serializable (for the Rustc cache), and `ExitStatus` does not /// provide a constructor. pub struct RawOutput { pub code: Option, pub stdout: Vec, pub stderr: Vec, } /// Run and verify a [`ProcessBuilder`] /// /// Construct with /// - [`execs`] /// - [`cargo_process`] /// - [`Project`] methods #[must_use] #[derive(Clone)] pub struct Execs { ran: bool, process_builder: Option, expect_stdin: Option, expect_exit_code: Option, expect_stdout_data: Option, expect_stderr_data: Option, expect_stdout_contains: Vec, expect_stderr_contains: Vec, expect_stdout_not_contains: Vec, expect_stderr_not_contains: Vec, expect_stderr_with_without: Vec<(Vec, Vec)>, stream_output: bool, assert: snapbox::Assert, } impl Execs { pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs { self.process_builder = Some(p); self } } /// # Configure assertions impl Execs { /// Verifies that stdout is equal to the given lines. /// /// See [`compare::assert_e2e`] for assertion details. /// ///
/// /// Prefer passing in [`str!`] for `expected` to get snapshot updating. /// /// If `format!` is needed for content that changes from run to run that you don't care about, /// consider whether you could have [`compare::assert_e2e`] redact the content. /// If nothing else, a wildcard (`[..]`, `...`) may be useful. /// /// However, `""` may be preferred for intentionally empty output so people don't accidentally /// bless a change. /// ///
/// /// # Examples /// /// ```no_run /// use cargo_test_support::prelude::*; /// use cargo_test_support::str; /// use cargo_test_support::execs; /// /// execs().with_stdout_data(str![r#" /// Hello world! /// "#]); /// ``` /// /// Non-deterministic compiler output /// ```no_run /// use cargo_test_support::prelude::*; /// use cargo_test_support::str; /// use cargo_test_support::execs; /// /// execs().with_stdout_data(str![r#" /// [COMPILING] foo /// [COMPILING] bar /// "#].unordered()); /// ``` /// /// jsonlines /// ```no_run /// use cargo_test_support::prelude::*; /// use cargo_test_support::str; /// use cargo_test_support::execs; /// /// execs().with_stdout_data(str![r#" /// [ /// {}, /// {} /// ] /// "#].is_json().against_jsonlines()); /// ``` pub fn with_stdout_data(&mut self, expected: impl snapbox::IntoData) -> &mut Self { self.expect_stdout_data = Some(expected.into_data()); self } /// Verifies that stderr is equal to the given lines. /// /// See [`compare::assert_e2e`] for assertion details. /// ///
    ///
    /// Prefer passing in [`str!`] for `expected` to get snapshot updating.
    ///
    /// If `format!` is needed for content that changes from run to run that you don't care about,
    /// consider whether you could have [`compare::assert_e2e`] redact the content.
    /// If nothing else, a wildcard (`[..]`, `...`) may be useful.
    ///
    /// However, `""` may be preferred for intentionally empty output so people don't accidentally
    /// bless a change.
    ///
    /// </div>
/// /// # Examples /// /// ```no_run /// use cargo_test_support::prelude::*; /// use cargo_test_support::str; /// use cargo_test_support::execs; /// /// execs().with_stderr_data(str![r#" /// Hello world! /// "#]); /// ``` /// /// Non-deterministic compiler output /// ```no_run /// use cargo_test_support::prelude::*; /// use cargo_test_support::str; /// use cargo_test_support::execs; /// /// execs().with_stderr_data(str![r#" /// [COMPILING] foo /// [COMPILING] bar /// "#].unordered()); /// ``` /// /// jsonlines /// ```no_run /// use cargo_test_support::prelude::*; /// use cargo_test_support::str; /// use cargo_test_support::execs; /// /// execs().with_stderr_data(str![r#" /// [ /// {}, /// {} /// ] /// "#].is_json().against_jsonlines()); /// ``` pub fn with_stderr_data(&mut self, expected: impl snapbox::IntoData) -> &mut Self { self.expect_stderr_data = Some(expected.into_data()); self } /// Writes the given lines to stdin. pub fn with_stdin(&mut self, expected: S) -> &mut Self { self.expect_stdin = Some(expected.to_string()); self } /// Verifies the exit code from the process. /// /// This is not necessary if the expected exit code is `0`. pub fn with_status(&mut self, expected: i32) -> &mut Self { self.expect_exit_code = Some(expected); self } /// Removes exit code check for the process. /// /// By default, the expected exit code is `0`. pub fn without_status(&mut self) -> &mut Self { self.expect_exit_code = None; self } /// Verifies that stdout contains the given contiguous lines somewhere in /// its output. /// /// See [`compare`] for supported patterns. /// ///
/// /// Prefer [`Execs::with_stdout_data`] where possible. /// - `expected` cannot be snapshotted /// - `expected` can end up being ambiguous, causing the assertion to succeed when it should fail /// ///
pub fn with_stdout_contains(&mut self, expected: S) -> &mut Self { self.expect_stdout_contains.push(expected.to_string()); self } /// Verifies that stderr contains the given contiguous lines somewhere in /// its output. /// /// See [`compare`] for supported patterns. /// ///
/// /// Prefer [`Execs::with_stderr_data`] where possible. /// - `expected` cannot be snapshotted /// - `expected` can end up being ambiguous, causing the assertion to succeed when it should fail /// ///
pub fn with_stderr_contains(&mut self, expected: S) -> &mut Self { self.expect_stderr_contains.push(expected.to_string()); self } /// Verifies that stdout does not contain the given contiguous lines. /// /// See [`compare`] for supported patterns. /// /// See note on [`Self::with_stderr_does_not_contain`]. /// ///
/// /// Prefer [`Execs::with_stdout_data`] where possible. /// - `expected` cannot be snapshotted /// - The absence of `expected` can either mean success or that the string being looked for /// changed. /// /// To mitigate this, consider matching this up with /// [`Execs::with_stdout_contains`]. /// ///
pub fn with_stdout_does_not_contain(&mut self, expected: S) -> &mut Self { self.expect_stdout_not_contains.push(expected.to_string()); self } /// Verifies that stderr does not contain the given contiguous lines. /// /// See [`compare`] for supported patterns. /// ///
/// /// Prefer [`Execs::with_stdout_data`] where possible. /// - `expected` cannot be snapshotted /// - The absence of `expected` can either mean success or that the string being looked for /// changed. /// /// To mitigate this, consider either matching this up with /// [`Execs::with_stdout_contains`] or replace it /// with [`Execs::with_stderr_line_without`]. /// ///
pub fn with_stderr_does_not_contain(&mut self, expected: S) -> &mut Self { self.expect_stderr_not_contains.push(expected.to_string()); self } /// Verify that a particular line appears in stderr with and without the /// given substrings. Exactly one line must match. /// /// The substrings are matched as `contains`. /// ///
/// /// Prefer [`Execs::with_stdout_data`] where possible. /// - `with` cannot be snapshotted /// - The absence of `without` can either mean success or that the string being looked for /// changed. /// ///
/// /// # Example /// /// ```no_run /// use cargo_test_support::execs; /// /// execs().with_stderr_line_without( /// &[ /// "[RUNNING] `rustc --crate-name build_script_build", /// "-C opt-level=3", /// ], /// &["-C debuginfo", "-C incremental"], /// ); /// ``` /// /// This will check that a build line includes `-C opt-level=3` but does /// not contain `-C debuginfo` or `-C incremental`. /// pub fn with_stderr_line_without( &mut self, with: &[S], without: &[S], ) -> &mut Self { let with = with.iter().map(|s| s.to_string()).collect(); let without = without.iter().map(|s| s.to_string()).collect(); self.expect_stderr_with_without.push((with, without)); self } } /// # Configure the process impl Execs { /// Forward subordinate process stdout/stderr to the terminal. /// Useful for printf debugging of the tests. /// CAUTION: CI will fail if you leave this in your test! #[allow(unused)] pub fn stream(&mut self) -> &mut Self { self.stream_output = true; self } pub fn arg>(&mut self, arg: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.arg(arg); } self } pub fn args>(&mut self, args: &[T]) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.args(args); } self } pub fn cwd>(&mut self, path: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { if let Some(cwd) = p.get_cwd() { let new_path = cwd.join(path.as_ref()); p.cwd(new_path); } else { p.cwd(path); } } self } pub fn env>(&mut self, key: &str, val: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env(key, val); } self } pub fn env_remove(&mut self, key: &str) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env_remove(key); } self } /// Enables nightly features for testing /// /// The list of reasons should be why nightly cargo is needed. If it is /// because of an unstable feature put the name of the feature as the reason, /// e.g. 
`&["print-im-a-teapot"]` pub fn masquerade_as_nightly_cargo(&mut self, reasons: &[&str]) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.masquerade_as_nightly_cargo(reasons); } self } /// Overrides the crates.io URL for testing. /// /// Can be used for testing crates-io functionality where alt registries /// cannot be used. pub fn replace_crates_io(&mut self, url: &Url) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS", url.as_str()); } self } pub fn overlay_registry(&mut self, url: &Url, path: &str) -> &mut Self { if let Some(ref mut p) = self.process_builder { let env_value = format!("{}={}", url, path); p.env( "__CARGO_TEST_DEPENDENCY_CONFUSION_VULNERABILITY_DO_NOT_USE_THIS", env_value, ); } self } pub fn enable_split_debuginfo_packed(&mut self) -> &mut Self { self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO", "packed"); self } pub fn enable_mac_dsym(&mut self) -> &mut Self { if cfg!(target_os = "macos") { return self.enable_split_debuginfo_packed(); } self } } /// # Run and verify the process impl Execs { pub fn exec_with_output(&mut self) -> Result { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); p.exec_with_output() } pub fn build_command(&mut self) -> Command { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); p.build_command() } #[track_caller] pub fn run(&mut self) -> RawOutput { self.ran = true; let mut p = (&self.process_builder).clone().unwrap(); if let Some(stdin) = self.expect_stdin.take() { p.stdin(stdin); } match self.match_process(&p) { Err(e) => panic_error(&format!("test failed running {}", p), e), Ok(output) => output, } } /// Runs the process, checks the expected output, and returns the first /// JSON object on stdout. 
#[track_caller] pub fn run_json(&mut self) -> serde_json::Value { let output = self.run(); serde_json::from_slice(&output.stdout).unwrap_or_else(|e| { panic!( "\nfailed to parse JSON: {}\n\ output was:\n{}\n", e, String::from_utf8_lossy(&output.stdout) ); }) } #[track_caller] pub fn run_output(&mut self, output: &Output) { self.ran = true; if let Err(e) = self.match_output(output.status.code(), &output.stdout, &output.stderr) { panic_error("process did not return the expected result", e) } } #[track_caller] fn verify_checks_output(&self, stdout: &[u8], stderr: &[u8]) { if self.expect_exit_code.unwrap_or(0) != 0 && self.expect_stdin.is_none() && self.expect_stdout_data.is_none() && self.expect_stderr_data.is_none() && self.expect_stdout_contains.is_empty() && self.expect_stderr_contains.is_empty() && self.expect_stdout_not_contains.is_empty() && self.expect_stderr_not_contains.is_empty() && self.expect_stderr_with_without.is_empty() { panic!( "`with_status()` is used, but no output is checked.\n\ The test must check the output to ensure the correct error is triggered.\n\ --- stdout\n{}\n--- stderr\n{}", String::from_utf8_lossy(stdout), String::from_utf8_lossy(stderr), ); } } #[track_caller] fn match_process(&self, process: &ProcessBuilder) -> Result { println!("running {}", process); let res = if self.stream_output { if is_ci() { panic!("`.stream()` is for local debugging") } process.exec_with_streaming( &mut |out| { println!("{}", out); Ok(()) }, &mut |err| { eprintln!("{}", err); Ok(()) }, true, ) } else { process.exec_with_output() }; match res { Ok(out) => { self.match_output(out.status.code(), &out.stdout, &out.stderr)?; return Ok(RawOutput { stdout: out.stdout, stderr: out.stderr, code: out.status.code(), }); } Err(e) => { if let Some(ProcessError { stdout: Some(stdout), stderr: Some(stderr), code, .. 
}) = e.downcast_ref::() { self.match_output(*code, stdout, stderr)?; return Ok(RawOutput { stdout: stdout.to_vec(), stderr: stderr.to_vec(), code: *code, }); } bail!("could not exec process {}: {:?}", process, e) } } } #[track_caller] fn match_output(&self, code: Option, stdout: &[u8], stderr: &[u8]) -> Result<()> { self.verify_checks_output(stdout, stderr); let stdout = std::str::from_utf8(stdout).expect("stdout is not utf8"); let stderr = std::str::from_utf8(stderr).expect("stderr is not utf8"); match self.expect_exit_code { None => {} Some(expected) if code == Some(expected) => {} Some(expected) => bail!( "process exited with code {} (expected {})\n--- stdout\n{}\n--- stderr\n{}", code.unwrap_or(-1), expected, stdout, stderr ), } if let Some(expect_stdout_data) = &self.expect_stdout_data { if let Err(err) = self.assert.try_eq( Some(&"stdout"), stdout.into_data(), expect_stdout_data.clone(), ) { panic!("{err}") } } if let Some(expect_stderr_data) = &self.expect_stderr_data { if let Err(err) = self.assert.try_eq( Some(&"stderr"), stderr.into_data(), expect_stderr_data.clone(), ) { panic!("{err}") } } for expect in self.expect_stdout_contains.iter() { compare::match_contains(expect, stdout, self.assert.redactions())?; } for expect in self.expect_stderr_contains.iter() { compare::match_contains(expect, stderr, self.assert.redactions())?; } for expect in self.expect_stdout_not_contains.iter() { compare::match_does_not_contain(expect, stdout, self.assert.redactions())?; } for expect in self.expect_stderr_not_contains.iter() { compare::match_does_not_contain(expect, stderr, self.assert.redactions())?; } for (with, without) in self.expect_stderr_with_without.iter() { compare::match_with_without(stderr, with, without, self.assert.redactions())?; } Ok(()) } } impl Drop for Execs { fn drop(&mut self) { if !self.ran && !std::thread::panicking() { panic!("forgot to run this command"); } } } /// Run and verify a process, see [`Execs`] pub fn execs() -> Execs { Execs { ran: 
false, process_builder: None, expect_stdin: None, expect_exit_code: Some(0), expect_stdout_data: None, expect_stderr_data: None, expect_stdout_contains: Vec::new(), expect_stderr_contains: Vec::new(), expect_stdout_not_contains: Vec::new(), expect_stderr_not_contains: Vec::new(), expect_stderr_with_without: Vec::new(), stream_output: false, assert: compare::assert_e2e(), } } /// Generate a basic `Cargo.toml` pub fn basic_manifest(name: &str, version: &str) -> String { format!( r#" [package] name = "{}" version = "{}" authors = [] edition = "2015" "#, name, version ) } /// Generate a `Cargo.toml` with the specified `bin.name` pub fn basic_bin_manifest(name: &str) -> String { format!( r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] edition = "2015" [[bin]] name = "{}" "#, name, name ) } /// Generate a `Cargo.toml` with the specified `lib.name` pub fn basic_lib_manifest(name: &str) -> String { format!( r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] edition = "2015" [lib] name = "{}" "#, name, name ) } struct RustcInfo { verbose_version: String, host: String, } impl RustcInfo { fn new() -> RustcInfo { let output = ProcessBuilder::new("rustc") .arg("-vV") .exec_with_output() .expect("rustc should exec"); let verbose_version = String::from_utf8(output.stdout).expect("utf8 output"); let host = verbose_version .lines() .filter_map(|line| line.strip_prefix("host: ")) .next() .expect("verbose version has host: field") .to_string(); RustcInfo { verbose_version, host, } } } fn rustc_info() -> &'static RustcInfo { static RUSTC_INFO: OnceLock = OnceLock::new(); RUSTC_INFO.get_or_init(RustcInfo::new) } /// The rustc host such as `x86_64-unknown-linux-gnu`. pub fn rustc_host() -> &'static str { &rustc_info().host } /// The host triple suitable for use in a cargo environment variable (uppercased). 
pub fn rustc_host_env() -> String {
    rustc_host().to_uppercase().replace('-', "_")
}

/// Whether the rustc under test is a nightly (or dev) build.
pub fn is_nightly() -> bool {
    let vv = &rustc_info().verbose_version;
    // CARGO_TEST_DISABLE_NIGHTLY is set in rust-lang/rust's CI so that all
    // nightly-only tests are disabled there. Otherwise, it could make it
    // difficult to land changes which would need to be made simultaneously in
    // rust-lang/cargo and rust-lang/rust, which isn't possible.
    env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err()
        && (vv.contains("-nightly") || vv.contains("-dev"))
}

/// Run `$bin` in the test's environment, see [`ProcessBuilder`]
///
/// For more on the test environment, see
/// - [`paths::root`]
/// - [`TestEnvCommandExt`]
pub fn process<T: AsRef<OsStr>>(bin: T) -> ProcessBuilder {
    _process(bin.as_ref())
}

// Monomorphization-friendly inner body for `process`.
fn _process(t: &OsStr) -> ProcessBuilder {
    let mut p = ProcessBuilder::new(t);
    p.cwd(&paths::root()).test_env();
    p
}

/// Enable nightly features for testing
pub trait ChannelChangerCommandExt {
    /// The list of reasons should be why nightly cargo is needed. If it is
    /// because of an unstable feature put the name of the feature as the reason,
    /// e.g. `&["print-im-a-teapot"]`.
    fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self;
}

impl ChannelChangerCommandExt for &mut ProcessBuilder {
    fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
        self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
    }
}

impl ChannelChangerCommandExt for snapbox::cmd::Command {
    fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
        self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
    }
}

/// Establish a process's test environment
pub trait TestEnvCommandExt: Sized {
    fn test_env(mut self) -> Self {
        // In general just clear out all cargo-specific configuration already in the
        // environment. Our tests all assume a "default configuration" unless
        // specified otherwise.
for (k, _v) in env::vars() { if k.starts_with("CARGO_") { self = self.env_remove(&k); } } if env::var_os("RUSTUP_TOOLCHAIN").is_some() { // Override the PATH to avoid executing the rustup wrapper thousands // of times. This makes the testsuite run substantially faster. static RUSTC_DIR: OnceLock = OnceLock::new(); let rustc_dir = RUSTC_DIR.get_or_init(|| { match ProcessBuilder::new("rustup") .args(&["which", "rustc"]) .exec_with_output() { Ok(output) => { let s = std::str::from_utf8(&output.stdout).expect("utf8").trim(); let mut p = PathBuf::from(s); p.pop(); p } Err(e) => { panic!("RUSTUP_TOOLCHAIN was set, but could not run rustup: {}", e); } } }); let path = env::var_os("PATH").unwrap_or_default(); let paths = env::split_paths(&path); let new_path = env::join_paths(std::iter::once(rustc_dir.clone()).chain(paths)).unwrap(); self = self.env("PATH", new_path); } self = self .current_dir(&paths::root()) .env("HOME", paths::home()) .env("CARGO_HOME", paths::cargo_home()) .env("__CARGO_TEST_ROOT", paths::global_root()) // Force Cargo to think it's on the stable channel for all tests, this // should hopefully not surprise us as we add cargo features over time and // cargo rides the trains. .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable") // Keeps cargo within its sandbox. .env("__CARGO_TEST_DISABLE_GLOBAL_KNOWN_HOST", "1") // Set retry sleep to 1 millisecond. .env("__CARGO_TEST_FIXED_RETRY_SLEEP_MS", "1") // Incremental generates a huge amount of data per test, which we // don't particularly need. Tests that specifically need to check // the incremental behavior should turn this back on. .env("CARGO_INCREMENTAL", "0") // Don't read the system git config which is out of our control. 
.env("GIT_CONFIG_NOSYSTEM", "1") .env_remove("__CARGO_DEFAULT_LIB_METADATA") .env_remove("ALL_PROXY") .env_remove("EMAIL") .env_remove("GIT_AUTHOR_EMAIL") .env_remove("GIT_AUTHOR_NAME") .env_remove("GIT_COMMITTER_EMAIL") .env_remove("GIT_COMMITTER_NAME") .env_remove("http_proxy") .env_remove("HTTPS_PROXY") .env_remove("https_proxy") .env_remove("MAKEFLAGS") .env_remove("MFLAGS") .env_remove("MSYSTEM") // assume cmd.exe everywhere on windows .env_remove("RUSTC") .env_remove("RUST_BACKTRACE") .env_remove("RUSTC_WORKSPACE_WRAPPER") .env_remove("RUSTC_WRAPPER") .env_remove("RUSTDOC") .env_remove("RUSTDOCFLAGS") .env_remove("RUSTFLAGS") .env_remove("SSH_AUTH_SOCK") // ensure an outer agent is never contacted .env_remove("USER") // not set on some rust-lang docker images .env_remove("XDG_CONFIG_HOME") // see #2345 .env_remove("OUT_DIR"); // see #13204 if cfg!(windows) { self = self.env("USERPROFILE", paths::home()); } self } fn current_dir>(self, path: S) -> Self; fn env>(self, key: &str, value: S) -> Self; fn env_remove(self, key: &str) -> Self; } impl TestEnvCommandExt for &mut ProcessBuilder { fn current_dir>(self, path: S) -> Self { let path = path.as_ref(); self.cwd(path) } fn env>(self, key: &str, value: S) -> Self { self.env(key, value) } fn env_remove(self, key: &str) -> Self { self.env_remove(key) } } impl TestEnvCommandExt for snapbox::cmd::Command { fn current_dir>(self, path: S) -> Self { self.current_dir(path) } fn env>(self, key: &str, value: S) -> Self { self.env(key, value) } fn env_remove(self, key: &str) -> Self { self.env_remove(key) } } /// Test the cargo command pub trait CargoCommandExt { fn cargo_ui() -> Self; } impl CargoCommandExt for snapbox::cmd::Command { fn cargo_ui() -> Self { Self::new(cargo_exe()) .with_assert(compare::assert_ui()) .env("CARGO_TERM_COLOR", "always") .test_env() } } /// Add a list of arguments as a line pub trait ArgLineCommandExt: Sized { fn arg_line(mut self, s: &str) -> Self { for mut arg in s.split_whitespace() { if 
(arg.starts_with('"') && arg.ends_with('"'))
                || (arg.starts_with('\'') && arg.ends_with('\''))
            {
                // Strip matching single/double quotes; `.max(1)` keeps a lone
                // quote character from producing an inverted range.
                arg = &arg[1..(arg.len() - 1).max(1)];
            } else if arg.contains(&['"', '\''][..]) {
                panic!("shell-style argument parsing is not supported")
            }
            self = self.arg(arg);
        }
        self
    }

    fn arg<S: AsRef<OsStr>>(self, s: S) -> Self;
}

impl ArgLineCommandExt for &mut ProcessBuilder {
    fn arg<S: AsRef<OsStr>>(self, s: S) -> Self {
        self.arg(s)
    }
}

impl ArgLineCommandExt for &mut Execs {
    fn arg<S: AsRef<OsStr>>(self, s: S) -> Self {
        self.arg(s)
    }
}

impl ArgLineCommandExt for snapbox::cmd::Command {
    fn arg<S: AsRef<OsStr>>(self, s: S) -> Self {
        self.arg(s)
    }
}

/// Run `cargo $arg_line`, see [`Execs`]
pub fn cargo_process(arg_line: &str) -> Execs {
    let cargo = cargo_exe();
    let mut p = process(&cargo);
    p.env("CARGO", cargo);
    p.arg_line(arg_line);
    execs().with_process_builder(p)
}

/// Run `git $arg_line`, see [`ProcessBuilder`]
pub fn git_process(arg_line: &str) -> ProcessBuilder {
    let mut p = process("git");
    p.arg_line(arg_line);
    p
}

/// Sleep for `ms` milliseconds.
pub fn sleep_ms(ms: u64) {
    ::std::thread::sleep(Duration::from_millis(ms));
}

/// Returns `true` if the local filesystem has low-resolution mtimes.
pub fn is_coarse_mtime() -> bool {
    // If the filetime crate is being used to emulate HFS then
    // return `true`, without looking at the actual hardware.
    cfg!(emulate_second_only_system) ||
    // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS
    // filesystem, (or any filesystem with low-resolution mtimes). However,
    // that's tricky to detect, so for now just deal with CI.
    cfg!(target_os = "macos") && is_ci()
}

/// A way to increase the cut-off for all the time-based tests.
///
/// Some CI setups are much slower than the equipment used by Cargo itself.
/// Architectures that do not have a modern processor, hardware emulation, etc.
pub fn slow_cpu_multiplier(main: u64) -> Duration { static SLOW_CPU_MULTIPLIER: OnceLock = OnceLock::new(); let slow_cpu_multiplier = SLOW_CPU_MULTIPLIER.get_or_init(|| { env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER") .ok() .and_then(|m| m.parse().ok()) .unwrap_or(1) }); Duration::from_secs(slow_cpu_multiplier * main) } #[cfg(windows)] pub fn symlink_supported() -> bool { if is_ci() { // We want to be absolutely sure this runs on CI. return true; } let src = paths::root().join("symlink_src"); fs::write(&src, "").unwrap(); let dst = paths::root().join("symlink_dst"); let result = match os::windows::fs::symlink_file(&src, &dst) { Ok(_) => { fs::remove_file(&dst).unwrap(); true } Err(e) => { eprintln!( "symlinks not supported: {:?}\n\ Windows 10 users should enable developer mode.", e ); false } }; fs::remove_file(&src).unwrap(); return result; } #[cfg(not(windows))] pub fn symlink_supported() -> bool { true } /// The error message for ENOENT. pub fn no_such_file_err_msg() -> String { std::io::Error::from_raw_os_error(2).to_string() } /// Helper to retry a function `n` times. /// /// The function should return `Some` when it is ready. pub fn retry(n: u32, mut f: F) -> R where F: FnMut() -> Option, { let mut count = 0; let start = std::time::Instant::now(); loop { if let Some(r) = f() { return r; } count += 1; if count > n { panic!( "test did not finish within {n} attempts ({:?} total)", start.elapsed() ); } sleep_ms(100); } } #[test] #[should_panic(expected = "test did not finish")] fn retry_fails() { retry(2, || None::<()>); } /// Helper that waits for a thread to finish, up to `n` tenths of a second. pub fn thread_wait_timeout(n: u32, thread: JoinHandle) -> T { retry(n, || thread.is_finished().then_some(())); thread.join().unwrap() } /// Helper that runs some function, and waits up to `n` tenths of a second for /// it to finish. 
pub fn threaded_timeout(n: u32, f: F) -> R where F: FnOnce() -> R + Send + 'static, R: Send + 'static, { let thread = std::thread::spawn(|| f()); thread_wait_timeout(n, thread) } // Helper for testing dep-info files in the fingerprint dir. #[track_caller] pub fn assert_deps(project: &Project, fingerprint: &str, test_cb: impl Fn(&Path, &[(u8, &str)])) { let mut files = project .glob(fingerprint) .map(|f| f.expect("unwrap glob result")) // Filter out `.json` entries. .filter(|f| f.extension().is_none()); let info_path = files .next() .unwrap_or_else(|| panic!("expected 1 dep-info file at {}, found 0", fingerprint)); assert!(files.next().is_none(), "expected only 1 dep-info file"); let dep_info = fs::read(&info_path).unwrap(); let dep_info = &mut &dep_info[..]; // Consume the magic marker and version. Here they don't really matter. read_usize(dep_info); read_u8(dep_info); read_u8(dep_info); let deps = (0..read_usize(dep_info)) .map(|_| { let ty = read_u8(dep_info); let path = std::str::from_utf8(read_bytes(dep_info)).unwrap(); let checksum_present = read_bool(dep_info); if checksum_present { // Read out the checksum info without using it let _file_len = read_u64(dep_info); let _checksum = read_bytes(dep_info); } (ty, path) }) .collect::>(); test_cb(&info_path, &deps); fn read_usize(bytes: &mut &[u8]) -> usize { let ret = &bytes[..4]; *bytes = &bytes[4..]; u32::from_le_bytes(ret.try_into().unwrap()) as usize } fn read_u8(bytes: &mut &[u8]) -> u8 { let ret = bytes[0]; *bytes = &bytes[1..]; ret } fn read_bool(bytes: &mut &[u8]) -> bool { read_u8(bytes) != 0 } fn read_u64(bytes: &mut &[u8]) -> u64 { let ret = &bytes[..8]; *bytes = &bytes[8..]; u64::from_le_bytes(ret.try_into().unwrap()) } fn read_bytes<'a>(bytes: &mut &'a [u8]) -> &'a [u8] { let n = read_usize(bytes); let ret = &bytes[..n]; *bytes = &bytes[n..]; ret } } pub fn assert_deps_contains(project: &Project, fingerprint: &str, expected: &[(u8, &str)]) { assert_deps(project, fingerprint, |info_path, entries| { for 
(e_kind, e_path) in expected { let pattern = glob::Pattern::new(e_path).unwrap(); let count = entries .iter() .filter(|(kind, path)| kind == e_kind && pattern.matches(path)) .count(); if count != 1 { panic!( "Expected 1 match of {} {} in {:?}, got {}:\n{:#?}", e_kind, e_path, info_path, count, entries ); } } }) } cargo-test-support-0.7.3/src/paths.rs000064400000000000000000000265501046102023000157010ustar 00000000000000//! Access common paths and manipulate the filesystem use filetime::FileTime; use std::cell::RefCell; use std::env; use std::fs; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Mutex; use std::sync::OnceLock; static CARGO_INTEGRATION_TEST_DIR: &str = "cit"; static GLOBAL_ROOT: OnceLock>> = OnceLock::new(); /// This is used when running cargo is pre-CARGO_TARGET_TMPDIR /// TODO: Remove when `CARGO_TARGET_TMPDIR` grows old enough. fn global_root_legacy() -> PathBuf { let mut path = t!(env::current_exe()); path.pop(); // chop off exe name path.pop(); // chop off "deps" path.push("tmp"); path.mkdir_p(); path } fn set_global_root(tmp_dir: Option<&'static str>) { let mut lock = GLOBAL_ROOT .get_or_init(|| Default::default()) .lock() .unwrap(); if lock.is_none() { let mut root = match tmp_dir { Some(tmp_dir) => PathBuf::from(tmp_dir), None => global_root_legacy(), }; root.push(CARGO_INTEGRATION_TEST_DIR); *lock = Some(root); } } /// Path to the parent directory of all test [`root`]s /// /// ex: `$CARGO_TARGET_TMPDIR/cit` pub fn global_root() -> PathBuf { let lock = GLOBAL_ROOT .get_or_init(|| Default::default()) .lock() .unwrap(); match lock.as_ref() { Some(p) => p.clone(), None => unreachable!("GLOBAL_ROOT not set yet"), } } // We need to give each test a unique id. 
The test name could serve this // purpose, but the `test` crate doesn't have a way to obtain the current test // name.[*] Instead, we used the `cargo-test-macro` crate to automatically // insert an init function for each test that sets the test name in a thread // local variable. // // [*] It does set the thread name, but only when running concurrently. If not // running concurrently, all tests are run on the main thread. thread_local! { static TEST_ID: RefCell> = RefCell::new(None); } /// See [`init_root`] pub struct TestIdGuard { _private: (), } /// For test harnesses like [`crate::cargo_test`] pub fn init_root(tmp_dir: Option<&'static str>) -> TestIdGuard { static NEXT_ID: AtomicUsize = AtomicUsize::new(0); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); TEST_ID.with(|n| *n.borrow_mut() = Some(id)); let guard = TestIdGuard { _private: () }; set_global_root(tmp_dir); let r = root(); r.rm_rf(); r.mkdir_p(); guard } impl Drop for TestIdGuard { fn drop(&mut self) { TEST_ID.with(|n| *n.borrow_mut() = None); } } /// Path to the test's filesystem scratchpad /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0` pub fn root() -> PathBuf { let id = TEST_ID.with(|n| { n.borrow().expect( "Tests must use the `#[cargo_test]` attribute in \ order to be able to use the crate root.", ) }); let mut root = global_root(); root.push(&format!("t{}", id)); root } /// Path to the current test's `$HOME` /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/home` pub fn home() -> PathBuf { let mut path = root(); path.push("home"); path.mkdir_p(); path } /// Path to the current test's `$CARGO_HOME` /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/home/.cargo` pub fn cargo_home() -> PathBuf { home().join(".cargo") } /// Common path and file operations pub trait CargoPathExt { fn to_url(&self) -> url::Url; fn rm_rf(&self); fn mkdir_p(&self); /// Returns a list of all files and directories underneath the given /// directory, recursively, including the starting path. 
fn ls_r(&self) -> Vec; fn move_into_the_past(&self) { self.move_in_time(|sec, nsec| (sec - 3600, nsec)) } fn move_into_the_future(&self) { self.move_in_time(|sec, nsec| (sec + 3600, nsec)) } fn move_in_time(&self, travel_amount: F) where F: Fn(i64, u32) -> (i64, u32); } impl CargoPathExt for Path { fn to_url(&self) -> url::Url { url::Url::from_file_path(self).ok().unwrap() } fn rm_rf(&self) { let meta = match self.symlink_metadata() { Ok(meta) => meta, Err(e) => { if e.kind() == ErrorKind::NotFound { return; } panic!("failed to remove {:?}, could not read: {:?}", self, e); } }; // There is a race condition between fetching the metadata and // actually performing the removal, but we don't care all that much // for our tests. if meta.is_dir() { if let Err(e) = fs::remove_dir_all(self) { panic!("failed to remove {:?}: {:?}", self, e) } } else if let Err(e) = fs::remove_file(self) { panic!("failed to remove {:?}: {:?}", self, e) } } fn mkdir_p(&self) { fs::create_dir_all(self) .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e)) } fn ls_r(&self) -> Vec { walkdir::WalkDir::new(self) .sort_by_file_name() .into_iter() .filter_map(|e| e.map(|e| e.path().to_owned()).ok()) .collect() } fn move_in_time(&self, travel_amount: F) where F: Fn(i64, u32) -> (i64, u32), { if self.is_file() { time_travel(self, &travel_amount); } else { recurse(self, &self.join("target"), &travel_amount); } fn recurse(p: &Path, bad: &Path, travel_amount: &F) where F: Fn(i64, u32) -> (i64, u32), { if p.is_file() { time_travel(p, travel_amount) } else if !p.starts_with(bad) { for f in t!(fs::read_dir(p)) { let f = t!(f).path(); recurse(&f, bad, travel_amount); } } } fn time_travel(path: &Path, travel_amount: &F) where F: Fn(i64, u32) -> (i64, u32), { let stat = t!(path.symlink_metadata()); let mtime = FileTime::from_last_modification_time(&stat); let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds()); let newtime = FileTime::from_unix_time(sec, nsec); // Sadly 
change_file_times has a failure mode where a readonly file // cannot have its times changed on windows. do_op(path, "set file times", |path| { filetime::set_file_times(path, newtime, newtime) }); } } } impl CargoPathExt for PathBuf { fn to_url(&self) -> url::Url { self.as_path().to_url() } fn rm_rf(&self) { self.as_path().rm_rf() } fn mkdir_p(&self) { self.as_path().mkdir_p() } fn ls_r(&self) -> Vec { self.as_path().ls_r() } fn move_in_time(&self, travel_amount: F) where F: Fn(i64, u32) -> (i64, u32), { self.as_path().move_in_time(travel_amount) } } fn do_op(path: &Path, desc: &str, mut f: F) where F: FnMut(&Path) -> io::Result<()>, { match f(path) { Ok(()) => {} Err(ref e) if e.kind() == ErrorKind::PermissionDenied => { let mut p = t!(path.metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(path, p)); // Unix also requires the parent to not be readonly for example when // removing files let parent = path.parent().unwrap(); let mut p = t!(parent.metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(parent, p)); f(path).unwrap_or_else(|e| { panic!("failed to {} {}: {}", desc, path.display(), e); }) } Err(e) => { panic!("failed to {} {}: {}", desc, path.display(), e); } } } /// Get the filename for a library. 
/// /// `kind` should be one of: /// - `lib` /// - `rlib` /// - `staticlib` /// - `dylib` /// - `proc-macro` /// /// # Examples /// ``` /// # use cargo_test_support::paths::get_lib_filename; /// get_lib_filename("foo", "dylib"); /// ``` /// would return: /// - macOS: `"libfoo.dylib"` /// - Windows: `"foo.dll"` /// - Unix: `"libfoo.so"` pub fn get_lib_filename(name: &str, kind: &str) -> String { let prefix = get_lib_prefix(kind); let extension = get_lib_extension(kind); format!("{}{}.{}", prefix, name, extension) } /// See [`get_lib_filename`] for more details pub fn get_lib_prefix(kind: &str) -> &str { match kind { "lib" | "rlib" => "lib", "staticlib" | "dylib" | "proc-macro" => { if cfg!(windows) { "" } else { "lib" } } _ => unreachable!(), } } /// See [`get_lib_filename`] for more details pub fn get_lib_extension(kind: &str) -> &str { match kind { "lib" | "rlib" => "rlib", "staticlib" => { if cfg!(windows) { "lib" } else { "a" } } "dylib" | "proc-macro" => { if cfg!(windows) { "dll" } else if cfg!(target_os = "macos") { "dylib" } else { "so" } } _ => unreachable!(), } } /// Path to `rustc`s sysroot pub fn sysroot() -> String { let output = Command::new("rustc") .arg("--print=sysroot") .output() .expect("rustc to run"); assert!(output.status.success()); let sysroot = String::from_utf8(output.stdout).unwrap(); sysroot.trim().to_string() } /// Returns true if names such as aux.* are allowed. /// /// Traditionally, Windows did not allow a set of file names (see `is_windows_reserved` /// for a list). More recent versions of Windows have relaxed this restriction. This test /// determines whether we are running in a mode that allows Windows reserved names. 
#[cfg(windows)] pub fn windows_reserved_names_are_allowed() -> bool { use std::ffi::OsStr; use std::os::windows::ffi::OsStrExt; use std::ptr; use windows_sys::Win32::Storage::FileSystem::GetFullPathNameW; let test_file_name: Vec<_> = OsStr::new("aux.rs").encode_wide().collect(); let buffer_length = unsafe { GetFullPathNameW(test_file_name.as_ptr(), 0, ptr::null_mut(), ptr::null_mut()) }; if buffer_length == 0 { // This means the call failed, so we'll conservatively assume reserved names are not allowed. return false; } let mut buffer = vec![0u16; buffer_length as usize]; let result = unsafe { GetFullPathNameW( test_file_name.as_ptr(), buffer_length, buffer.as_mut_ptr(), ptr::null_mut(), ) }; if result == 0 { // Once again, conservatively assume reserved names are not allowed if the // GetFullPathNameW call failed. return false; } // Under the old rules, a file name like aux.rs would get converted into \\.\aux, so // we detect this case by checking if the string starts with \\.\ // // Otherwise, the filename will be something like C:\Users\Foo\Documents\aux.rs let prefix: Vec<_> = OsStr::new("\\\\.\\").encode_wide().collect(); if buffer.starts_with(&prefix) { false } else { true } } cargo-test-support-0.7.3/src/publish.rs000064400000000000000000000231361046102023000162250ustar 00000000000000//! Helpers for testing `cargo package` / `cargo publish` //! //! # Example //! //! ```no_run //! # use cargo_test_support::registry::RegistryBuilder; //! # use cargo_test_support::publish::validate_upload; //! # use cargo_test_support::project; //! // This replaces `registry::init()` and must be called before `Package::new().publish()` //! let registry = RegistryBuilder::new().http_api().http_index().build(); //! //! let p = project() //! .file( //! "Cargo.toml", //! r#" //! [package] //! name = "foo" //! version = "0.0.1" //! edition = "2015" //! authors = [] //! license = "MIT" //! description = "foo" //! "#, //! ) //! .file("src/main.rs", "fn main() {}") //! .build(); //! //! 
p.cargo("publish --no-verify") //! .replace_crates_io(registry.index_url()) //! .run(); //! //! validate_upload( //! r#" //! { //! "authors": [], //! "badges": {}, //! "categories": [], //! "deps": [], //! "description": "foo", //! "documentation": null, //! "features": {}, //! "homepage": null, //! "keywords": [], //! "license": "MIT", //! "license_file": null, //! "links": null, //! "name": "foo", //! "readme": null, //! "readme_file": null, //! "repository": null, //! "rust_version": null, //! "vers": "0.0.1" //! } //! "#, //! "foo-0.0.1.crate", //! &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], //! ); //! ``` use crate::compare::InMemoryDir; use crate::registry::{self, alt_api_path, FeatureMap}; use flate2::read::GzDecoder; use snapbox::prelude::*; use std::collections::HashSet; use std::fs; use std::fs::File; use std::io::{self, prelude::*, SeekFrom}; use std::path::Path; use tar::Archive; fn read_le_u32(mut reader: R) -> io::Result where R: Read, { let mut buf = [0; 4]; reader.read_exact(&mut buf)?; Ok(u32::from_le_bytes(buf)) } /// Check the `cargo publish` API call #[track_caller] pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) { let new_path = registry::api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, (), ); } /// Check the `cargo publish` API call, with file contents #[track_caller] pub fn validate_upload_with_contents( expected_json: &str, expected_crate_name: &str, expected_files: &[&str], expected_contents: impl Into, ) { let new_path = registry::api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, expected_contents, ); } /// Check the `cargo publish` API call to the alternative test registry #[track_caller] pub fn validate_alt_upload( expected_json: &str, expected_crate_name: &str, expected_files: &[&str], ) { let new_path = 
alt_api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, (), ); } #[track_caller] fn _validate_upload( new_path: &Path, expected_json: &str, expected_crate_name: &str, expected_files: &[&str], expected_contents: impl Into, ) { let (actual_json, krate_bytes) = read_new_post(new_path); snapbox::assert_data_eq!(actual_json, expected_json.is_json()); // Verify the tarball. validate_crate_contents( &krate_bytes[..], expected_crate_name, expected_files, expected_contents, ); } #[track_caller] fn read_new_post(new_path: &Path) -> (Vec, Vec) { let mut f = File::open(new_path).unwrap(); // 32-bit little-endian integer of length of JSON data. let json_sz = read_le_u32(&mut f).expect("read json length"); let mut json_bytes = vec![0; json_sz as usize]; f.read_exact(&mut json_bytes).expect("read JSON data"); // 32-bit little-endian integer of length of crate file. let crate_sz = read_le_u32(&mut f).expect("read crate length"); let mut krate_bytes = vec![0; crate_sz as usize]; f.read_exact(&mut krate_bytes).expect("read crate data"); // Check at end. let current = f.seek(SeekFrom::Current(0)).unwrap(); assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current); (json_bytes, krate_bytes) } /// Checks the contents of a `.crate` file. /// /// - `expected_crate_name` should be something like `foo-0.0.1.crate`. /// - `expected_files` should be a complete list of files in the crate /// (relative to `expected_crate_name`). /// - `expected_contents` should be a list of `(file_name, contents)` tuples /// to validate the contents of the given file. Only the listed files will /// be checked (others will be ignored). 
#[track_caller] pub fn validate_crate_contents( reader: impl Read, expected_crate_name: &str, expected_files: &[&str], expected_contents: impl Into, ) { let expected_contents = expected_contents.into(); validate_crate_contents_( reader, expected_crate_name, expected_files, expected_contents, ) } #[track_caller] fn validate_crate_contents_( reader: impl Read, expected_crate_name: &str, expected_files: &[&str], expected_contents: InMemoryDir, ) { let mut rdr = GzDecoder::new(reader); snapbox::assert_data_eq!(rdr.header().unwrap().filename().unwrap(), { let expected: snapbox::Data = expected_crate_name.into(); expected.raw() }); let mut contents = Vec::new(); rdr.read_to_end(&mut contents).unwrap(); let mut ar = Archive::new(&contents[..]); let base_crate_name = Path::new( expected_crate_name .strip_suffix(".crate") .expect("must end with .crate"), ); let actual_contents: InMemoryDir = ar .entries() .unwrap() .map(|entry| { let mut entry = entry.unwrap(); let name = entry .path() .unwrap() .strip_prefix(base_crate_name) .unwrap() .to_owned(); let mut contents = String::new(); entry.read_to_string(&mut contents).unwrap(); (name, contents) }) .collect(); let actual_files: HashSet<&Path> = actual_contents.paths().collect(); let expected_files: HashSet<&Path> = expected_files.iter().map(|name| Path::new(name)).collect(); let missing: Vec<&&Path> = expected_files.difference(&actual_files).collect(); let extra: Vec<&&Path> = actual_files.difference(&expected_files).collect(); if !missing.is_empty() || !extra.is_empty() { panic!( "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n", missing, extra ); } actual_contents.assert_contains(&expected_contents); } pub(crate) fn create_index_line( name: serde_json::Value, vers: &str, deps: Vec, cksum: &str, features: crate::registry::FeatureMap, yanked: bool, links: Option, rust_version: Option<&str>, v: Option, ) -> String { // This emulates what crates.io does to retain backwards compatibility. 
let (features, features2) = split_index_features(features.clone()); let mut json = serde_json::json!({ "name": name, "vers": vers, "deps": deps, "cksum": cksum, "features": features, "yanked": yanked, "links": links, }); if let Some(f2) = &features2 { json["features2"] = serde_json::json!(f2); json["v"] = serde_json::json!(2); } if let Some(v) = v { json["v"] = serde_json::json!(v); } if let Some(rust_version) = rust_version { json["rust_version"] = serde_json::json!(rust_version); } json.to_string() } pub(crate) fn write_to_index(registry_path: &Path, name: &str, line: String, local: bool) { let file = cargo_util::registry::make_dep_path(name, false); // Write file/line in the index. let dst = if local { registry_path.join("index").join(&file) } else { registry_path.join(&file) }; let prev = fs::read_to_string(&dst).unwrap_or_default(); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, prev + &line[..] + "\n")); // Add the new file to the index. if !local { let repo = t!(git2::Repository::open(®istry_path)); let mut index = t!(repo.index()); t!(index.add_path(Path::new(&file))); t!(index.write()); let id = t!(index.write_tree()); // Commit this change. let tree = t!(repo.find_tree(id)); let sig = t!(repo.signature()); let parent = t!(repo.refname_to_id("refs/heads/master")); let parent = t!(repo.find_commit(parent)); t!(repo.commit( Some("HEAD"), &sig, &sig, "Another commit", &tree, &[&parent] )); } } fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option) { let mut features2 = FeatureMap::new(); for (feat, values) in features.iter_mut() { if values .iter() .any(|value| value.starts_with("dep:") || value.contains("?/")) { let new_values = values.drain(..).collect(); features2.insert(feat.clone(), new_values); } } if features2.is_empty() { (features, None) } else { (features, Some(features2)) } } cargo-test-support-0.7.3/src/registry.rs000064400000000000000000001637551046102023000164430ustar 00000000000000//! 
Interact with the [`TestRegistry`] //! //! # Example //! //! ```no_run //! use cargo_test_support::registry::Package; //! use cargo_test_support::project; //! use cargo_test_support::str; //! //! // Publish package "a" depending on "b". //! Package::new("a", "1.0.0") //! .dep("b", "1.0.0") //! .file("src/lib.rs", r#" //! extern crate b; //! pub fn f() -> i32 { b::f() * 2 } //! "#) //! .publish(); //! //! // Publish package "b". //! Package::new("b", "1.0.0") //! .file("src/lib.rs", r#" //! pub fn f() -> i32 { 12 } //! "#) //! .publish(); //! //! // Create a project that uses package "a". //! let p = project() //! .file("Cargo.toml", r#" //! [package] //! name = "foo" //! version = "0.0.1" //! //! [dependencies] //! a = "1.0" //! "#) //! .file("src/main.rs", r#" //! extern crate a; //! fn main() { println!("{}", a::f()); } //! "#) //! .build(); //! //! p.cargo("run").with_stdout_data(str!["24"]).run(); //! ``` use crate::git::repo; use crate::paths; use crate::publish::{create_index_line, write_to_index}; use cargo_util::paths::append; use cargo_util::Sha256; use flate2::write::GzEncoder; use flate2::Compression; use pasetors::keys::{AsymmetricPublicKey, AsymmetricSecretKey}; use pasetors::paserk::FormatAsPaserk; use pasetors::token::UntrustedToken; use std::collections::{BTreeMap, HashMap}; use std::fmt; use std::fs::{self, File}; use std::io::{BufRead, BufReader, Read, Write}; use std::net::{SocketAddr, TcpListener, TcpStream}; use std::path::{Path, PathBuf}; use std::thread::{self, JoinHandle}; use tar::{Builder, Header}; use time::format_description::well_known::Rfc3339; use time::{Duration, OffsetDateTime}; use url::Url; /// Path to the local index for psuedo-crates.io. /// /// This is a Git repo /// initialized with a `config.json` file pointing to `dl_path` for downloads /// and `api_path` for uploads. 
/// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/registry` pub fn registry_path() -> PathBuf { generate_path("registry") } /// Path to the local web API uploads /// /// Cargo will place the contents of a web API /// request here. For example, `api/v1/crates/new` is the result of publishing a crate. /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/api` pub fn api_path() -> PathBuf { generate_path("api") } /// Path to download `.crate` files using the web API endpoint. /// /// Crates /// should be organized as `{name}/{version}/download` to match the web API /// endpoint. This is rarely used and must be manually set up. /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/dl` pub fn dl_path() -> PathBuf { generate_path("dl") } /// Path to the alternative-registry version of [`registry_path`] /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/alternative-registry` pub fn alt_registry_path() -> PathBuf { generate_path("alternative-registry") } /// URL to the alternative-registry version of `registry_url` fn alt_registry_url() -> Url { generate_url("alternative-registry") } /// Path to the alternative-registry version of [`dl_path`] /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/alternative-dl` pub fn alt_dl_path() -> PathBuf { generate_path("alternative-dl") } /// Path to the alternative-registry version of [`api_path`] /// /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/alternative-api` pub fn alt_api_path() -> PathBuf { generate_path("alternative-api") } fn generate_path(name: &str) -> PathBuf { paths::root().join(name) } fn generate_url(name: &str) -> Url { Url::from_file_path(generate_path(name)).ok().unwrap() } /// Auth-token for publishing, see [`RegistryBuilder::token`] #[derive(Clone)] pub enum Token { Plaintext(String), Keys(String, Option), } impl Token { /// This is a valid PASETO secret key. /// /// This one is already publicly available as part of the text of the RFC so is safe to use for tests. 
pub fn rfc_key() -> Token { Token::Keys( "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36" .to_string(), Some("sub".to_string()), ) } } type RequestCallback = Box Response>; /// Prepare a local [`TestRegistry`] fixture /// /// See also [`init`] and [`alt_init`] pub struct RegistryBuilder { /// If set, configures an alternate registry with the given name. alternative: Option, /// The authorization token for the registry. token: Option, /// If set, the registry requires authorization for all operations. auth_required: bool, /// If set, serves the index over http. http_index: bool, /// If set, serves the API over http. http_api: bool, /// If set, config.json includes 'api' api: bool, /// Write the token in the configuration. configure_token: bool, /// Write the registry in configuration. configure_registry: bool, /// API responders. custom_responders: HashMap, /// Handler for 404 responses. not_found_handler: RequestCallback, /// If nonzero, the git index update to be delayed by the given number of seconds. delayed_index_update: usize, /// Credential provider in configuration credential_provider: Option, } /// A local registry fixture /// /// Most tests won't need to call this directly but instead interact with [`Package`] pub struct TestRegistry { server: Option, index_url: Url, path: PathBuf, api_url: Url, dl_url: Url, token: Token, } impl TestRegistry { pub fn index_url(&self) -> &Url { &self.index_url } pub fn api_url(&self) -> &Url { &self.api_url } pub fn token(&self) -> &str { match &self.token { Token::Plaintext(s) => s, Token::Keys(_, _) => panic!("registry was not configured with a plaintext token"), } } pub fn key(&self) -> &str { match &self.token { Token::Plaintext(_) => panic!("registry was not configured with a secret key"), Token::Keys(s, _) => s, } } /// Shutdown the server thread and wait for it to stop. /// `Drop` automatically stops the server, but this additionally /// waits for the thread to stop. 
pub fn join(self) { if let Some(mut server) = self.server { server.stop(); let handle = server.handle.take().unwrap(); handle.join().unwrap(); } } } impl RegistryBuilder { #[must_use] pub fn new() -> RegistryBuilder { let not_found = |_req: &Request, _server: &HttpServer| -> Response { Response { code: 404, headers: vec![], body: b"not found".to_vec(), } }; RegistryBuilder { alternative: None, token: None, auth_required: false, http_api: false, http_index: false, api: true, configure_registry: true, configure_token: true, custom_responders: HashMap::new(), not_found_handler: Box::new(not_found), delayed_index_update: 0, credential_provider: None, } } /// Adds a custom HTTP response for a specific url #[must_use] pub fn add_responder Response>( mut self, url: impl Into, responder: R, ) -> Self { self.custom_responders .insert(url.into(), Box::new(responder)); self } #[must_use] pub fn not_found_handler Response>( mut self, responder: R, ) -> Self { self.not_found_handler = Box::new(responder); self } /// Configures the git index update to be delayed by the given number of seconds. #[must_use] pub fn delayed_index_update(mut self, delay: usize) -> Self { self.delayed_index_update = delay; self } /// Sets whether or not to initialize as an alternative registry. #[must_use] pub fn alternative_named(mut self, alt: &str) -> Self { self.alternative = Some(alt.to_string()); self } /// Sets whether or not to initialize as an alternative registry. #[must_use] pub fn alternative(self) -> Self { self.alternative_named("alternative") } /// Prevents placing a token in the configuration #[must_use] pub fn no_configure_token(mut self) -> Self { self.configure_token = false; self } /// Prevents adding the registry to the configuration. 
#[must_use] pub fn no_configure_registry(mut self) -> Self { self.configure_registry = false; self } /// Sets the token value #[must_use] pub fn token(mut self, token: Token) -> Self { self.token = Some(token); self } /// Sets this registry to require the authentication token for /// all operations. #[must_use] pub fn auth_required(mut self) -> Self { self.auth_required = true; self } /// Operate the index over http #[must_use] pub fn http_index(mut self) -> Self { self.http_index = true; self } /// Operate the api over http #[must_use] pub fn http_api(mut self) -> Self { self.http_api = true; self } /// The registry has no api. #[must_use] pub fn no_api(mut self) -> Self { self.api = false; self } /// The credential provider to configure for this registry. #[must_use] pub fn credential_provider(mut self, provider: &[&str]) -> Self { self.credential_provider = Some(format!("['{}']", provider.join("','"))); self } /// Initializes the registry. #[must_use] pub fn build(self) -> TestRegistry { let config_path = paths::cargo_home().join("config.toml"); t!(fs::create_dir_all(config_path.parent().unwrap())); let prefix = if let Some(alternative) = &self.alternative { format!("{alternative}-") } else { String::new() }; let registry_path = generate_path(&format!("{prefix}registry")); let index_url = generate_url(&format!("{prefix}registry")); let api_url = generate_url(&format!("{prefix}api")); let dl_url = generate_url(&format!("{prefix}dl")); let dl_path = generate_path(&format!("{prefix}dl")); let api_path = generate_path(&format!("{prefix}api")); let token = self .token .unwrap_or_else(|| Token::Plaintext(format!("{prefix}sekrit"))); let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api { // No need to start the HTTP server. 
(None, index_url, api_url, dl_url) } else { let server = HttpServer::new( registry_path.clone(), dl_path, api_path.clone(), token.clone(), self.auth_required, self.custom_responders, self.not_found_handler, self.delayed_index_update, ); let index_url = if self.http_index { server.index_url() } else { index_url }; let api_url = if self.http_api { server.api_url() } else { api_url }; let dl_url = server.dl_url(); (Some(server), index_url, api_url, dl_url) }; let registry = TestRegistry { api_url, index_url, server, dl_url, path: registry_path, token, }; if self.configure_registry { if let Some(alternative) = &self.alternative { append( &config_path, format!( " [registries.{alternative}] index = '{}'", registry.index_url ) .as_bytes(), ) .unwrap(); if let Some(p) = &self.credential_provider { append( &config_path, &format!( " credential-provider = {p} " ) .as_bytes(), ) .unwrap() } } else { append( &config_path, format!( " [source.crates-io] replace-with = 'dummy-registry' [registries.dummy-registry] index = '{}'", registry.index_url ) .as_bytes(), ) .unwrap(); if let Some(p) = &self.credential_provider { append( &config_path, &format!( " [registry] credential-provider = {p} " ) .as_bytes(), ) .unwrap() } } } if self.configure_token { let credentials = paths::cargo_home().join("credentials.toml"); match ®istry.token { Token::Plaintext(token) => { if let Some(alternative) = &self.alternative { append( &credentials, format!( r#" [registries.{alternative}] token = "{token}" "# ) .as_bytes(), ) .unwrap(); } else { append( &credentials, format!( r#" [registry] token = "{token}" "# ) .as_bytes(), ) .unwrap(); } } Token::Keys(key, subject) => { let mut out = if let Some(alternative) = &self.alternative { format!("\n[registries.{alternative}]\n") } else { format!("\n[registry]\n") }; out += &format!("secret-key = \"{key}\"\n"); if let Some(subject) = subject { out += &format!("secret-key-subject = \"{subject}\"\n"); } append(&credentials, out.as_bytes()).unwrap(); } } } let 
auth = if self.auth_required { r#","auth-required":true"# } else { "" }; let api = if self.api { format!(r#","api":"{}""#, registry.api_url) } else { String::new() }; // Initialize a new registry. repo(®istry.path) .file( "config.json", &format!(r#"{{"dl":"{}"{api}{auth}}}"#, registry.dl_url), ) .build(); fs::create_dir_all(api_path.join("api/v1/crates")).unwrap(); registry } } /// Published package builder for [`TestRegistry`] /// /// This uses "source replacement" using an automatically generated /// `.cargo/config` file to ensure that dependencies will use these packages /// instead of contacting crates.io. See `source-replacement.md` for more /// details on how source replacement works. /// /// Call [`Package::publish`] to finalize and create the package. /// /// If no files are specified, an empty `lib.rs` file is automatically created. /// /// The `Cargo.toml` file is automatically generated based on the methods /// called on `Package` (for example, calling [`Package::dep()`] will add to the /// `[dependencies]` automatically). You may also specify a `Cargo.toml` file /// to override the generated one. /// /// This supports different registry types: /// - Regular source replacement that replaces `crates.io` (the default). /// - A "local registry" which is a subset for vendoring (see /// [`Package::local`]). /// - An "alternative registry" which requires specifying the registry name /// (see [`Package::alternative`]). /// /// This does not support "directory sources". See `directory.rs` for /// `VendorPackage` which implements directory sources. 
#[must_use] pub struct Package { name: String, vers: String, deps: Vec, files: Vec, yanked: bool, features: FeatureMap, local: bool, alternative: bool, invalid_index_line: bool, index_line: Option, edition: Option, resolver: Option, proc_macro: bool, links: Option, rust_version: Option, cargo_features: Vec, v: Option, } pub(crate) type FeatureMap = BTreeMap>; /// Published package dependency builder, see [`Package::add_dep`] #[derive(Clone)] pub struct Dependency { name: String, vers: String, kind: String, artifact: Option, bindep_target: Option, lib: bool, target: Option, features: Vec, registry: Option, package: Option, optional: bool, default_features: bool, public: bool, } /// Entry with data that corresponds to [`tar::EntryType`]. #[non_exhaustive] enum EntryData { Regular(String), Symlink(PathBuf), } /// A file to be created in a package. struct PackageFile { path: String, contents: EntryData, /// The Unix mode for the file. Note that when extracted on Windows, this /// is mostly ignored since it doesn't have the same style of permissions. mode: u32, /// If `true`, the file is created in the root of the tarfile, used for /// testing invalid packages. extra: bool, } const DEFAULT_MODE: u32 = 0o644; /// Setup a local psuedo-crates.io [`TestRegistry`] /// /// This is implicitly called by [`Package::new`]. /// /// When calling `cargo publish`, see instead [`crate::publish`]. pub fn init() -> TestRegistry { RegistryBuilder::new().build() } /// Setup a local "alternative" [`TestRegistry`] /// /// When calling `cargo publish`, see instead [`crate::publish`]. 
pub fn alt_init() -> TestRegistry { init(); RegistryBuilder::new().alternative().build() } pub struct HttpServerHandle { addr: SocketAddr, handle: Option>, } impl HttpServerHandle { pub fn index_url(&self) -> Url { Url::parse(&format!("sparse+http://{}/index/", self.addr.to_string())).unwrap() } pub fn api_url(&self) -> Url { Url::parse(&format!("http://{}/", self.addr.to_string())).unwrap() } pub fn dl_url(&self) -> Url { Url::parse(&format!("http://{}/dl", self.addr.to_string())).unwrap() } fn stop(&self) { if let Ok(mut stream) = TcpStream::connect(self.addr) { // shutdown the server let _ = stream.write_all(b"stop"); let _ = stream.flush(); } } } impl Drop for HttpServerHandle { fn drop(&mut self) { self.stop(); } } /// Request to the test http server #[derive(Clone)] pub struct Request { pub url: Url, pub method: String, pub body: Option>, pub authorization: Option, pub if_modified_since: Option, pub if_none_match: Option, } impl fmt::Debug for Request { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // body is not included as it can produce long debug outputs f.debug_struct("Request") .field("url", &self.url) .field("method", &self.method) .field("authorization", &self.authorization) .field("if_modified_since", &self.if_modified_since) .field("if_none_match", &self.if_none_match) .finish() } } /// Response from the test http server pub struct Response { pub code: u32, pub headers: Vec, pub body: Vec, } pub struct HttpServer { listener: TcpListener, registry_path: PathBuf, dl_path: PathBuf, api_path: PathBuf, addr: SocketAddr, token: Token, auth_required: bool, custom_responders: HashMap, not_found_handler: RequestCallback, delayed_index_update: usize, } /// A helper struct that collects the arguments for [`HttpServer::check_authorized`]. /// Based on looking at the request, these are the fields that the authentication header should attest to. 
struct Mutation<'a> { mutation: &'a str, name: Option<&'a str>, vers: Option<&'a str>, cksum: Option<&'a str>, } impl HttpServer { pub fn new( registry_path: PathBuf, dl_path: PathBuf, api_path: PathBuf, token: Token, auth_required: bool, custom_responders: HashMap, not_found_handler: RequestCallback, delayed_index_update: usize, ) -> HttpServerHandle { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); let server = HttpServer { listener, registry_path, dl_path, api_path, addr, token, auth_required, custom_responders, not_found_handler, delayed_index_update, }; let handle = Some(thread::spawn(move || server.start())); HttpServerHandle { addr, handle } } fn start(&self) { let mut line = String::new(); 'server: loop { let (socket, _) = self.listener.accept().unwrap(); let mut buf = BufReader::new(socket); line.clear(); if buf.read_line(&mut line).unwrap() == 0 { // Connection terminated. continue; } // Read the "GET path HTTP/1.1" line. let mut parts = line.split_ascii_whitespace(); let method = parts.next().unwrap().to_ascii_lowercase(); if method == "stop" { // Shutdown the server. return; } let addr = self.listener.local_addr().unwrap(); let url = format!( "http://{}/{}", addr, parts.next().unwrap().trim_start_matches('/') ); let url = Url::parse(&url).unwrap(); // Grab headers we care about. let mut if_modified_since = None; let mut if_none_match = None; let mut authorization = None; let mut content_len = None; loop { line.clear(); if buf.read_line(&mut line).unwrap() == 0 { continue 'server; } if line == "\r\n" { // End of headers. 
line.clear(); break; } let (name, value) = line.split_once(':').unwrap(); let name = name.trim().to_ascii_lowercase(); let value = value.trim().to_string(); match name.as_str() { "if-modified-since" => if_modified_since = Some(value), "if-none-match" => if_none_match = Some(value), "authorization" => authorization = Some(value), "content-length" => content_len = Some(value), _ => {} } } let mut body = None; if let Some(con_len) = content_len { let len = con_len.parse::().unwrap(); let mut content = vec![0u8; len as usize]; buf.read_exact(&mut content).unwrap(); body = Some(content) } let req = Request { authorization, if_modified_since, if_none_match, method, url, body, }; println!("req: {:#?}", req); let response = self.route(&req); let buf = buf.get_mut(); write!(buf, "HTTP/1.1 {}\r\n", response.code).unwrap(); write!(buf, "Content-Length: {}\r\n", response.body.len()).unwrap(); write!(buf, "Connection: close\r\n").unwrap(); for header in response.headers { write!(buf, "{}\r\n", header).unwrap(); } write!(buf, "\r\n").unwrap(); buf.write_all(&response.body).unwrap(); buf.flush().unwrap(); } } fn check_authorized(&self, req: &Request, mutation: Option>) -> bool { let (private_key, private_key_subject) = if mutation.is_some() || self.auth_required { match &self.token { Token::Plaintext(token) => return Some(token) == req.authorization.as_ref(), Token::Keys(private_key, private_key_subject) => { (private_key.as_str(), private_key_subject) } } } else { assert!(req.authorization.is_none(), "unexpected token"); return true; }; macro_rules! 
t { ($e:expr) => { match $e { Some(e) => e, None => return false, } }; } let secret: AsymmetricSecretKey = private_key.try_into().unwrap(); let public: AsymmetricPublicKey = (&secret).try_into().unwrap(); let pub_key_id: pasetors::paserk::Id = (&public).into(); let mut paserk_pub_key_id = String::new(); FormatAsPaserk::fmt(&pub_key_id, &mut paserk_pub_key_id).unwrap(); // https://github.com/rust-lang/rfcs/blob/master/text/3231-cargo-asymmetric-tokens.md#how-the-registry-server-will-validate-an-asymmetric-token // - The PASETO is in v3.public format. let authorization = t!(&req.authorization); let untrusted_token = t!( UntrustedToken::::try_from(authorization) .ok() ); // - The PASETO validates using the public key it looked up based on the key ID. #[derive(serde::Deserialize, Debug)] struct Footer<'a> { url: &'a str, kip: &'a str, } let footer: Footer<'_> = t!(serde_json::from_slice(untrusted_token.untrusted_footer()).ok()); if footer.kip != paserk_pub_key_id { return false; } let trusted_token = t!( pasetors::version3::PublicToken::verify(&public, &untrusted_token, None, None,) .ok() ); // - The URL matches the registry base URL if footer.url != "https://github.com/rust-lang/crates.io-index" && footer.url != &format!("sparse+http://{}/index/", self.addr.to_string()) { return false; } // - The PASETO is still within its valid time period. #[derive(serde::Deserialize)] struct Message<'a> { iat: &'a str, sub: Option<&'a str>, mutation: Option<&'a str>, name: Option<&'a str>, vers: Option<&'a str>, cksum: Option<&'a str>, _challenge: Option<&'a str>, // todo: PASETO with challenges v: Option, } let message: Message<'_> = t!(serde_json::from_str(trusted_token.payload()).ok()); let token_time = t!(OffsetDateTime::parse(message.iat, &Rfc3339).ok()); let now = OffsetDateTime::now_utc(); if (now - token_time) > Duration::MINUTE { return false; } if private_key_subject.as_deref() != message.sub { return false; } // - If the claim v is set, that it has the value of 1. 
if let Some(v) = message.v { if v != 1 { return false; } } // - If the server issues challenges, that the challenge has not yet been answered. // todo: PASETO with challenges // - If the operation is a mutation: if let Some(mutation) = mutation { // - That the operation matches the mutation field and is one of publish, yank, or unyank. if message.mutation != Some(mutation.mutation) { return false; } // - That the package, and version match the request. if message.name != mutation.name { return false; } if message.vers != mutation.vers { return false; } // - If the mutation is publish, that the version has not already been published, and that the hash matches the request. if mutation.mutation == "publish" { if message.cksum != mutation.cksum { return false; } } } else { // - If the operation is a read, that the mutation field is not set. if message.mutation.is_some() || message.name.is_some() || message.vers.is_some() || message.cksum.is_some() { return false; } } true } /// Route the request fn route(&self, req: &Request) -> Response { // Check for custom responder if let Some(responder) = self.custom_responders.get(req.url.path()) { return responder(&req, self); } let path: Vec<_> = req.url.path()[1..].split('/').collect(); match (req.method.as_str(), path.as_slice()) { ("get", ["index", ..]) => { if !self.check_authorized(req, None) { self.unauthorized(req) } else { self.index(&req) } } ("get", ["dl", ..]) => { if !self.check_authorized(req, None) { self.unauthorized(req) } else { self.dl(&req) } } // publish ("put", ["api", "v1", "crates", "new"]) => self.check_authorized_publish(req), // The remainder of the operators in the test framework do nothing other than responding 'ok'. // // Note: We don't need to support anything real here because there are no tests that // currently require anything other than publishing via the http api. 
// yank / unyank ("delete" | "put", ["api", "v1", "crates", crate_name, version, mutation]) => { if !self.check_authorized( req, Some(Mutation { mutation, name: Some(crate_name), vers: Some(version), cksum: None, }), ) { self.unauthorized(req) } else { self.ok(&req) } } // owners ("get" | "put" | "delete", ["api", "v1", "crates", crate_name, "owners"]) => { if !self.check_authorized( req, Some(Mutation { mutation: "owners", name: Some(crate_name), vers: None, cksum: None, }), ) { self.unauthorized(req) } else { self.ok(&req) } } _ => self.not_found(&req), } } /// Unauthorized response pub fn unauthorized(&self, _req: &Request) -> Response { Response { code: 401, headers: vec![ r#"WWW-Authenticate: Cargo login_url="https://test-registry-login/me""#.to_string(), ], body: b"Unauthorized message from server.".to_vec(), } } /// Not found response pub fn not_found(&self, req: &Request) -> Response { (self.not_found_handler)(req, self) } /// Respond OK without doing anything pub fn ok(&self, _req: &Request) -> Response { Response { code: 200, headers: vec![], body: br#"{"ok": true, "msg": "completed!"}"#.to_vec(), } } /// Return an internal server error (HTTP 500) pub fn internal_server_error(&self, _req: &Request) -> Response { Response { code: 500, headers: vec![], body: br#"internal server error"#.to_vec(), } } /// Serve the download endpoint pub fn dl(&self, req: &Request) -> Response { let file = self .dl_path .join(req.url.path().strip_prefix("/dl/").unwrap()); println!("{}", file.display()); if !file.exists() { return self.not_found(req); } return Response { body: fs::read(&file).unwrap(), code: 200, headers: vec![], }; } /// Serve the registry index pub fn index(&self, req: &Request) -> Response { let file = self .registry_path .join(req.url.path().strip_prefix("/index/").unwrap()); if !file.exists() { return self.not_found(req); } else { // Now grab info about the file. 
let data = fs::read(&file).unwrap(); let etag = Sha256::new().update(&data).finish_hex(); let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap()); // Start to construct our response: let mut any_match = false; let mut all_match = true; if let Some(expected) = &req.if_none_match { if &etag != expected { all_match = false; } else { any_match = true; } } if let Some(expected) = &req.if_modified_since { // NOTE: Equality comparison is good enough for tests. if &last_modified != expected { all_match = false; } else { any_match = true; } } if any_match && all_match { return Response { body: Vec::new(), code: 304, headers: vec![], }; } else { return Response { body: data, code: 200, headers: vec![ format!("ETag: \"{}\"", etag), format!("Last-Modified: {}", last_modified), ], }; } } } pub fn check_authorized_publish(&self, req: &Request) -> Response { if let Some(body) = &req.body { // Mimic the publish behavior for local registries by writing out the request // so tests can verify publishes made to either registry type. 
let path = self.api_path.join("api/v1/crates/new"); t!(fs::create_dir_all(path.parent().unwrap())); t!(fs::write(&path, body)); // Get the metadata of the package let (len, remaining) = body.split_at(4); let json_len = u32::from_le_bytes(len.try_into().unwrap()); let (json, remaining) = remaining.split_at(json_len as usize); let new_crate = serde_json::from_slice::(json).unwrap(); // Get the `.crate` file let (len, remaining) = remaining.split_at(4); let file_len = u32::from_le_bytes(len.try_into().unwrap()); let (file, _remaining) = remaining.split_at(file_len as usize); let file_cksum = cksum(&file); if !self.check_authorized( req, Some(Mutation { mutation: "publish", name: Some(&new_crate.name), vers: Some(&new_crate.vers), cksum: Some(&file_cksum), }), ) { return self.unauthorized(req); } let dst = self .dl_path .join(&new_crate.name) .join(&new_crate.vers) .join("download"); if self.delayed_index_update == 0 { save_new_crate(dst, new_crate, file, file_cksum, &self.registry_path); } else { let delayed_index_update = self.delayed_index_update; let registry_path = self.registry_path.clone(); let file = Vec::from(file); thread::spawn(move || { thread::sleep(std::time::Duration::new(delayed_index_update as u64, 0)); save_new_crate(dst, new_crate, &file, file_cksum, ®istry_path); }); } self.ok(&req) } else { Response { code: 400, headers: vec![], body: b"The request was missing a body".to_vec(), } } } } fn save_new_crate( dst: PathBuf, new_crate: crates_io::NewCrate, file: &[u8], file_cksum: String, registry_path: &Path, ) { // Write the `.crate` t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, file)); let deps = new_crate .deps .iter() .map(|dep| { let (name, package) = match &dep.explicit_name_in_toml { Some(explicit) => (explicit.to_string(), Some(dep.name.to_string())), None => (dep.name.to_string(), None), }; serde_json::json!({ "name": name, "req": dep.version_req, "features": dep.features, "default_features": dep.default_features, "target": 
dep.target, "optional": dep.optional, "kind": dep.kind, "registry": dep.registry, "package": package, "artifact": dep.artifact, "bindep_target": dep.bindep_target, "lib": dep.lib, }) }) .collect::>(); let line = create_index_line( serde_json::json!(new_crate.name), &new_crate.vers, deps, &file_cksum, new_crate.features, false, new_crate.links, new_crate.rust_version.as_deref(), None, ); write_to_index(registry_path, &new_crate.name, line, false); } impl Package { /// Creates a new package builder. /// Call `publish()` to finalize and build the package. pub fn new(name: &str, vers: &str) -> Package { let config = paths::cargo_home().join("config.toml"); if !config.exists() { init(); } Package { name: name.to_string(), vers: vers.to_string(), deps: Vec::new(), files: Vec::new(), yanked: false, features: BTreeMap::new(), local: false, alternative: false, invalid_index_line: false, index_line: None, edition: None, resolver: None, proc_macro: false, links: None, rust_version: None, cargo_features: Vec::new(), v: None, } } /// Call with `true` to publish in a "local registry". /// /// See `source-replacement.html#local-registry-sources` for more details /// on local registries. See `local_registry.rs` for the tests that use /// this. pub fn local(&mut self, local: bool) -> &mut Package { self.local = local; self } /// Call with `true` to publish in an "alternative registry". /// /// The name of the alternative registry is called "alternative". /// /// See `src/doc/src/reference/registries.md` for more details on /// alternative registries. See `alt_registry.rs` for the tests that use /// this. /// /// **Requires:** [`alt_init`] pub fn alternative(&mut self, alternative: bool) -> &mut Package { self.alternative = alternative; self } /// Adds a file to the package. pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { self.file_with_mode(name, DEFAULT_MODE, contents) } /// Adds a file with a specific Unix mode. 
pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), contents: EntryData::Regular(contents.into()), mode, extra: false, }); self } /// Adds a symlink to a path to the package. pub fn symlink(&mut self, dst: &str, src: &str) -> &mut Package { self.files.push(PackageFile { path: dst.to_string(), contents: EntryData::Symlink(src.into()), mode: DEFAULT_MODE, extra: false, }); self } /// Adds an "extra" file that is not rooted within the package. /// /// Normal files are automatically placed within a directory named /// `$PACKAGE-$VERSION`. This allows you to override that behavior, /// typically for testing invalid behavior. pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), contents: EntryData::Regular(contents.to_string()), mode: DEFAULT_MODE, extra: true, }); self } /// Adds a normal dependency. Example: /// ```toml /// [dependencies] /// foo = {version = "1.0"} /// ``` pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(&Dependency::new(name, vers)) } /// Adds a dependency with the given feature. Example: /// ```toml /// [dependencies] /// foo = {version = "1.0", "features": ["feat1", "feat2"]} /// ``` pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package { self.add_dep(Dependency::new(name, vers).enable_features(features)) } /// Adds a platform-specific dependency. Example: /// ```toml /// [target.'cfg(windows)'.dependencies] /// foo = {version = "1.0"} /// ``` pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).target(target)) } /// Adds a dependency to the alternative registry. pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).registry("alternative")) } /// Adds a dev-dependency. 
Example: /// ```toml /// [dev-dependencies] /// foo = {version = "1.0"} /// ``` pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).dev()) } /// Adds a build-dependency. Example: /// ```toml /// [build-dependencies] /// foo = {version = "1.0"} /// ``` pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).build()) } pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package { self.deps.push(dep.clone()); self } /// Specifies whether or not the package is "yanked". pub fn yanked(&mut self, yanked: bool) -> &mut Package { self.yanked = yanked; self } /// Specifies `package.edition` pub fn edition(&mut self, edition: &str) -> &mut Package { self.edition = Some(edition.to_owned()); self } /// Specifies `package.resolver` pub fn resolver(&mut self, resolver: &str) -> &mut Package { self.resolver = Some(resolver.to_owned()); self } /// Specifies whether or not this is a proc macro. pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package { self.proc_macro = proc_macro; self } /// Adds an entry in the `[features]` section. pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package { let deps = deps.iter().map(|s| s.to_string()).collect(); self.features.insert(name.to_string(), deps); self } /// Specify a minimal Rust version. pub fn rust_version(&mut self, rust_version: &str) -> &mut Package { self.rust_version = Some(rust_version.into()); self } /// Causes the JSON line emitted in the index to be invalid, presumably /// causing Cargo to skip over this version. 
pub fn invalid_index_line(&mut self, invalid: bool) -> &mut Package { self.invalid_index_line = invalid; self } /// Override the auto-generated index line /// /// This can give more control over error cases than [`Package::invalid_index_line`] pub fn index_line(&mut self, line: &str) -> &mut Package { self.index_line = Some(line.to_owned()); self } pub fn links(&mut self, links: &str) -> &mut Package { self.links = Some(links.to_string()); self } pub fn cargo_feature(&mut self, feature: &str) -> &mut Package { self.cargo_features.push(feature.to_owned()); self } /// Sets the index schema version for this package. /// /// See `cargo::sources::registry::IndexPackage` for more information. pub fn schema_version(&mut self, version: u32) -> &mut Package { self.v = Some(version); self } /// Creates the package and place it in the registry. /// /// This does not actually use Cargo's publishing system, but instead /// manually creates the entry in the registry on the filesystem. /// /// Returns the checksum for the package. pub fn publish(&self) -> String { self.make_archive(); // Figure out what we're going to write into the index. let deps = self .deps .iter() .map(|dep| { // In the index, the `registry` is null if it is from the same registry. // In Cargo.toml, it is None if it is from crates.io. 
let registry_url = match (self.alternative, dep.registry.as_deref()) { (false, None) => None, (false, Some("alternative")) => Some(alt_registry_url().to_string()), (true, None) => { Some("https://github.com/rust-lang/crates.io-index".to_string()) } (true, Some("alternative")) => None, _ => panic!("registry_dep currently only supports `alternative`"), }; let artifact = if let Some(artifact) = &dep.artifact { serde_json::json!([artifact]) } else { serde_json::json!(null) }; serde_json::json!({ "name": dep.name, "req": dep.vers, "features": dep.features, "default_features": dep.default_features, "target": dep.target, "artifact": artifact, "bindep_target": dep.bindep_target, "lib": dep.lib, "optional": dep.optional, "kind": dep.kind, "registry": registry_url, "package": dep.package, "public": dep.public, }) }) .collect::>(); let cksum = { let c = t!(fs::read(&self.archive_dst())); cksum(&c) }; let line = if let Some(line) = self.index_line.clone() { line } else { let name = if self.invalid_index_line { serde_json::json!(1) } else { serde_json::json!(self.name) }; create_index_line( name, &self.vers, deps, &cksum, self.features.clone(), self.yanked, self.links.clone(), self.rust_version.as_deref(), self.v, ) }; let registry_path = if self.alternative { alt_registry_path() } else { registry_path() }; write_to_index(®istry_path, &self.name, line, self.local); cksum } fn make_archive(&self) { let dst = self.archive_dst(); t!(fs::create_dir_all(dst.parent().unwrap())); let f = t!(File::create(&dst)); let mut a = Builder::new(GzEncoder::new(f, Compression::none())); a.sparse(false); if !self .files .iter() .any(|PackageFile { path, .. 
}| path == "Cargo.toml") { self.append_manifest(&mut a); } if self.files.is_empty() { self.append( &mut a, "src/lib.rs", DEFAULT_MODE, &EntryData::Regular("".into()), ); } else { for PackageFile { path, contents, mode, extra, } in &self.files { if *extra { self.append_raw(&mut a, path, *mode, contents); } else { self.append(&mut a, path, *mode, contents); } } } } fn append_manifest(&self, ar: &mut Builder) { let mut manifest = String::new(); if !self.cargo_features.is_empty() { let mut features = String::new(); serde::Serialize::serialize( &self.cargo_features, toml::ser::ValueSerializer::new(&mut features), ) .unwrap(); manifest.push_str(&format!("cargo-features = {}\n\n", features)); } manifest.push_str(&format!( r#" [package] name = "{}" version = "{}" authors = [] "#, self.name, self.vers )); if let Some(version) = &self.rust_version { manifest.push_str(&format!("rust-version = \"{}\"\n", version)); } if let Some(edition) = &self.edition { manifest.push_str(&format!("edition = \"{}\"\n", edition)); } if let Some(resolver) = &self.resolver { manifest.push_str(&format!("resolver = \"{}\"\n", resolver)); } if !self.features.is_empty() { let features: Vec = self .features .iter() .map(|(feature, features)| { if features.is_empty() { format!("{} = []", feature) } else { format!( "{} = [{}]", feature, features .iter() .map(|s| format!("\"{}\"", s)) .collect::>() .join(", ") ) } }) .collect(); manifest.push_str(&format!("\n[features]\n{}", features.join("\n"))); } for dep in self.deps.iter() { let target = match dep.target { None => String::new(), Some(ref s) => format!("target.'{}'.", s), }; let kind = match &dep.kind[..] 
{ "build" => "build-", "dev" => "dev-", _ => "", }; manifest.push_str(&format!( r#" [{}{}dependencies.{}] version = "{}" "#, target, kind, dep.name, dep.vers )); if dep.optional { manifest.push_str("optional = true\n"); } if let Some(artifact) = &dep.artifact { manifest.push_str(&format!("artifact = \"{}\"\n", artifact)); } if let Some(target) = &dep.bindep_target { manifest.push_str(&format!("target = \"{}\"\n", target)); } if dep.lib { manifest.push_str("lib = true\n"); } if let Some(registry) = &dep.registry { assert_eq!(registry, "alternative"); manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url())); } if !dep.default_features { manifest.push_str("default-features = false\n"); } if !dep.features.is_empty() { let mut features = String::new(); serde::Serialize::serialize( &dep.features, toml::ser::ValueSerializer::new(&mut features), ) .unwrap(); manifest.push_str(&format!("features = {}\n", features)); } if let Some(package) = &dep.package { manifest.push_str(&format!("package = \"{}\"\n", package)); } } if self.proc_macro { manifest.push_str("[lib]\nproc-macro = true\n"); } self.append( ar, "Cargo.toml", DEFAULT_MODE, &EntryData::Regular(manifest.into()), ); } fn append(&self, ar: &mut Builder, file: &str, mode: u32, contents: &EntryData) { self.append_raw( ar, &format!("{}-{}/{}", self.name, self.vers, file), mode, contents, ); } fn append_raw( &self, ar: &mut Builder, path: &str, mode: u32, contents: &EntryData, ) { let mut header = Header::new_ustar(); let contents = match contents { EntryData::Regular(contents) => contents.as_str(), EntryData::Symlink(src) => { header.set_entry_type(tar::EntryType::Symlink); t!(header.set_link_name(src)); "" // Symlink has no contents. } }; header.set_size(contents.len() as u64); t!(header.set_path(path)); header.set_mode(mode); header.set_cksum(); t!(ar.append(&header, contents.as_bytes())); } /// Returns the path to the compressed package file. 
pub fn archive_dst(&self) -> PathBuf { if self.local { let path = if self.alternative { alt_registry_path() } else { registry_path() }; path.join(format!("{}-{}.crate", self.name, self.vers)) } else if self.alternative { alt_dl_path() .join(&self.name) .join(&self.vers) .join("download") } else { dl_path().join(&self.name).join(&self.vers).join("download") } } } /// Generate a checksum pub fn cksum(s: &[u8]) -> String { Sha256::new().update(s).finish_hex() } impl Dependency { pub fn new(name: &str, vers: &str) -> Dependency { Dependency { name: name.to_string(), vers: vers.to_string(), kind: "normal".to_string(), artifact: None, bindep_target: None, lib: false, target: None, features: Vec::new(), package: None, optional: false, registry: None, default_features: true, public: false, } } /// Changes this to `[build-dependencies]`. pub fn build(&mut self) -> &mut Self { self.kind = "build".to_string(); self } /// Changes this to `[dev-dependencies]`. pub fn dev(&mut self) -> &mut Self { self.kind = "dev".to_string(); self } /// Changes this to `[target.$target.dependencies]`. pub fn target(&mut self, target: &str) -> &mut Self { self.target = Some(target.to_string()); self } /// Change the artifact to be of the given kind, like "bin", or "staticlib", /// along with a specific target triple if provided. pub fn artifact(&mut self, kind: &str, target: Option) -> &mut Self { self.artifact = Some(kind.to_string()); self.bindep_target = target; self } /// Adds `registry = $registry` to this dependency. pub fn registry(&mut self, registry: &str) -> &mut Self { self.registry = Some(registry.to_string()); self } /// Adds `features = [ ... ]` to this dependency. pub fn enable_features(&mut self, features: &[&str]) -> &mut Self { self.features.extend(features.iter().map(|s| s.to_string())); self } /// Adds `package = ...` to this dependency. 
pub fn package(&mut self, pkg: &str) -> &mut Self { self.package = Some(pkg.to_string()); self } /// Changes this to an optional dependency. pub fn optional(&mut self, optional: bool) -> &mut Self { self.optional = optional; self } /// Changes this to an public dependency. pub fn public(&mut self, public: bool) -> &mut Self { self.public = public; self } /// Adds `default-features = false` if the argument is `false`. pub fn default_features(&mut self, default_features: bool) -> &mut Self { self.default_features = default_features; self } } cargo-test-support-0.7.3/src/tools.rs000064400000000000000000000112021046102023000157060ustar 00000000000000//! Common executables that can be reused by various tests. use crate::{basic_manifest, paths, project, Project}; use std::path::{Path, PathBuf}; use std::sync::Mutex; use std::sync::OnceLock; static ECHO_WRAPPER: OnceLock>> = OnceLock::new(); static ECHO: OnceLock>> = OnceLock::new(); static CLIPPY_DRIVER: OnceLock>> = OnceLock::new(); /// Returns the path to an executable that works as a wrapper around rustc. /// /// The wrapper will echo the command line it was called with to stderr. 
pub fn echo_wrapper() -> PathBuf { let mut lock = ECHO_WRAPPER .get_or_init(|| Default::default()) .lock() .unwrap(); if let Some(path) = &*lock { return path.clone(); } let p = project() .at(paths::global_root().join("rustc-echo-wrapper")) .file("Cargo.toml", &basic_manifest("rustc-echo-wrapper", "1.0.0")) .file( "src/main.rs", r#" use std::fs::read_to_string; use std::path::PathBuf; fn main() { // Handle args from `@path` argfile for rustc let args = std::env::args() .flat_map(|p| if let Some(p) = p.strip_prefix("@") { read_to_string(p).unwrap().lines().map(String::from).collect() } else { vec![p] }) .collect::>(); eprintln!("WRAPPER CALLED: {}", args[1..].join(" ")); let status = std::process::Command::new(&args[1]) .args(&args[2..]).status().unwrap(); std::process::exit(status.code().unwrap_or(1)); } "#, ) .build(); p.cargo("build").run(); let path = p.bin("rustc-echo-wrapper"); *lock = Some(path.clone()); path } /// Returns the path to an executable that prints its arguments. /// /// Do not expect this to be anything fancy. pub fn echo() -> PathBuf { let mut lock = ECHO.get_or_init(|| Default::default()).lock().unwrap(); if let Some(path) = &*lock { return path.clone(); } if let Ok(path) = cargo_util::paths::resolve_executable(Path::new("echo")) { *lock = Some(path.clone()); return path; } // Often on Windows, `echo` is not available. 
let p = project() .at(paths::global_root().join("basic-echo")) .file("Cargo.toml", &basic_manifest("basic-echo", "1.0.0")) .file( "src/main.rs", r#" fn main() { let mut s = String::new(); let mut it = std::env::args().skip(1).peekable(); while let Some(n) = it.next() { s.push_str(&n); if it.peek().is_some() { s.push(' '); } } println!("{}", s); } "#, ) .build(); p.cargo("build").run(); let path = p.bin("basic-echo"); *lock = Some(path.clone()); path } /// Returns a project which builds a cargo-echo simple subcommand pub fn echo_subcommand() -> Project { let p = project() .at("cargo-echo") .file("Cargo.toml", &basic_manifest("cargo-echo", "0.0.1")) .file( "src/main.rs", r#" fn main() { let args: Vec<_> = ::std::env::args().skip(1).collect(); println!("{}", args.join(" ")); } "#, ) .build(); p.cargo("build").run(); p } /// A wrapper around `rustc` instead of calling `clippy`. pub fn wrapped_clippy_driver() -> PathBuf { let mut lock = CLIPPY_DRIVER .get_or_init(|| Default::default()) .lock() .unwrap(); if let Some(path) = &*lock { return path.clone(); } let clippy_driver = project() .at(paths::global_root().join("clippy-driver")) .file("Cargo.toml", &basic_manifest("clippy-driver", "0.0.1")) .file( "src/main.rs", r#" fn main() { let mut args = std::env::args_os(); let _me = args.next().unwrap(); let rustc = args.next().unwrap(); let status = std::process::Command::new(rustc).args(args).status().unwrap(); std::process::exit(status.code().unwrap_or(1)); } "#, ) .build(); clippy_driver.cargo("build").run(); let path = clippy_driver.bin("clippy-driver"); *lock = Some(path.clone()); path }