tokei-13.0.0/.cargo_vcs_info.json0000644000000001360000000000100122300ustar { "git": { "sha1": "6f3556bd664c6483661d432a1270b1d2a69b82ec" }, "path_in_vcs": "" }tokei-13.0.0/Cargo.lock0000644000001360510000000000100102110ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "anstream" version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = 
"anstyle-wincon" version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", "windows-sys 0.52.0", ] [[package]] name = "arbitrary" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" dependencies = [ "derive_arbitrary", ] [[package]] name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "autocfg" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "bit-set" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ "bit-vec", ] [[package]] name = "bit-vec" version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" [[package]] name = "bitflags" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "bstr" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" dependencies = [ "memchr", "serde", ] [[package]] name = "bumpalo" version = "3.16.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "cc" version = "1.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72db2f7947ecee9b03b510377e8bb9077afa27176fdbff55c51027e976fdcc48" dependencies = [ "jobserver", "libc", "shlex", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "windows-targets 0.52.6", ] [[package]] name = "chrono-tz" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93698b29de5e97ad0ae26447b344c482a7284c737d9ddc5f9e52b74a336671bb" dependencies = [ "chrono", "chrono-tz-build", "phf", ] [[package]] name = "chrono-tz-build" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c088aee841df9c3041febbb73934cfc39708749bf96dc827e3359cd39ef11b1" dependencies = [ "parse-zoneinfo", "phf", "phf_codegen", ] [[package]] name = "clap" version = "4.5.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" dependencies = [ "clap_builder", "clap_derive", ] [[package]] name = "clap-cargo" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38ae55615695e768a76899c8411b4ebacfbe525e964f94fd24f0007b10b45cd3" dependencies = [ "anstyle", 
"clap", ] [[package]] name = "clap_builder" version = "4.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" dependencies = [ "anstream", "anstyle", "clap_lex", "strsim", "terminal_size", ] [[package]] name = "clap_derive" version = "4.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" dependencies = [ "heck", "proc-macro2", "quote", "syn", ] [[package]] name = "clap_lex" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" [[package]] name = "colorchoice" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "colored" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" dependencies = [ "lazy_static", "windows-sys 0.48.0", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad" dependencies = [ "libc", ] [[package]] name = "crossbeam-channel" version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "dashmap" version = "6.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28" dependencies = [ "cfg-if", "crossbeam-utils", "hashbrown", "lock_api", "once_cell", "parking_lot_core", "serde", ] [[package]] name = "derive_arbitrary" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "deunicode" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "339544cc9e2c4dc3fc7149fd630c5f22263a4fdf18a98afd0075784968b5cf00" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = 
"encoding_rs" version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if", ] [[package]] name = "encoding_rs_io" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cc3c5651fb62ab8aa3103998dade57efdd028544bd300516baa31840c252a83" dependencies = [ "encoding_rs", ] [[package]] name = "env_filter" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" dependencies = [ "log", "regex", ] [[package]] name = "env_logger" version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" dependencies = [ "anstream", "anstyle", "env_filter", "humantime", "log", ] [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "etcetera" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ "cfg-if", "home", "windows-sys 0.48.0", ] [[package]] name = "fastrand" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" 
[[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "git2" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" dependencies = [ "bitflags", "libc", "libgit2-sys", "log", "url", ] [[package]] name = "globset" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" dependencies = [ "aho-corasick", "bstr", "log", "regex-automata", "regex-syntax", ] [[package]] name = "globwalk" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" dependencies = [ "bitflags", "ignore", "walkdir", ] [[package]] name = "grep-matcher" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47a3141a10a43acfedc7c98a60a834d7ba00dfe7bec9071cbfc19b55b292ac02" dependencies = [ "memchr", ] [[package]] name = "grep-searcher" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba536ae4f69bec62d8839584dd3153d3028ef31bb229f04e09fb5a9e5a193c54" dependencies = [ "bstr", "encoding_rs", "encoding_rs_io", "grep-matcher", "log", 
"memchr", "memmap2", ] [[package]] name = "half" version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "home" version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "humansize" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6cb51c9a029ddc91b07a787f1d86b53ccfa49b0e86688c946ebe8d3555685dd7" dependencies = [ "libm", ] [[package]] name = "humantime" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "iana-time-zone" version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "windows-core", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = 
"idna" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", ] [[package]] name = "ignore" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", "regex-automata", "same-file", "walkdir", "winapi-util", ] [[package]] name = "indexmap" version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "is_terminal_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" dependencies = [ "wasm-bindgen", ] [[package]] name = "json5" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" dependencies = [ "pest", "pest_derive", "serde", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" version = "0.2.156" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5f43f184355eefb8d17fc948dbecf6c13be3c141f20d834ae842193a448c72a" [[package]] name = "libgit2-sys" version = "0.17.0+1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" dependencies = [ "cc", "libc", "libz-sys", "pkg-config", ] [[package]] name = "libm" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libz-sys" version = "1.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdc53a7799a7496ebc9fd29f31f7df80e83c9bda5299768af5f9e59eeea74647" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "linux-raw-sys" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" dependencies = [ "libc", ] [[package]] name = "num-format" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" dependencies = [ "arrayvec", "itoa", ] [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", ] [[package]] name = "once_cell" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "parking_lot" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-targets 0.52.6", ] [[package]] name = "parse-zoneinfo" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f2a05b18d44e2957b88f96ba460715e295bc1d7510468a2f3d3b44535d26c24" dependencies = [ "regex", ] [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" version = "2.7.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" dependencies = [ "memchr", "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" dependencies = [ "pest", "pest_generator", ] [[package]] name = "pest_generator" version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", "syn", ] [[package]] name = "pest_meta" version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" dependencies = [ "once_cell", "pest", "sha2", ] [[package]] name = "phf" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" dependencies = [ "phf_shared", ] [[package]] name = "phf_codegen" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" dependencies = [ "phf_generator", "phf_shared", ] [[package]] name = "phf_generator" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ "phf_shared", "rand", ] [[package]] name = "phf_shared" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ "siphasher", ] [[package]] name = "pkg-config" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "ppv-lite86" version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" dependencies = [ "zerocopy", ] [[package]] name = "proc-macro2" version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] [[package]] name = "proptest" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ "bit-set", "bit-vec", "bitflags", "lazy_static", "num-traits", "rand", "rand_chacha", "rand_xorshift", "regex-syntax", "rusty-fork", "tempfile", "unarray", ] [[package]] name = "quick-error" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ 
"getrandom", ] [[package]] name = "rand_xorshift" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ "rand_core", ] [[package]] name = "rayon" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", ] [[package]] name = "rayon-core" version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", ] [[package]] name = "redox_syscall" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ "bitflags", ] [[package]] name = "regex" version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "rustix" version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", "windows-sys 0.52.0", ] [[package]] name = "rustversion" version = 
"1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "rusty-fork" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" dependencies = [ "fnv", "quick-error", "tempfile", "wait-timeout", ] [[package]] name = "ryu" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" dependencies = [ "serde_derive", ] [[package]] name = "serde_cbor" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" dependencies = [ "half", "serde", ] [[package]] name = "serde_derive" version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = 
"serde_spanned" version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ "indexmap", "itoa", "ryu", "serde", "unsafe-libyaml", ] [[package]] name = "sha2" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "siphasher" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "slug" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "882a80f72ee45de3cc9a5afeb2da0331d58df69e4e7d8eeb5d3c7784ae67e724" dependencies = [ "deunicode", "wasm-bindgen", ] [[package]] name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" [[package]] name = "strum_macros" version = "0.26.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ "heck", "proc-macro2", "quote", "rustversion", "syn", ] [[package]] name = "syn" version = "2.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "table_formatter" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "beef5d3fd5472c911d41286849de6a9aee93327f7fae9fb9148fe9ff0102c17d" dependencies = [ "colored", "itertools", "thiserror", ] [[package]] name = "tempfile" version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" dependencies = [ "cfg-if", "fastrand", "once_cell", "rustix", "windows-sys 0.59.0", ] [[package]] name = "tera" version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab9d851b45e865f178319da0abdbfe6acbc4328759ff18dafc3a41c16b4cd2ee" dependencies = [ "chrono", "chrono-tz", "globwalk", "humansize", "lazy_static", "percent-encoding", "pest", "pest_derive", "rand", "regex", "serde", "serde_json", "slug", "unic-segment", ] [[package]] name = "term_size" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e4129646ca0ed8f45d09b929036bafad5377103edd06e50bf574b353d2b08d9" dependencies = [ "libc", "winapi", ] [[package]] name = "terminal_size" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" dependencies = [ "rustix", "windows-sys 0.48.0", ] [[package]] name = "thiserror" version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tinyvec" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokei" version = "13.0.0" dependencies = [ "aho-corasick", "arbitrary", "clap", "clap-cargo", "colored", "crossbeam-channel", "dashmap", "encoding_rs_io", "env_logger", "etcetera", "git2", "grep-searcher", "hex", "ignore", "json5", "log", "num-format", "once_cell", "parking_lot", "proptest", "rayon", "regex", "serde", "serde_cbor", "serde_json", "serde_yaml", "strum", "strum_macros", "table_formatter", "tempfile", "tera", "term_size", "toml", ] [[package]] name = "toml" version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit", ] [[package]] name = "toml_datetime" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", 
"winnow", ] [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" [[package]] name = "unarray" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unic-char-property" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" dependencies = [ "unic-char-range", ] [[package]] name = "unic-char-range" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" [[package]] name = "unic-common" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" [[package]] name = "unic-segment" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4ed5d26be57f84f176157270c112ef57b86debac9cd21daaabbe56db0f88f23" dependencies = [ "unic-ucd-segment", ] [[package]] name = "unic-ucd-segment" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2079c122a62205b421f499da10f3ee0f7697f012f55b675e002483c73ea34700" dependencies = [ "unic-char-property", "unic-char-range", "unic-ucd-version", ] [[package]] name = "unic-ucd-version" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" dependencies = [ "unic-common", ] [[package]] name = "unicode-bidi" 
version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] name = "unsafe-libyaml" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "url" version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wait-timeout" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" dependencies = [ "libc", ] [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ "windows-targets 0.48.5", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-targets" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm 0.48.5", "windows_aarch64_msvc 0.48.5", "windows_i686_gnu 0.48.5", "windows_i686_msvc 0.48.5", "windows_x86_64_gnu 0.48.5", "windows_x86_64_gnullvm 0.48.5", "windows_x86_64_msvc 0.48.5", ] [[package]] name = 
"windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] [[package]] name = "zerocopy" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = 
"0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", "syn", ] tokei-13.0.0/Cargo.toml0000644000000066330000000000100102360ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.71" name = "tokei" version = "13.0.0" authors = ["Erin Power "] build = "build.rs" include = [ "Cargo.lock", "Cargo.toml", "LICENCE-APACHE", "LICENCE-MIT", "build.rs", "languages.json", "src/**/*", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Count your code, quickly." 
homepage = "https://tokei.rs" readme = "README.md" keywords = [ "utility", "cli", "cloc", "lines", "statistics", ] categories = [ "command-line-utilities", "development-tools", "visualization", ] license = "MIT OR Apache-2.0" repository = "https://github.com/XAMPPRocky/tokei.git" [features] all = [ "cbor", "yaml", ] cbor = [ "dep:hex", "dep:serde_cbor", ] cli = [ "dep:clap", "dep:colored", "dep:env_logger", "dep:num-format", ] default = ["cli"] yaml = ["dep:serde_yaml"] [lib] name = "tokei" path = "src/lib.rs" [[bin]] name = "tokei" path = "src/main.rs" required-features = ["cli"] [dependencies.aho-corasick] version = "1.1.3" [dependencies.arbitrary] version = "1.3.2" features = ["derive"] [dependencies.clap] version = "4" features = [ "cargo", "string", "wrap_help", ] optional = true [dependencies.clap-cargo] version = "0.13.0" [dependencies.colored] version = "2.1.0" optional = true [dependencies.crossbeam-channel] version = "0.5.13" [dependencies.dashmap] version = "6.0.1" features = ["serde"] [dependencies.encoding_rs_io] version = "0.1.7" [dependencies.env_logger] version = "0.11.5" features = [] optional = true [dependencies.etcetera] version = "0.8.0" [dependencies.grep-searcher] version = "0.1.13" [dependencies.hex] version = "0.4.3" optional = true [dependencies.ignore] version = "0.4.22" [dependencies.log] version = "0.4.22" [dependencies.num-format] version = "0.4.4" optional = true [dependencies.once_cell] version = "1.19.0" [dependencies.parking_lot] version = "0.12.3" [dependencies.rayon] version = "1.10.0" [dependencies.regex] version = "1.10.6" [dependencies.serde] version = "1.0.208" features = [ "derive", "rc", ] [dependencies.serde_cbor] version = "0.11.2" optional = true [dependencies.serde_json] version = "1.0.125" [dependencies.serde_yaml] version = "0.9.34" optional = true [dependencies.table_formatter] version = "0.6.1" [dependencies.term_size] version = "0.3.2" [dependencies.toml] version = "0.8.19" [dev-dependencies.git2] version = 
"0.19.0" features = [] default-features = false [dev-dependencies.proptest] version = "1.5.0" [dev-dependencies.strum] version = "0.26.3" [dev-dependencies.strum_macros] version = "0.26.4" [dev-dependencies.tempfile] version = "3.12.0" [build-dependencies.ignore] version = "0.4.22" [build-dependencies.json5] version = "0.4.1" [build-dependencies.serde_json] version = "1.0.125" [build-dependencies.tera] version = "1.20.0" [profile.release] lto = "thin" panic = "abort" tokei-13.0.0/Cargo.toml.orig000064400000000000000000000041071046102023000137110ustar 00000000000000[package] authors = ["Erin Power "] build = "build.rs" categories = ["command-line-utilities", "development-tools", "visualization"] description = "Count your code, quickly." homepage = "https://tokei.rs" include = [ "Cargo.lock", "Cargo.toml", "LICENCE-APACHE", "LICENCE-MIT", "build.rs", "languages.json", "src/**/*", ] keywords = ["utility", "cli", "cloc", "lines", "statistics"] license = "MIT OR Apache-2.0" name = "tokei" readme = "README.md" repository = "https://github.com/XAMPPRocky/tokei.git" version = "13.0.0" rust-version = "1.71" edition = "2021" [features] all = ["cbor", "yaml"] cbor = ["dep:hex", "dep:serde_cbor"] cli = ["dep:clap", "dep:colored", "dep:env_logger", "dep:num-format"] default = ["cli"] yaml = ["dep:serde_yaml"] [profile.release] lto = "thin" panic = "abort" [[bin]] name = "tokei" required-features = ["cli"] [build-dependencies] tera = "1.20.0" ignore = "0.4.22" serde_json = "1.0.125" json5 = "0.4.1" [dependencies] aho-corasick = "1.1.3" arbitrary = { version = "1.3.2", features = ["derive"] } clap = { version = "4", optional = true, features = ["cargo", "string", "wrap_help"] } colored = { version = "2.1.0", optional = true } crossbeam-channel = "0.5.13" encoding_rs_io = "0.1.7" grep-searcher = "0.1.13" ignore = "0.4.22" log = "0.4.22" rayon = "1.10.0" serde = { version = "1.0.208", features = ["derive", "rc"] } term_size = "0.3.2" toml = "0.8.19" parking_lot = "0.12.3" dashmap = 
{ version = "6.0.1", features = ["serde"] } num-format = { version = "0.4.4", optional = true } once_cell = "1.19.0" regex = "1.10.6" serde_json = "1.0.125" etcetera = "0.8.0" table_formatter = "0.6.1" clap-cargo = "0.13.0" [dependencies.env_logger] optional = true features = [] version = "0.11.5" [dependencies.hex] optional = true version = "0.4.3" [dependencies.serde_cbor] optional = true version = "0.11.2" [dependencies.serde_yaml] optional = true version = "0.9.34" [dev-dependencies] proptest = "1.5.0" strum = "0.26.3" strum_macros = "0.26.4" tempfile = "3.12.0" git2 = { version = "0.19.0", default-features = false, features = [] } tokei-13.0.0/LICENCE-APACHE000064400000000000000000000010471046102023000127260ustar 00000000000000Copyright 2016 Erin Power Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. tokei-13.0.0/LICENCE-MIT000064400000000000000000000020611046102023000124330ustar 00000000000000MIT License (MIT) Copyright (c) 2016 Erin Power Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. tokei-13.0.0/README.md000064400000000000000000000364031046102023000123050ustar 00000000000000# Tokei ([時計](https://en.wiktionary.org/wiki/%E6%99%82%E8%A8%88)) [![Mean Bean CI](https://github.com/XAMPPRocky/tokei/workflows/Mean%20Bean%20CI/badge.svg)](https://github.com/XAMPPRocky/tokei/actions?query=workflow%3A%22Mean+Bean+CI%22) [![Help Wanted](https://img.shields.io/github/issues/XAMPPRocky/tokei/help%20wanted?color=green)](https://github.com/XAMPPRocky/tokei/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22) [![Lines Of Code](https://tokei.rs/b1/github/XAMPPRocky/tokei?category=code)](https://github.com/XAMPPRocky/tokei) [![Documentation](https://docs.rs/tokei/badge.svg)](https://docs.rs/tokei/) ![](https://img.shields.io/crates/d/tokei?label=downloads%20%28crates.io%29) ![](https://img.shields.io/github/downloads/xampprocky/tokei/total?label=downloads%20%28GH%29) ![](https://img.shields.io/homebrew/installs/dy/tokei?color=brightgreen&label=downloads%20%28brew%29) ![Chocolatey Downloads](https://img.shields.io/chocolatey/dt/tokei?label=Downloads%20(Chocolately)) [![dependency status](https://deps.rs/repo/github/XAMPPRocky/tokei/status.svg)](https://deps.rs/repo/github/XAMPPRocky/tokei) [![Packaging status](https://repology.org/badge/tiny-repos/tokei.svg)](https://repology.org/project/tokei/versions) Tokei is a program that displays statistics about your code. Tokei will show the number of files, total lines within those files and code, comments, and blanks grouped by language. 
## Example ```console ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Language Files Lines Code Comments Blanks ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ BASH 4 49 30 10 9 JSON 1 1332 1332 0 0 Shell 1 49 38 1 10 TOML 2 77 64 4 9 ─────────────────────────────────────────────────────────────────────────────── Markdown 5 1355 0 1074 281 |- JSON 1 41 41 0 0 |- Rust 2 53 42 6 5 |- Shell 1 22 18 0 4 (Total) 1471 101 1080 290 ─────────────────────────────────────────────────────────────────────────────── Rust 19 3416 2840 116 460 |- Markdown 12 351 5 295 51 (Total) 3767 2845 411 511 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ Total 32 6745 4410 1506 829 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ``` ## [API Documentation](https://docs.rs/tokei) ## Table of Contents - [Features](#features) - [Installation](#installation) - [Package Managers](#package-managers) - [Manual](#manual) - [Configuration](#configuration) - [How to use Tokei](#how-to-use-tokei) - [Options](#options) - [Badges](#badges) - [Supported Languages](#supported-languages) - [Changelog](CHANGELOG.md) - [Common Issues](#common-issues) - [Canonical Source](#canonical-source) - [Copyright and License](#copyright-and-license) ## Features - Tokei is **very fast**, and is able to count millions of lines of code in seconds. Check out the [11.0.0 release](https://github.com/XAMPPRocky/tokei/releases/v11.0.0) to see how Tokei's speed compares to others. - Tokei is **accurate**, Tokei correctly handles multi line comments, nested comments, and not counting comments that are in strings. Providing an accurate code statistics. - Tokei has huge range of languages, supporting over **150** languages, and their various extensions. - Tokei can output in multiple formats (**CBOR**, **JSON**, **YAML**) allowing Tokei's output to be easily stored, and reused. 
These can also be reused in tokei combining a previous run's statistics with another set. - Tokei is available on **Mac**, **Linux**, and **Windows**. See [installation instructions](#installation) for how to get Tokei on your platform. - Tokei is also a **library** allowing you to easily integrate it with other projects. - Tokei comes with and without color. Set the env variable NO_COLOR to 1, and it'll be black and white. ## Installation ### Package Managers #### Unix ```console # Alpine Linux (since 3.13) apk add tokei # Arch Linux pacman -S tokei # Cargo cargo install tokei # Conda conda install -c conda-forge tokei # Fedora sudo dnf install tokei # FreeBSD pkg install tokei # NetBSD pkgin install tokei # Nix/NixOS nix-env -i tokei # OpenSUSE sudo zypper install tokei # Void Linux sudo xbps-install tokei ``` #### macOS ```console # Homebrew brew install tokei # MacPorts sudo port selfupdate sudo port install tokei ``` #### Windows ```console # Winget winget install XAMPPRocky.tokei # Scoop scoop install tokei ``` ### Manual #### Downloading You can download prebuilt binaries in the [releases section](https://github.com/XAMPPRocky/tokei/releases). #### Building You can also build and install from source (requires the latest stable [Rust] compiler.) ```console cargo install --git https://github.com/XAMPPRocky/tokei.git tokei ``` [rust]: https://www.rust-lang.org ## Configuration Tokei has a [configuration] file that allows you to change default behaviour. The file can be named `tokei.toml` or `.tokeirc`. Currently tokei looks for this file in three different places. The current directory, your home directory, and your configuration directory. ## How to use Tokei #### Basic usage This is the basic way to use tokei. Which will report on the code in `./foo` and all subfolders. 
```shell $ tokei ./foo ``` [configuration]: ./tokei.example.toml #### Multiple folders To have tokei report on multiple folders in the same call simply add a comma, or a space followed by another path. ```shell $ tokei ./foo ./bar ./baz ``` ```shell $ tokei ./foo, ./bar, ./baz ``` #### Excluding folders Tokei will respect all `.gitignore` and `.ignore` files, and you can use the `--exclude` option to exclude any additional files. The `--exclude` flag has the same semantics as `.gitignore`. ```shell $ tokei ./foo --exclude *.rs ``` Paths to exclude can also be listed in a `.tokeignore` file, using the same [syntax](https://git-scm.com/docs/gitignore) as .gitignore files. #### Sorting output By default tokei sorts alphabetically by language name, however using `--sort` tokei can also sort by any of the columns. `blanks, code, comments, lines` ```shell $ tokei ./foo --sort code ``` #### Outputting file statistics By default tokei only outputs the total of the languages, and using `--files` flag tokei can also output individual file statistics. ```shell $ tokei ./foo --files ``` #### Outputting into different formats Tokei normally outputs into a nice human readable format designed for terminals. There is also using the `--output` option various other formats that are more useful for bringing the data into another program. **Note:** This version of tokei was compiled without any serialization formats, to enable serialization, reinstall tokei with the features flag. ```shell ALL: cargo install tokei --features all CBOR: cargo install tokei --features cbor YAML: cargo install tokei --features yaml ``` **Currently supported formats** - JSON `--output json` - YAML `--output yaml` - CBOR `--output cbor` ```shell $ tokei ./foo --output json ``` #### Reading in stored formats Tokei can also take in the outputted formats added in the previous results to its current run. Tokei can take either a path to a file, the format passed in as a value to the option, or from stdin. 
```shell $ tokei ./foo --input ./stats.json ``` ## Options ``` USAGE: tokei [FLAGS] [OPTIONS] [--] [input]... FLAGS: -f, --files Will print out statistics on individual files. -h, --help Prints help information --hidden Count hidden files. -l, --languages Prints out supported languages and their extensions. --no-ignore Don't respect ignore files (.gitignore, .ignore, etc.). This implies --no-ignore-parent, --no-ignore-dot, and --no-ignore-vcs. --no-ignore-dot Don't respect .ignore and .tokeignore files, including those in parent directories. --no-ignore-parent Don't respect ignore files (.gitignore, .ignore, etc.) in parent directories. --no-ignore-vcs Don't respect VCS ignore files (.gitignore, .hgignore, etc.), including those in parent directories. -V, --version Prints version information -v, --verbose Set log output level: 1: to show unknown file extensions, 2: reserved for future debugging, 3: enable file level trace. Not recommended on multiple files OPTIONS: -c, --columns Sets a strict column width of the output, only available for terminal output. -e, --exclude ... Ignore all files & directories matching the pattern. -i, --input Gives statistics from a previous tokei run. Can be given a file path, or "stdin" to read from stdin. -o, --output Outputs Tokei in a specific format. Compile with additional features for more format support. [possible values: cbor, json, yaml] -s, --sort Sort languages based on column [possible values: files, lines, blanks, code, comments] -t, --type Filters output by language type, separated by a comma. i.e. -t=Rust,Markdown ARGS: ... The path(s) to the file or directory to be counted. ``` ## Badges Tokei has support for badges. For example [![](https://tokei.rs/b1/github/XAMPPRocky/tokei)](https://github.com/XAMPPRocky/tokei). ``` [![](https://tokei.rs/b1/github/XAMPPRocky/tokei)](https://github.com/XAMPPRocky/tokei). ``` Tokei's URL scheme is as follows. 
``` https://tokei.rs/b1/{host: values: github|gitlab}/{Repo Owner eg: XAMPPRocky}/{Repo name eg: tokei} ``` By default the badge will show the repo's LoC (_Lines of Code_), you can also specify for it to show a different category, by using the `?category=` query string. It can be either `code`, `blanks`, `files`, `lines`, `comments`, Example show total lines: ``` [![](https://tokei.rs/b1/github/XAMPPRocky/tokei?category=lines)](https://github.com/XAMPPRocky/tokei). ``` The server code hosted on tokei.rs is in [XAMPPRocky/tokei_rs](https://github.com/XAMPPRocky/tokei_rs) ## Dockerized version Tokei is available in a small `alpine`-based docker image, buildable through [earthly](https://github.com/earthly/earthly): ```bash earthly +docker ``` Once built, one can run the image with: ```bash docker run --rm -v /path/to/analyze:/src tokei . ``` Or, to simply analyze the current folder (linux): ```bash docker run --rm -v $(pwd):/src tokei . ``` ## Supported Languages If there is a language that you would to add to tokei feel free to make a pull request. Languages are defined in [`languages.json`](./languages.json), and you can read how to add and test your language in our [CONTRIBUTING.md](./CONTRIBUTING.md). 
``` Abap ActionScript Ada Agda Alex Alloy APL Asn1 Asp AspNet Assembly AssemblyGAS ATS Autoconf AutoHotKey Automake AWK Bash Batch Bazel Bean Bicep Bitbake BQN BrightScript C Cabal Cassius Ceylon CHeader Cil Clojure ClojureC ClojureScript CMake Cobol CoffeeScript Cogent ColdFusion ColdFusionScript Coq Cpp CppHeader Crystal CSharp CShell Css Cuda CUE Cython D D2 DAML Dart DeviceTree Dhall Dockerfile DotNetResource DreamMaker Dust Ebuild EdgeDB Edn Elisp Elixir Elm Elvish EmacsDevEnv Emojicode Erlang Factor FEN Fish FlatBuffers ForgeConfig Forth FortranLegacy FortranModern FreeMarker FSharp Fstar GDB GdScript GdShader Gherkin Gleam Glsl Go Graphql Groovy Gwion Hamlet Handlebars Happy Hare Haskell Haxe Hcl Hex Hex0 Hex1 Hex2 HiCAD hledger Hlsl HolyC Html Hy Idris Ini IntelHex Isabelle Jai Janet Java JavaScript Jq Json Jsx Julia Julius Just KakouneScript KaemFile Kotlin Lean Less Lingua Franca LinkerScript Liquid Lisp LLVM Logtalk Lua Lucius M1Assembly Madlang Max Makefile Markdown Mdx Meson Mint Mlatu ModuleDef MonkeyC MoonScript MsBuild Mustache Nim Nix NotQuitePerl NuGetConfig Nushell ObjectiveC ObjectiveCpp OCaml Odin OpenSCAD OpenQASM Org Oz Pascal Perl Perl6 Pest Phix Php Po Poke Polly Pony PostCss PowerShell Processing Prolog Protobuf PRQL PSL PureScript Pyret Python Qcl Qml R Racket Rakefile Razor Renpy ReStructuredText RON RPMSpecfile Ruby RubyHtml Rust Sass Scala Scheme Scons Sh ShaderLab Slang Sml Solidity SpecmanE Spice Sql SRecode Stata Stratego Svelte Svg Swift Swig SystemVerilog Slint Tact Tcl Templ Tex Text Thrift Toml Tsx Twig TypeScript UMPL UnrealDeveloperMarkdown UnrealPlugin UnrealProject UnrealScript UnrealShader UnrealShaderHeader UrWeb UrWebProject Vala VB6 VBScript Velocity Verilog VerilogArgsFile Vhdl VimScript VisualBasic VisualStudioProject VisualStudioSolution Vue WebAssembly Wolfram Xaml XcodeConfig Xml XSL Xtend Yaml ZenCode Zig ZoKrates Zsh ``` ## Common issues ### Tokei says I have a lot of D code, but I know there is no D code! 
This is likely due to `gcc` generating `.d` files. Until the D people decide on a different file extension, you can always exclude `.d` files using the `-e --exclude` flag like so ``` $ tokei . -e *.d ``` ## Canonical Source The canonical source of this repo is hosted on [GitHub](https://github.com/XAMPPRocky/tokei). If you have a GitHub account, please make your issues, and pull requests there. ## Related Tools - [tokei-pie](https://github.com/laixintao/tokei-pie): Render tokei's output to interactive sunburst chart. ## Copyright and License (C) Copyright 2015 by XAMPPRocky and contributors See [the graph](https://github.com/XAMPPRocky/tokei/graphs/contributors) for a full list of contributors. Tokei is distributed under the terms of both the MIT license and the Apache License (Version 2.0). See [LICENCE-APACHE](./LICENCE-APACHE), [LICENCE-MIT](./LICENCE-MIT) for more information. tokei-13.0.0/build.rs0000644000000000000000000000116521046102023000124720ustar 00000000000000//! Build script: generates `language_type.rs` (from `languages.json` via a
//! Tera template) and the per-fixture integration tests (`tests.rs`) into
//! `OUT_DIR`.

extern crate ignore;
extern crate json5;
extern crate serde_json;

use std::ffi::OsStr;
use std::fs;
use std::path::Path;
use std::{cmp, env, error};

use ignore::Walk;
use serde_json::Value;

fn main() -> Result<(), Box<dyn error::Error>> {
    // OUT_DIR is always set by Cargo for build scripts.
    let out_dir = env::var_os("OUT_DIR").expect("No OUT_DIR variable.");
    generate_languages(&out_dir)?;
    generate_tests(&out_dir)?;
    Ok(())
}

/// Renders `src/language/language_type.tera.rs` with the (pre-sorted) contents
/// of `languages.json` and writes the result to `$OUT_DIR/language_type.rs`.
fn generate_languages(out_dir: &OsStr) -> Result<(), Box<dyn error::Error>> {
    let mut tera = tera::Tera::default();

    // `languages.json` is JSON5 (comments / trailing commas allowed), so it is
    // parsed with `json5` rather than `serde_json`.
    let json_string: String = fs::read_to_string("languages.json")?.parse()?;
    let mut json: Value = json5::from_str(&json_string)?;

    for (_key, ref mut item) in json
        .get_mut("languages")
        .unwrap()
        .as_object_mut()
        .unwrap()
        .iter_mut()
    {
        // Sort a delimiter-pair property (if present) so that the longest
        // delimiters come first; matching longest-first avoids a shorter
        // delimiter shadowing a longer one that shares a prefix.
        macro_rules! sort_prop {
            ($prop:expr) => {{
                if let Some(ref mut prop) = item.get_mut($prop) {
                    prop.as_array_mut()
                        .unwrap()
                        .sort_unstable_by(compare_json_str_len)
                }
            }};
        }

        sort_prop!("quotes");
        sort_prop!("verbatim_quotes");
        sort_prop!("multi_line");
    }

    let output_path = Path::new(&out_dir).join("language_type.rs");
    let rust_code = tera.render_str(
        &std::fs::read_to_string("src/language/language_type.tera.rs")?,
        &tera::Context::from_value(json)?,
    )?;
    std::fs::write(output_path, rust_code)?;
    Ok(())
}

/// Orders two delimiter pairs (JSON arrays of strings) descending by the
/// length of their longest element, so `sort_unstable_by` puts longer
/// delimiters first.
fn compare_json_str_len(a: &Value, b: &Value) -> cmp::Ordering {
    let a = a.as_array().expect("a as array");
    let b = b.as_array().expect("b as array");

    let max_a_size = a.iter().map(|e| e.as_str().unwrap().len()).max().unwrap();
    let max_b_size = b.iter().map(|e| e.as_str().unwrap().len()).max().unwrap();

    // Reversed comparison => descending order.
    max_b_size.cmp(&max_a_size)
}

/// Writes `$OUT_DIR/tests.rs`, containing one generated `#[test]` per fixture
/// file under `tests/data` and `tests/embedding`.
fn generate_tests(out_dir: &OsStr) -> Result<(), Box<dyn error::Error>> {
    // Rough capacity estimate: length of the generated test template
    // multiplied by the number of languages, to avoid repeated reallocation.
    const INITIAL_BUFFER_SIZE: usize = 989 * 130;
    let mut string = String::with_capacity(INITIAL_BUFFER_SIZE);

    generate_tests_batch("./tests/data", None, &mut string)?;
    generate_tests_batch("./tests/embedding", Some("embedding"), &mut string)?;
    Ok(fs::write(Path::new(&out_dir).join("tests.rs"), string)?)
}

/// Appends a generated `#[test]` function for every file under `src_dir` to
/// `string`. When `test_module` is `Some`, the tests are wrapped in a
/// `#[cfg(test)] mod {test_module}` block.
fn generate_tests_batch(
    src_dir: &str,
    test_module: Option<&str>,
    string: &mut String,
) -> Result<(), Box<dyn error::Error>> {
    // Keep only plain files; directories and walk errors are skipped.
    let walker = Walk::new(src_dir).filter(|p| match p {
        Ok(ref p) => {
            if let Ok(ref p) = p.metadata() {
                p.is_file()
            } else {
                false
            }
        }
        _ => false,
    });

    if let Some(test_module) = test_module {
        string.push_str(&format!(
            r####"
    #[cfg(test)]
    mod {0} {{
    use super::*;
"####,
            test_module
        ));
    }

    for path in walker {
        let path = path?;
        let path = path.path();
        let root = std::path::PathBuf::from(std::env::var("CARGO_MANIFEST_DIR").unwrap());

        let name = path.file_stem().unwrap().to_str().unwrap().to_lowercase();

        // Jupyter fixtures are handled by dedicated tests elsewhere.
        if name == "jupyter" {
            continue;
        }

        string.push_str(&format!(
            r####"
    #[test]
    fn {0}() {{
        const _: &str = include_str!(r###"{2}"###);
        let mut languages = Languages::new();
        languages.get_statistics(&["{1}"], &[], &Config::default());

        if languages.len() != 1 {{
            panic!("wrong languages detected: expected just {0}, found {{:?}}",
                   languages.into_iter().collect::<Vec<_>>());
        }}

        let (name, language) = languages.into_iter().next().unwrap();
        let mut language = language.summarise();

        let contents = fs::read_to_string("{1}").unwrap();

        println!("{{}} {1}", name);
        assert_eq!(get_digit!(LINES, contents), language.lines());
        println!("{{}} LINES MATCH", name);
        assert_eq!(get_digit!(CODE, contents), language.code);
        println!("{{}} CODE MATCH", name);
        assert_eq!(get_digit!(COMMENTS, contents), language.comments);
        println!("{{}} COMMENTS MATCH", name);
        assert_eq!(get_digit!(BLANKS, contents), language.blanks);
        println!("{{}} BLANKS MATCH", name);

        let report = language.reports.pop().unwrap();
        let stats = report.stats.summarise();

        assert_eq!(language.lines(), stats.lines());
        assert_eq!(language.code, stats.code);
        assert_eq!(language.comments, stats.comments);
        assert_eq!(language.blanks, stats.blanks);
    }}
"####,
            name,
            path.to_string_lossy().replace('\\', "/"),
            std::fs::canonicalize(root.join(path)).unwrap().display(),
        ));
    }

    if test_module.is_some() {
        string.push_str("\n}");
    }

    Ok(())
}
tokei-13.0.0/languages.json000064400000000000000000001567211046102023000136750ustar 00000000000000{ "languages": { "Abap": { "name": "ABAP", "line_comment": ["*", "\\\""], "extensions": ["abap"] }, "ABNF": { "line_comment": [";"], "extensions": ["abnf"] }, "ActionScript": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["as"] }, "Ada": { "line_comment": ["--"], "extensions": ["ada", "adb", "ads", "pad"] }, "Agda": { "nested": true, "line_comment": ["--"], "multi_line_comments": [["{-", "-}"]], "extensions": ["agda"] }, "Alex": { "extensions": ["x"] }, "Alloy": { "line_comment": ["--", "//"], "multi_line_comments": [["/*", "*/"]], "extensions": ["als"] }, "Apl": { "name": "APL", "line_comment": ["⍝"], "extensions": ["apl", "aplf", "apls"], "quotes": [["'", "'"]], }, "Arduino": { "name": "Arduino C++", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["ino"] }, "Arturo": { "line_comment": [";"], "quotes": [["\\\"", "\\\""]], "extensions": ["art"] }, "AsciiDoc": { "line_comment": ["//"], "multi_line_comments": [["////", "////"]], "extensions": ["adoc", "asciidoc"] }, "Asn1": { "name": "ASN.1", "line_comment": ["--"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "multi_line_comments": [["/*", "*/"]], "extensions": ["asn1"] }, "Asp": { "name": "ASP", "line_comment": ["'", "REM"], "extensions": ["asa", "asp"] }, "AspNet": { "name": "ASP.NET", "multi_line_comments": [[""], ["<%--", "-->"]], "extensions": [ "asax", "ascx", "asmx", "aspx", "master", "sitemap", "webinfo" ] }, "Assembly": { "line_comment": [";"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["asm"] }, "AssemblyGAS": { "name": "GNU Style Assembly", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["s"] }, "Astro": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"], [""]], "extensions": ["astro"] }, "Ats": { "name": 
"ATS", "line_comment": ["//"], "multi_line_comments": [["(*", "*)"], ["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": [ "dats", "hats", "sats", "atxt" ] }, "Autoconf": { "line_comment": ["#", "dnl"], "extensions": ["in"] }, "Autoit": { "line_comment": [";"], "multi_line_comments": [["#comments-start", "#comments-end"], ["#cs", "#ce"]], "extensions": ["au3"] }, "AutoHotKey": { "line_comment": [";"], "multi_line_comments": [["/*", "*/"]], "extensions": ["ahk"] }, "Automake": { "line_comment": ["#"], "extensions": ["am"] }, "AvaloniaXaml": { "name": "AXAML", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["axaml"] }, "AWK": { "line_comment": ["#"], "shebangs": ["#!/bin/awk -f"], "extensions": ["awk"] }, "Ballerina": { "line_comment": ["//", "#"], "quotes": [ ["\\\"", "\\\""], ["`", "`"] ], "extensions": ["bal"] }, "Bash": { "name": "BASH", "shebangs": ["#!/bin/bash"], "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["bash"], "extensions": ["bash"] }, "Batch": { "line_comment": ["REM", "::"], "extensions": ["bat", "btm", "cmd"] }, "Bazel": { "line_comment": ["#"], "doc_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["'''", "'''"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["bzl", "bazel", "bzlmod"], "filenames": ["build", "workspace", "module"] }, "Bean": { "line_comment": [";"], "quotes": [["\\\"", "\\\""]], "extensions": ["bean", "beancount"] }, "Bicep" : { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["'", "'"], ["'''", "'''"]], "extensions": ["bicep", "bicepparam"] }, "Bitbake": { "name": "Bitbake", "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["bb", "bbclass", "bbappend", "inc"] }, "Bqn": { "name": "BQN", "line_comment": ["#"], "extensions": ["bqn"], "quotes": [["\\\"", "\\\""], ["'", "'"]], }, "BrightScript": { "quotes": [["\\\"", "\\\""]], "line_comment": ["'", "REM"], "extensions": ["brs"] }, "C": { "line_comment": 
["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["c", "ec", "pgc"] }, "Cabal": { "nested": true, "line_comment": ["--"], "multi_line_comments": [["{-", "-}"]], "extensions": ["cabal"] }, "Cairo": { "line_comment": ["//"], "extensions": ["cairo"], "quotes": [ ["\\\"", "\\\""], ["'", "'"] ] }, "Cangjie": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "nested": true, "quotes": [["\\\"", "\\\""],["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "verbatim_quotes": [["#\\\"", "\\\"#"],["##\\\"", "\\\"##"],["###\\\"", "\\\"###"], ["#'", "'#"],["##'", "'##"],["###'", "'###"]], "extensions": ["cj"] }, "Cassius": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["cassius"] }, "Ceylon": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "extensions": ["ceylon"] }, "Chapel": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["chpl"] }, "CHeader": { "name": "C Header", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["h"] }, "Cil": { "name": "CIL (SELinux)", "line_comment": [";"], "quotes": [["\\\"", "\\\""]], "extensions": ["cil"] }, "Circom": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "extensions": ["circom"] }, "Clojure": { "line_comment": [";"], "quotes": [["\\\"", "\\\""]], "extensions": ["clj"] }, "ClojureC": { "line_comment": [";"], "quotes": [["\\\"", "\\\""]], "extensions": ["cljc"] }, "ClojureScript": { "line_comment": [";"], "quotes": [["\\\"", "\\\""]], "extensions": ["cljs"] }, "CMake": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""]], "extensions": ["cmake"], "filenames": ["cmakelists.txt"] }, "Cobol": { "name": "COBOL", "line_comment": ["*"], "extensions": ["cob", "cbl", "ccp", "cobol", "cpy"] }, "CodeQL": { 
"line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["ql", "qll"] }, "CoffeeScript": { "line_comment": ["#"], "multi_line_comments": [["###", "###"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["coffee", "cjsx"] }, "Cogent": { "line_comment": ["--"], "extensions": ["cogent"] }, "ColdFusion": { "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["cfm"] }, "ColdFusionScript": { "name": "ColdFusion CFScript", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["cfc"] }, "Coq": { "quotes": [["\\\"", "\\\""]], "multi_line_comments": [["(*", "*)"]], "extensions": ["v"] }, "Cpp": { "name": "C++", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "verbatim_quotes": [["R\\\"(", ")\\\""]], "extensions": ["cc", "cpp", "cxx", "c++", "pcc", "tpp"] }, "CppHeader": { "name": "C++ Header", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["hh", "hpp", "hxx", "inl", "ipp"] }, "Crystal": { "line_comment": ["#"], "shebangs": ["#!/usr/bin/crystal"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["crystal"], "extensions": ["cr"] }, "CSharp": { "name": "C#", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "verbatim_quotes": [["@\\\"", "\\\""]], "extensions": ["cs", "csx"] }, "CShell": { "name": "C Shell", "shebangs": ["#!/bin/csh"], "line_comment": ["#"], "env": ["csh"], "extensions": ["csh"] }, "Css": { "name": "CSS", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "mime": ["text/css"], "extensions": ["css"] }, "Cuda": { "name": "CUDA", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["cu"] }, "Cue": { "name": "CUE", "line_comment": ["//"], "quotes": [ ["\\\"", 
"\\\""], ["'", "'"], ["\\\"\\\"\\\"", "\\\"\\\"\\\""] ], "verbatim_quotes": [["#\\\"", "\\\"#"]], "extensions": ["cue"] }, "Cython": { "line_comment": ["#"], "doc_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["'''", "'''"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["cython"], "extensions": ["pyx", "pxd", "pxi"] }, "D": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "nested_comments": [["/+", "+/"]], "extensions": ["d"] }, "D2": { "line_comment": ["#"], "multi_line_comments": [["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "extensions": ["d2"] }, "Daml": { "name": "DAML", "nested": true, "line_comment": ["-- "], "multi_line_comments": [["{-", "-}"]], "extensions": ["daml"] }, "Dart": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [ ["\\\"", "\\\""], ["'", "'"], ["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["'''", "'''"] ], "extensions": ["dart"] }, "DeviceTree": { "name": "Device Tree", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["dts", "dtsi"] }, "Dhall":{ "nested": true, "line_comment": ["--"], "multi_line_comments": [["{-", "-}"]], "quotes": [["\\\"", "\\\""], ["''", "''"]], "extensions": ["dhall"] }, "Dockerfile": { "line_comment": ["#"], "extensions": ["dockerfile", "dockerignore"], "filenames": ["dockerfile"], "quotes": [["\\\"", "\\\""], ["'", "'"]] }, "DotNetResource": { "name": ".NET Resource", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""]], "extensions": ["resx"] }, "DreamMaker": { "name": "Dream Maker", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "nested": true, "extensions": ["dm", "dme"], "quotes": [["\\\"", "\\\""], ["{\\\"", "\\\"}"], ["'", "'"]] }, "Dust": { "name": "Dust.js", "multi_line_comments": [["{!", "!}"]], "extensions": ["dust"] }, "Ebuild": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["ebuild", "eclass"] }, "EdgeQL": { "name": "EdgeQL", 
"line_comment": ["#"], "quotes": [["'", "'"], ["\\\"", "\\\""], ["$", "$"]], "extensions": ["edgeql"] }, "ESDL": { "name": "EdgeDB Schema Definition", "line_comment": ["#"], "quotes": [["'", "'"], ["\\\"", "\\\""]], "extensions": ["esdl"] }, "Edn": { "line_comment": [";"], "extensions": ["edn"] }, "Eighth": { "name": "8th", "line_comment": ["\\\\ ", "-- "], "multi_line_comments": [["(*", "*)"]], "nested": true, "quotes": [["\\\"", "\\\""]], "extensions": ["8th"] }, "Elisp": { "name": "Emacs Lisp", "line_comment": [";"], "extensions": ["el"] }, "Elixir": { "line_comment": ["#"], "quotes": [ ["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["\\\"", "\\\""], ["'''", "'''"], ["'", "'"] ], "extensions": ["ex", "exs"] }, "Elm": { "nested": true, "line_comment": ["--"], "multi_line_comments": [["{-", "-}"]], "extensions": ["elm"] }, "Elvish": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["elvish"], "extensions": ["elv"] }, "EmacsDevEnv": { "name": "Emacs Dev Env", "line_comment": [";"], "extensions": ["ede"] }, "Emojicode": { "line_comment": ["💭"], "multi_line_comments": [["💭🔜", "🔚💭"], ["📗", "📗"], ["📘", "📘"]], "quotes": [["❌🔤", "❌🔤"]], "extensions": ["emojic", "🍇"] }, "Erlang": { "line_comment": ["%"], "extensions": ["erl", "hrl"] }, "Factor": { "line_comment": ["!", "#!"], "multi_line_comments": [["/*", "*/"]], "extensions": ["factor"] }, "FEN": { "name": "FEN", "blank": true, "extensions": ["fen"] }, "Fennel" : { "line_comment": [";", ";;"], "quotes": [["\\\"", "\\\""]], "extensions": ["fnl", "fnlm"] }, "Fish": { "shebangs": ["#!/bin/fish"], "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["fish"], "extensions": ["fish"] }, "FlatBuffers": { "name": "FlatBuffers Schema", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["fbs"] }, "ForgeConfig": { "name": "Forge Config", "line_comment": ["#", "~"], "extensions": ["cfg"] }, "Forth": { "line_comment": ["\\\\"], 
"multi_line_comments": [["( ", ")"]], "extensions": [ "4th", "forth", "fr", "frt", "fth", "f83", "fb", "fpm", "e4", "rx", "ft" ] }, "FortranLegacy": { "name": "FORTRAN Legacy", "line_comment": ["c", "C", "!", "*"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["f", "for", "ftn", "f77", "pfo"] }, "FortranModern": { "name": "FORTRAN Modern", "line_comment": ["!"], "quotes": [["\\\"", "\\\""]], "extensions": ["f03", "f08", "f90", "f95", "fpp"] }, "FreeMarker": { "multi_line_comments": [["<#--", "-->"]], "extensions": ["ftl", "ftlh", "ftlx"] }, "FSharp": { "name": "F#", "line_comment": ["//"], "multi_line_comments": [["(*", "*)"]], "quotes": [["\\\"", "\\\""]], "verbatim_quotes": [["@\\\"", "\\\""]], "extensions": ["fs", "fsi", "fsx", "fsscript"] }, "Fstar": { "name": "F*", "quotes": [["\\\"", "\\\""]], "line_comment": ["//"], "multi_line_comments": [["(*", "*)"]], "extensions": ["fst", "fsti"] }, "Futhark": { "line_comment": ["--"], "extensions": ["fut"] }, "GDB": { "name": "GDB Script", "line_comment": ["#"], "extensions": ["gdb"] }, "GdScript": { "name": "GDScript", "line_comment": ["#"], "quotes": [ ["\\\"", "\\\""], ["'", "'"], ["\\\"\\\"\\\"", "\\\"\\\"\\\""] ], "extensions": ["gd"] }, "Gherkin": { "name": "Gherkin (Cucumber)", "line_comment": ["#"], "extensions": ["feature"] }, "Gleam": { "name": "Gleam", "line_comment": ["//", "///", "////"], "quotes": [["\\\"", "\\\""]], "extensions": ["gleam"] }, "GlimmerJs": { "name": "Glimmer JS", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"], [""]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "important_syntax": [""]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "important_syntax": [""], ["{{/*", "*/}}"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["gohtml"] }, "Graphql": { "name": "GraphQL", "quotes": [["\\\"", "\\\""], ["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "line_comment": ["#"], "extensions": ["gql", "graphql"] }, "Groovy": { "line_comment": ["//"], 
"multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "env": ["groovy"], "extensions": ["groovy", "grt", "gtpl", "gvy"] }, "Gwion": { "line_comment": ["#!"], "quotes": [["\\\"", "\\\""]], "extensions": ["gw"] }, "Haml": { "line_comment": ["-#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["haml"] }, "Hamlet": { "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["hamlet"] }, "Happy": { "extensions": ["y", "ly"] }, "Handlebars": { "multi_line_comments": [[""], ["{{!", "}}"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["hbs", "handlebars"] }, "Haskell": { "nested": true, "line_comment": ["--"], "multi_line_comments": [["{-", "-}"]], "extensions": ["hs"] }, "Haxe": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["hx"] }, "Hcl": { "name": "HCL", "line_comment": ["#", "//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["hcl", "tf", "tfvars"] }, "Headache": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["ha"] }, "Hex": { "name": "HEX", "blank": true, "extensions": ["hex"] }, "Hex0": { "extensions": ["hex0"], "line_comment": ["#", ";"] }, "Hex1": { "extensions": ["hex1"], "line_comment": ["#", ";"] }, "Hex2": { "extensions": ["hex2"], "line_comment": ["#", ";"] }, "HiCad": { "name": "HICAD", "line_comment": ["REM", "rem"], "extensions": ["MAC", "mac"] }, "Hlsl": { "name": "HLSL", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["hlsl", "fx", "fxsub"] }, "HolyC": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["HC", "hc","ZC","zc"] }, "Html": { "name": "HTML", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "kind": "html", "important_syntax": ["", "\\\\"] ], "quotes": [["''", 
"''"]], "extensions": ["thy"] }, "Jai": { "name": "JAI", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["jai"], "nested": true }, "Janet": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "extensions": ["janet"] }, "Java": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["java"] }, "JavaScript": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "mime": [ "application/javascript", "application/ecmascript", "application/x-ecmascript", "application/x-javascript", "text/javascript", "text/ecmascript", "text/javascript1.0", "text/javascript1.1", "text/javascript1.2", "text/javascript1.3", "text/javascript1.4", "text/javascript1.5", "text/jscript", "text/livescript", "text/x-ecmascript", "text/x-javascript" ], "extensions": ["cjs", "js", "mjs"] }, "Jinja2": { "name": "Jinja2", "blank": true, "extensions": ["j2", "jinja"], "multi_line_comments": [["{#", "#}"]] }, "Jq": { "name": "jq", "line_comment": ["#"], "quotes": [["\\\"", "\\\""]], "extensions": ["jq"] }, "JSLT": { "name": "JSLT", "line_comment": ["//"], "quotes": [["\\\"", "\\\""]], "extensions": ["jslt"] }, "Json": { "name": "JSON", "blank": true, "mime": ["application/json", "application/manifest+json"], "extensions": ["json"] }, "Jsonnet": { "line_comment": ["//", "#"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["jsonnet", "libsonnet"] }, "Jsx": { "name": "JSX", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "extensions": ["jsx"] }, "Julia": { "line_comment": ["#"], "multi_line_comments": [["#=", "=#"]], "quotes": [["\\\"", "\\\""], ["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "nested": true, "extensions": ["jl"] }, "Julius": { "line_comment": ["//"], "multi_line_comments": [["/*", 
"*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "extensions": ["julius"] }, "Jupyter": { "name": "Jupyter Notebooks", "extensions": ["ipynb"] }, "Just": { "shebangs": ["#!/usr/bin/env just --justfile"], "env": ["just"], "line_comment": ["#"], "extensions": ["just"], "filenames": ["justfile"] }, "K": { "name": "K", "nested": true, "line_comment": ["/"], "quotes": [["\\\"", "\\\""]], "extensions": ["k"] }, "KakouneScript": { "name": "Kakoune script", "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["kak"] }, "Kaem": { "name": "Kaem", "line_comment": ["#"], "extensions": ["kaem"] }, "Kotlin": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "nested": true, "quotes": [["\\\"", "\\\""], ["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "extensions": ["kt", "kts"] }, "Ksh": { "name": "Korn shell", "shebangs": ["#!/bin/ksh"], "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["ksh"], "extensions": ["ksh"] }, "Lalrpop": { "name": "LALRPOP", "line_comment": ["//"], "extensions": ["lalrpop"], "quotes": [["\\\"", "\\\""], ["#\\\"", "\\\"#"]], "verbatim_quotes": [["r##\\\"", "\\\"##"], ["r#\\\"", "\\\"#"]] }, "KvLanguage": { "name":"KV Language", "line_comment": ["# "], "doc_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["'''", "'''"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["kv"] }, "Lean": { "line_comment": ["--"], "multi_line_comments": [["/-", "-/"]], "nested": true, "extensions": ["lean", "hlean"] }, "Hledger": { "name": "hledger", "line_comment": [";", "#"], "multi_line_comments": [["comment", "end comment"]], "nested": false, "extensions": ["hledger"] }, "Less": { "name": "LESS", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "extensions": ["less"], "quotes": [["\\\"", "\\\""], ["'", "'"]] }, "Lex": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "extensions": ["l", "lex"] }, "Liquid": { "name": "Liquid", "quotes": [["\\\"", "\\\""], ["'", "'"]], 
"extensions": ["liquid"], "multi_line_comments": [[""], ["{% comment %}", "{% endcomment %}"]] }, "LinguaFranca": { "name": "Lingua Franca", "line_comment": ["//", "#"], "important_syntax": ["{="], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "nested": true, "extensions": ["lf"] }, "LinkerScript": { "name": "LD Script", "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["ld", "lds"] }, "Lisp": { "name": "Common Lisp", "line_comment": [";"], "multi_line_comments": [["#|", "|#"]], "nested": true, "extensions": ["lisp", "lsp", "asd"] }, "LiveScript": { "line_comment": ["#"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["ls"] }, "LLVM": { "line_comment": [";"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["ll"] }, "Logtalk": { "line_comment": ["%"], "quotes": [["\\\"", "\\\""]], "multi_line_comments": [["/*", "*/"]], "extensions": ["lgt", "logtalk"] }, "LolCode": { "name": "LOLCODE", "line_comment": ["BTW"], "quotes": [["\\\"", "\\\""]], "multi_line_comments": [["OBTW", "TLDR"]], "extensions": ["lol"] }, "Lua": { "line_comment": ["--"], "multi_line_comments": [["--[[", "]]"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["lua", "luau"] }, "Lucius": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["lucius"] }, "M1Assembly": { "name": "M1 Assembly", "extensions": ["m1"], "line_comment": ["#", ";"], "quotes": [["\\\"", "\\\""]] }, "M4": { "extensions": ["m4"], "line_comment": ["#", "dnl"], "quotes": [["`", "'"]] }, "Madlang": { "extensions": ["mad"], "line_comment": ["#"], "multi_line_comments": [["{#", "#}"]] }, "Makefile": { "line_comment": ["#"], "extensions": ["makefile", "mak", "mk"], "filenames": ["gnumakefile", "makefile"] }, "Markdown": { "literate": true, "important_syntax": ["```"], "extensions": ["md", "markdown"] }, "Max": { "extensions": ["maxpat"] }, "Mdx": 
{ "name": "MDX", "literate": true, "important_syntax": ["```"], "extensions": ["mdx"] }, "Menhir": { "nested": true, "quotes": [["\\\"", "\\\""]], "line_comment": ["//"], "multi_line_comments": [ ["(*", "*)"], ["/*", "*/"] ], "extensions": ["mll", "mly", "vy"] }, "Meson": { "line_comment": ["#"], "quotes": [["'", "'"], ["'''", "'''"]], "filenames": ["meson.build", "meson_options.txt"] }, "Metal": { "name": "Metal Shading Language", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["metal"] }, "Mint": { "blank": true, "extensions": ["mint"] }, "Mlatu": { "line_comment": ["//"], "quotes": [["\\\"", "\\\""]], "extensions": ["mlt"] }, "Modelica": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["mo", "mos"] }, "ModuleDef": { "name": "Module-Definition", "extensions": ["def"], "line_comment": [";"] }, "Mojo": { "line_comment": ["#"], "doc_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["'''", "'''"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["mojo", "🔥"] }, "MonkeyC": { "name": "Monkey C", "extensions": ["mc"], "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]] }, "MoonBit": { "line_comment": ["//"], "quotes": [["\\\"", "\\\""]], "extensions": ["mbt", "mbti"] }, "MoonScript": { "line_comment": ["--"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["moon"] }, "MsBuild": { "name": "MSBuild", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["csproj", "vbproj", "fsproj", "props", "targets"] }, "Mustache": { "multi_line_comments": [["{{!", "}}"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["mustache"] }, "Nextflow": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["nextflow", "nf"] }, "Nim": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["\\\"\\\"\\\"", "\\\"\\\"\\\""]], 
"extensions": ["nim"] }, "Nix": { "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "line_comment": ["#"], "extensions": ["nix"] }, "NotQuitePerl": { "name": "Not Quite Perl", "line_comment": ["#"], "multi_line_comments": [["=begin", "=end"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["nqp"] }, "NuGetConfig": { "name": "NuGet Config", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "filenames": ["nuget.config", "packages.config", "nugetdefaults.config"] }, "Nushell": { "line_comment": ["#"], "quotes": [ ["\\\"", "\\\""], ["'", "'"] ], "extensions": ["nu"] }, "ObjectiveC": { "name": "Objective-C", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["m"] }, "ObjectiveCpp": { "name": "Objective-C++", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["mm"] }, "OCaml": { "quotes": [["\\\"", "\\\""]], "multi_line_comments": [["(*", "*)"]], "extensions": ["ml", "mli", "re", "rei"] }, "Odin": { "extensions": ["odin"], "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]] }, "OpenScad": { "name": "OpenSCAD", "extensions": ["scad"], "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]] }, "OpenPolicyAgent": { "name": "Open Policy Agent", "line_comment": ["#"], "quotes": [["\\\"","\\\""], ["`", "`"]], "extensions": ["rego"] }, "OpenCL": { "name": "OpenCL", "multi_line_comments": [["/*", "*/"]], "extensions": ["cl", "ocl"] }, "OpenQasm": { "name": "OpenQASM", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "extensions": ["qasm"] }, "OpenType": { "name": "OpenType Feature File", "line_comment": ["#"], "extensions": ["fea"] }, "Org": { "line_comment": ["# "], "extensions": ["org"] }, "Oz": { "line_comment": ["%"], "quotes": [["\\\"", "\\\""]], "multi_line_comments": [["/*", "*/"]], "extensions": 
["oz"] }, "PacmanMakepkg": { "name": "Pacman's makepkg", "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "filenames": ["pkgbuild"] }, "Pan": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["pan", "tpl"] }, "Pascal": { "nested": true, "line_comment": ["//"], "multi_line_comments": [["{", "}"], ["(*", "*)"]], "quotes": [["'", "'"]], "extensions": ["pas"] }, "Perl": { "shebangs": ["#!/usr/bin/perl"], "line_comment": ["#"], "multi_line_comments": [["=pod", "=cut"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["pl", "pm"] }, "Pest": { "line_comment": ["//"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["pest"] }, "Phix": { "line_comment": ["--", "//", "#!"], "multi_line_comments": [["/*", "*/"], ["--/*", "--*/"]], "nested": true, "quotes": [["\\\"", "\\\""], ["'", "'"]], "verbatim_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["`", "`"]], "extensions": ["e","exw"] }, "Php": { "name": "PHP", "line_comment": ["#", "//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["php"] }, "PlantUml": { "name": "PlantUML", "line_comment": ["'"], "multi_line_comments": [["/'", "'/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["puml"] }, "Po": { "name": "PO File", "line_comment": ["#"], "extensions": ["po", "pot"] }, "Poke": { "multi_line_comments": [["/*", "*/"]], "extensions": ["pk"] }, "Polly": { "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["polly"] }, "Pony": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "doc_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "extensions": ["pony"] }, "PostCss": { "name": "PostCSS", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["pcss", "sss"] }, "PowerShell": { "line_comment": ["#"], "multi_line_comments": [["<#", "#>"]], "quotes": [ ["\\\"", "\\\""], ["'", "'"], 
["\\\"@", "@\\\""], ["@'", "'@"] ], "extensions": ["ps1", "psm1", "psd1", "ps1xml", "cdxml", "pssc", "psc1"] }, "PRACTICE": { "name": "Lauterbach PRACTICE Script", "line_comment": [";", "//"], "quotes": [["\\\"", "\\\""]], "extensions": ["cmm"] }, "Processing": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["pde"] }, "Prolog": { "line_comment": ["%"], "quotes": [["\\\"", "\\\""]], "multi_line_comments": [["/*", "*/"]], "extensions": ["p", "pro"] }, "PSL": { "name": "PSL Assertion", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["psl"] }, "Protobuf": { "name": "Protocol Buffers", "line_comment": ["//"], "extensions": ["proto"] }, "Pug" : { "line_comment": ["//", "//-"], "quotes": [ ["#{\\\"", "\\\"}"], ["#{'", "'}"], ["#{`", "`}"] ], "extensions": ["pug"] }, "Puppet": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["pp"] }, "PureScript": { "nested": true, "line_comment": ["--"], "multi_line_comments": [["{-", "-}"]], "extensions": ["purs"] }, "Pyret": { "line_comment": ["#"], "multi_line_comments": [["#|", "|#"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["arr"], "nested": true }, "Python": { "line_comment": ["#"], "doc_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["'''", "'''"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["python", "python2", "python3"], "mime": ["text/x-python"], "extensions": ["py", "pyw", "pyi"] }, "PRQL": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "mime": ["application/prql"], "extensions": ["prql"] }, "Q": { "name": "Q", "nested": true, "line_comment": ["/"], "quotes": [["\\\"", "\\\""]], "extensions": ["q"] }, "Qcl": { "name": "QCL", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["qcl"] }, "Qml": { "name": "QML", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], 
"quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["qml"] }, "R": { "line_comment": ["#"], "extensions": ["r"] }, "Racket": { "line_comment": [";"], "multi_line_comments": [["#|", "|#"]], "nested": true, "env": ["racket"], "extensions": ["rkt", "scrbl"] }, "Rakefile": { "line_comment": ["#"], "multi_line_comments": [["=begin", "=end"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "filenames": ["rakefile"], "extensions": ["rake"] }, "Raku": { "shebangs": ["#!/usr/bin/raku", "#!/usr/bin/perl6"], "line_comment": ["#"], "multi_line_comments": [ ["#`(", ")"], ["#`[", "]"], ["#`{", "}"], ["#`「", "」"] ], "nested": true, "quotes": [["\\\"", "\\\""] , ["'", "'"]], "verbatim_quotes": [["「", "」"]], "doc_quotes": [ ["#|{", "}"], ["#={", "}"], ["#|(", ")"], ["#=(", ")"], ["#|[", "]"], ["#=[", "]"], ["#|「", "」"], ["#=「", "」"], ["=begin pod", "=end pod"], ["=begin code", "=end code"], ["=begin head", "=end head"], ["=begin item", "=end item"], ["=begin table", "=end table"], ["=begin defn", "=end defn"], ["=begin para", "=end para"], ["=begin comment", "=end comment"], ["=begin data", "=end data"], ["=begin DESCRIPTION", "=end DESCRIPTION"], ["=begin SYNOPSIS", "=end SYNOPSIS"], ["=begin ", "=end "] ], "env": ["raku", "perl6"], "extensions": ["raku", "rakumod", "rakutest", "pm6", "pl6", "p6"] }, "Razor": { "line_comment": ["//"], "multi_line_comments": [[""], ["@*", "*@"], ["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "verbatim_quotes": [["@\\\"", "\\\""]], "extensions": ["cshtml", "razor"] }, "Redscript": { "name": "Redscript", "line_comment": ["//", "///"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "nested": true, "extensions": ["reds"] }, "Renpy": { "name": "Ren'Py", "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "extensions": ["rpy"] }, "ReScript": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["res", "resi"] }, "ReStructuredText": { "blank": true, 
"extensions": ["rst"] }, "Roc": { "line_comment": ["#"], "quotes": [ ["\\\"", "\\\""], ["'", "'"] ], "doc_quotes": [["\\\"\\\"\\\"", "\\\"\\\"\\\""]], "extensions": ["roc"] }, "RON": { "name": "Rusty Object Notation", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "nested": true, "extensions": ["ron"] }, "RPMSpecfile": { "name": "RPM Specfile", "line_comment": ["#"], "extensions": ["spec"] }, "Ruby": { "line_comment": ["#"], "multi_line_comments": [["=begin", "=end"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "env": ["ruby"], "extensions": ["rb"] }, "RubyHtml": { "name": "Ruby HTML", "multi_line_comments": [[""]], "important_syntax": ["", "<'"]], "extensions": ["e"] }, "Spice": { "name": "Spice Netlist", "line_comment": ["*"], "extensions": ["ckt"] }, "Sql": { "name": "SQL", "line_comment": ["--"], "multi_line_comments": [["/*", "*/"]], "quotes": [["'", "'"]], "extensions": ["sql"] }, "Sqf": { "name": "SQF", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["sqf"] }, "SRecode": { "name": "SRecode Template", "line_comment": [";;"], "extensions": ["srt"] }, "Stan": { "line_comment": ["//", "#"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["stan"] }, "Stata": { "line_comment": ["//", "*"], "multi_line_comments": [["/*", "*/"]], "extensions": ["do"] }, "Stratego": { "name": "Stratego/XT", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["$[", "]"], ["$<", ">"], ["${", "}"]], "extensions": ["str"] }, "Stylus": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["styl"] }, "Svelte": { "multi_line_comments": [[""]], "important_syntax": [""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "mime": ["image/svg+xml"], "extensions": ["svg"] }, "Swift": { "line_comment": ["//"], "multi_line_comments": [["/*", 
"*/"]], "quotes": [["\\\"", "\\\""]], "nested": true, "extensions": ["swift"] }, "Swig": { "name": "SWIG", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "nested": true, "extensions": ["swg", "i"] }, "SystemVerilog": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["sv", "svh"] }, "Slint": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["slint"] }, "Tact": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["tact"] }, "Tcl": { "name": "TCL", "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["tcl"] }, "Tera": { "multi_line_comments": [[""], ["{#", "#}"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["tera"] }, "Templ": { "name": "Templ", "line_comment": ["//"], "multi_line_comments": [[""], ["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "important_syntax": ["templ", "script", "css"], "extensions": ["templ", "tmpl"] }, "Tex": { "name": "TeX", "line_comment": ["%"], "extensions": ["tex", "sty"] }, "Text": { "name": "Plain Text", "literate": true, "mime": ["text/plain"], "extensions": ["text", "txt"] }, "Thrift": { "line_comment": ["#", "//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["thrift"] }, "Toml": { "name": "TOML", "line_comment": ["#"], "quotes": [ ["\\\"", "\\\""], ["'", "'"], ["\\\"\\\"\\\"", "\\\"\\\"\\\""], ["'''", "'''"] ], "extensions": ["toml"] }, "Tsx": { "name": "TSX", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "extensions": ["tsx"] }, "Ttcn": { "name": "TTCN-3", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["ttcn", "ttcn3", "ttcnpp"] }, "Twig": { "name": "Twig", "quotes": 
[["\\\"", "\\\""], ["'", "'"]], "extensions": ["twig"], "multi_line_comments": [[""], ["{#", "#}"]] }, "TypeScript": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "extensions": ["ts", "mts", "cts"] }, "Typst": { "nested": true, "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["typ"] }, "Uiua": { "line_comment": ["#"], "quotes": [["\\\"", "\\\""]], "extensions": ["ua"] }, "UMPL": { "line_comment": ["!"], "quotes": [["`", "`"]], "extensions": ["umpl"] }, "Unison": { "nested": true, "line_comment": ["--"], "multi_line_comments": [["{-", "-}"]], "quotes": [["\\\"", "\\\""]], "extensions": ["u"] }, "UnrealDeveloperMarkdown": { "name": "Unreal Markdown", "important_syntax": ["```"], "extensions": ["udn"] }, "UnrealPlugin": { "name": "Unreal Plugin", "blank": true, "extensions": ["uplugin"] }, "UnrealProject": { "name": "Unreal Project", "blank": true, "extensions": ["uproject"] }, "UnrealScript": { "name": "Unreal Script", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["uc", "uci", "upkg"] }, "UnrealShader": { "name": "Unreal Shader", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["usf"] }, "UnrealShaderHeader": { "name": "Unreal Shader Header", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["ush"] }, "UrWeb": { "name": "Ur/Web", "quotes": [["\\\"", "\\\""]], "multi_line_comments": [["(*", "*)"]], "extensions": ["ur", "urs"] }, "UrWebProject": { "name": "Ur/Web Project", "line_comment": ["#"], "extensions": ["urp"] }, "Vala": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["vala"] }, "VB6": { "name": "VB6/VBA", "line_comment": ["'"], "extensions": ["frm", "bas", "cls", "ctl", "dsr"] }, "VBScript": { 
"name": "VBScript", "line_comment": ["'", "REM"], "extensions": ["vbs"] }, "Velocity": { "name": "Apache Velocity", "line_comment": ["##"], "multi_line_comments": [["#*", "*#"]], "extensions": ["vm"], "quotes": [["'", "'"], ["\\\"", "\\\""]] }, "Verilog": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["vg", "vh"] }, "VerilogArgsFile": { "name": "Verilog Args File", "extensions": ["irunargs", "xrunargs"] }, "Vhdl": { "name": "VHDL", "line_comment": ["--"], "multi_line_comments": [["/*", "*/"]], "extensions": ["vhd", "vhdl"] }, "Virgil": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""]], "extensions": ["v3"] }, "VisualBasic": { "name": "Visual Basic", "quotes": [["\\\"", "\\\""]], "line_comment": ["'"], "extensions": ["vb"] }, "VisualStudioProject": { "name": "Visual Studio Project", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["vcproj", "vcxproj"] }, "VisualStudioSolution": { "name": "Visual Studio Solution", "blank": true, "extensions": ["sln"] }, "VimScript": { "name": "Vim Script", "line_comment": ["\\\""], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["vim"] }, "Vue": { "name": "Vue", "line_comment": ["//"], "multi_line_comments": [[""], ["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["`", "`"]], "important_syntax": [""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["xaml"] }, "XcodeConfig": { "name": "Xcode Config", "line_comment": ["//"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["xcconfig"] }, "Xml": { "name": "XML", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["xml"] }, "XSL": { "name": "XSL", "multi_line_comments": [[""]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["xsl", "xslt"] }, "Xtend": { "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"], ["'''", 
"'''"]], "extensions": ["xtend"] }, "Yaml": { "name": "YAML", "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["yaml", "yml"] }, "ZenCode": { "line_comment": ["//", "#"], "multi_line_comments": [["/*", "*/"]], "quotes": [["\\\"", "\\\""], ["'", "'"]], "verbatim_quotes": [["@\\\"", "\\\""], ["@'", "'"]], "extensions": ["zs"] }, "Zig": { "line_comment": ["//"], "quotes": [["\\\"", "\\\""]], "extensions": ["zig"] }, "Zokrates": { "name": "ZoKrates", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "extensions": ["zok"] }, "Zsh": { "shebangs": ["#!/bin/zsh"], "line_comment": ["#"], "quotes": [["\\\"", "\\\""], ["'", "'"]], "extensions": ["zsh"] }, "GdShader": { "name": "GDShader", "line_comment": ["//"], "multi_line_comments": [["/*", "*/"]], "extensions": ["gdshader"] } } } tokei-13.0.0/src/cli.rs000064400000000000000000000412671046102023000127360ustar 00000000000000use std::{process, str::FromStr}; use clap::{crate_description, value_parser, Arg, ArgAction, ArgMatches}; use colored::Colorize; use tokei::{Config, LanguageType, Sort}; use crate::{ cli_utils::{crate_version, parse_or_exit, NumberFormatStyle}, consts::{ BLANKS_COLUMN_WIDTH, CODE_COLUMN_WIDTH, COMMENTS_COLUMN_WIDTH, LANGUAGE_COLUMN_WIDTH, LINES_COLUMN_WIDTH, PATH_COLUMN_WIDTH, }, input::Format, }; /// Used for sorting languages. #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] pub enum Streaming { /// simple lines. Simple, /// Json outputs. 
Json, } impl std::str::FromStr for Streaming { type Err = String; fn from_str(s: &str) -> Result { Ok(match s.to_lowercase().as_ref() { "simple" => Streaming::Simple, "json" => Streaming::Json, s => return Err(format!("Unsupported streaming option: {}", s)), }) } } #[derive(Debug)] pub struct Cli { matches: ArgMatches, pub columns: Option, pub files: bool, pub hidden: bool, pub no_ignore: bool, pub no_ignore_parent: bool, pub no_ignore_dot: bool, pub no_ignore_vcs: bool, pub output: Option, pub streaming: Option, pub print_languages: bool, pub sort: Option, pub sort_reverse: bool, pub types: Option>, pub compact: bool, pub number_format: num_format::CustomFormat, } impl Cli { pub fn from_args() -> Self { let matches = clap::Command::new("tokei") .version(crate_version()) .author("Erin P. + Contributors") .styles(clap_cargo::style::CLAP_STYLING) .about(concat!( crate_description!(), "\n", "Support this project on GitHub Sponsors: https://github.com/sponsors/XAMPPRocky" )) .arg( Arg::new("columns") .long("columns") .short('c') .value_parser(value_parser!(usize)) .conflicts_with("output") .help( "Sets a strict column width of the output, only available for \ terminal output.", ), ) .arg( Arg::new("exclude") .long("exclude") .short('e') .action(ArgAction::Append) .help("Ignore all files & directories matching the pattern."), ) .arg( Arg::new("files") .long("files") .short('f') .action(ArgAction::SetTrue) .help("Will print out statistics on individual files."), ) .arg( Arg::new("file_input") .long("input") .short('i') .help( "Gives statistics from a previous tokei run. Can be given a file path, \ or \"stdin\" to read from stdin.", ), ) .arg( Arg::new("hidden") .long("hidden") .action(ArgAction::SetTrue) .help("Count hidden files."), ) .arg( Arg::new("input") .num_args(1..) .conflicts_with("languages") .help("The path(s) to the file or directory to be counted. 
(default current directory)"), ) .arg( Arg::new("languages") .long("languages") .short('l') .action(ArgAction::SetTrue) .conflicts_with("input") .help("Prints out supported languages and their extensions."), ) .arg(Arg::new("no_ignore") .long("no-ignore") .action(ArgAction::SetTrue) .help( "\ Don't respect ignore files (.gitignore, .ignore, etc.). This implies \ --no-ignore-parent, --no-ignore-dot, and --no-ignore-vcs.\ ", )) .arg(Arg::new("no_ignore_parent") .long("no-ignore-parent") .action(ArgAction::SetTrue) .help( "\ Don't respect ignore files (.gitignore, .ignore, etc.) in parent \ directories.\ ", )) .arg(Arg::new("no_ignore_dot") .long("no-ignore-dot") .action(ArgAction::SetTrue) .help( "\ Don't respect .ignore and .tokeignore files, including those in \ parent directories.\ ", )) .arg(Arg::new("no_ignore_vcs") .long("no-ignore-vcs") .action(ArgAction::SetTrue) .help( "\ Don't respect VCS ignore files (.gitignore, .hgignore, etc.) including \ those in parent directories.\ ", )) .arg( Arg::new("output") .long("output") .short('o') .value_parser(Format::from_str) .help( "Outputs Tokei in a specific format. Compile with additional features for \ more format support.", ), ) .arg( Arg::new("streaming") .long("streaming") .value_parser(["simple", "json"]) .ignore_case(true) .help( "prints the (language, path, lines, blanks, code, comments) records as \ simple lines or as Json for batch processing", ), ) .arg( Arg::new("sort") .long("sort") .short('s') .value_parser(["files", "lines", "blanks", "code", "comments"]) .ignore_case(true) .conflicts_with("rsort") .help("Sort languages based on column"), ) .arg( Arg::new("rsort") .long("rsort") .short('r') .value_parser(["files", "lines", "blanks", "code", "comments"]) .ignore_case(true) .conflicts_with("sort") .help("Reverse sort languages based on column"), ) .arg( Arg::new("types") .long("types") .short('t') .action(ArgAction::Append) .help( "Filters output by language type, separated by a comma. i.e. 
\ -t=Rust,Markdown", ), ) .arg( Arg::new("compact") .long("compact") .short('C') .action(ArgAction::SetTrue) .help("Do not print statistics about embedded languages."), ) .arg( Arg::new("num_format_style") .long("num-format") .short('n') .value_parser(["commas", "dots", "plain", "underscores"]) .conflicts_with("output") .help( "Format of printed numbers, i.e., plain (1234, default), \ commas (1,234), dots (1.234), or underscores (1_234). Cannot be \ used with --output.", ), ) .arg( Arg::new("verbose") .long("verbose") .short('v') .action(ArgAction::Count) .help( "Set log output level: 1: to show unknown file extensions, 2: reserved for future debugging, 3: enable file level trace. Not recommended on multiple files", ), ) .get_matches(); let columns = matches.get_one::("columns").cloned(); let files = matches.get_flag("files"); let hidden = matches.get_flag("hidden"); let no_ignore = matches.get_flag("no_ignore"); let no_ignore_parent = matches.get_flag("no_ignore_parent"); let no_ignore_dot = matches.get_flag("no_ignore_dot"); let no_ignore_vcs = matches.get_flag("no_ignore_vcs"); let print_languages = matches.get_flag("languages"); let verbose = matches.get_count("verbose") as u64; let compact = matches.get_flag("compact"); let types = matches.get_many("types").map(|e| { e.flat_map(|x: &String| { x.split(',') .map(str::parse::) .filter_map(Result::ok) .collect::>() }) .collect() }); let num_format_style: NumberFormatStyle = matches .get_one::("num_format_style") .cloned() .unwrap_or_default(); let number_format = match num_format_style.get_format() { Ok(format) => format, Err(e) => { eprintln!("Error:\n{}", e); process::exit(1); } }; // Sorting category should be restricted by clap but parse before we do // work just in case. 
let (sort, sort_reverse) = if let Some(sort) = matches.get_one::("sort") { (Some(sort.clone()), false) } else { let sort = matches.get_one::("rsort"); (sort.cloned(), sort.is_some()) }; let sort = sort.map(|x| match Sort::from_str(&x) { Ok(sort) => sort, Err(e) => { eprintln!("Error:\n{}", e); process::exit(1); } }); // Format category is overly accepting by clap (so the user knows what // is supported) but this will fail if support is not compiled in and // give a useful error to the user. let output = matches.get_one("output").cloned(); let streaming = matches .get_one("streaming") .cloned() .map(parse_or_exit::); crate::cli_utils::setup_logger(verbose); let cli = Cli { matches, columns, files, hidden, no_ignore, no_ignore_parent, no_ignore_dot, no_ignore_vcs, output, streaming, print_languages, sort, sort_reverse, types, compact, number_format, }; debug!("CLI Config: {:#?}", cli); cli } pub fn file_input(&self) -> Option<&str> { self.matches.get_one("file_input").cloned() } pub fn ignored_directories(&self) -> Vec<&str> { let mut ignored_directories: Vec<&str> = Vec::new(); if let Some(user_ignored) = self.matches.get_many::("exclude") { ignored_directories.extend(user_ignored.map(|x| x.as_str())); } ignored_directories } pub fn input(&self) -> Vec<&str> { match self.matches.get_many::("input") { Some(vs) => vs.map(|x| x.as_str()).collect(), None => vec!["."], } } pub fn print_supported_languages() -> Result<(), Box> { use table_formatter::table::*; use table_formatter::{cell, table}; let term_width = term_size::dimensions().map(|(w, _)| w).unwrap_or(75) - 8; let (lang_w, suffix_w) = if term_width <= 80 { (term_width / 2, term_width / 2) } else { (40, term_width - 40) }; let header = vec![ cell!( "Language", align = Align::Left, padding = Padding::NONE, width = Some(lang_w) ) .with_formatter(vec![table_formatter::table::FormatterFunc::Normal( Colorize::bold, )]), cell!( "Extensions", align = Align::Left, padding = Padding::new(3, 0), width = Some(suffix_w) ) 
.with_formatter(vec![table_formatter::table::FormatterFunc::Normal( Colorize::bold, )]), ]; let content = LanguageType::list() .iter() .map(|(key, ext)| { vec![ // table::TableCell::new(table::Cell::TextCell(key.name().to_string())) // .with_width(lang_w), cell!(key.name()).with_width(Some(lang_w)), cell!( if matches!(key, LanguageType::Emojicode) { ext.join(", ") + "\u{200b}" } else if ext.is_empty() { "".to_string() } else { ext.join(", ") }, align = Align::Left, padding = Padding::new(3, 0), width = Some(suffix_w) ), ] }) .collect(); let t = table!(header - content with Border::ALL); let mut render_result = Vec::new(); t.render(&mut render_result)?; println!("{}", String::from_utf8(render_result)?); Ok(()) } /// Overrides the shared options (See `tokei::Config` for option /// descriptions) between the CLI and the config files. CLI flags have /// higher precedence than options present in config files. /// /// #### Shared options /// * `hidden` /// * `no_ignore` /// * `no_ignore_parent` /// * `no_ignore_dot` /// * `no_ignore_vcs` /// * `types` pub fn override_config(&mut self, mut config: Config) -> Config { config.hidden = if self.hidden { Some(true) } else { config.hidden }; config.no_ignore = if self.no_ignore { Some(true) } else { config.no_ignore }; config.no_ignore_parent = if self.no_ignore_parent { Some(true) } else { config.no_ignore_parent }; config.no_ignore_dot = if self.no_ignore_dot { Some(true) } else { config.no_ignore_dot }; config.no_ignore_vcs = if self.no_ignore_vcs { Some(true) } else { config.no_ignore_vcs }; config.for_each_fn = match self.streaming { Some(Streaming::Json) => Some(|l: LanguageType, e| { println!("{}", serde_json::json!({"language": l.name(), "stats": e})); }), Some(Streaming::Simple) => Some(|l: LanguageType, e| { println!( "{:>LANGUAGE_COLUMN_WIDTH$} {:LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", l.name(), e.name.to_string_lossy().to_string(), e.stats.lines(), 
e.stats.code, e.stats.comments, e.stats.blanks ); }), _ => None, }; config.types = self.types.take().or(config.types); config } pub fn print_input_parse_failure(input_filename: &str) { eprintln!("Error:\n Failed to parse input file: {}", input_filename); let not_supported = Format::not_supported(); if !not_supported.is_empty() { eprintln!( " This version of tokei was compiled without serialization support for the following formats: {not_supported} You may want to install any comma separated combination of {all:?}: cargo install tokei --features {all:?} Or use the 'all' feature: cargo install tokei --features all \n", not_supported = not_supported.join(", "), // no space after comma to ease copypaste all = self::Format::all_feature_names().join(",") ); } } } tokei-13.0.0/src/cli_utils.rs000064400000000000000000000371721046102023000141560ustar 00000000000000use std::{ borrow::Cow, fmt, io::{self, Write}, process, str::FromStr, }; use clap::crate_version; use colored::Colorize; use num_format::ToFormattedString; use crate::input::Format; use tokei::{find_char_boundary, CodeStats, Language, LanguageType, Report}; use crate::consts::{ BLANKS_COLUMN_WIDTH, CODE_COLUMN_WIDTH, COMMENTS_COLUMN_WIDTH, FILES_COLUMN_WIDTH, LINES_COLUMN_WIDTH, }; const NO_LANG_HEADER_ROW_LEN: usize = 69; const NO_LANG_ROW_LEN: usize = 63; const NO_LANG_ROW_LEN_NO_SPACES: usize = 56; const IDENT_INACCURATE: &str = "(!)"; pub fn crate_version() -> String { if Format::supported().is_empty() { format!( "{} compiled without serialization formats.", crate_version!() ) } else { format!( "{} compiled with serialization support: {}", crate_version!(), Format::supported().join(", ") ) } } pub fn setup_logger(verbose_option: u64) { use log::LevelFilter; let mut builder = env_logger::Builder::new(); let filter_level = match verbose_option { 1 => LevelFilter::Warn, 2 => LevelFilter::Debug, 3 => LevelFilter::Trace, _ => LevelFilter::Error, }; builder.filter(None, filter_level); builder.init(); } pub fn 
parse_or_exit(s: &str) -> T where T: FromStr, T::Err: fmt::Display, { T::from_str(s).unwrap_or_else(|e| { eprintln!("Error:\n{}", e); process::exit(1); }) } #[non_exhaustive] #[derive(Debug, Copy, Clone)] pub enum NumberFormatStyle { // 1234 (Default) Plain, // 1,234 Commas, // 1.234 Dots, // 1_234 Underscores, } impl Default for NumberFormatStyle { fn default() -> Self { Self::Plain } } impl FromStr for NumberFormatStyle { type Err = String; fn from_str(s: &str) -> Result { match s { "plain" => Ok(Self::Plain), "commas" => Ok(Self::Commas), "dots" => Ok(Self::Dots), "underscores" => Ok(Self::Underscores), _ => Err(format!( "Expected 'plain', 'commas', 'underscores', or 'dots' for num-format, but got '{}'", s, )), } } } impl NumberFormatStyle { fn separator(self) -> &'static str { match self { Self::Plain => "", Self::Commas => ",", Self::Dots => ".", Self::Underscores => "_", } } pub fn get_format(self) -> Result { num_format::CustomFormat::builder() .grouping(num_format::Grouping::Standard) .separator(self.separator()) .build() } } pub struct Printer { writer: W, columns: usize, path_length: usize, row: String, subrow: String, list_files: bool, number_format: num_format::CustomFormat, } impl Printer { pub fn new( columns: usize, list_files: bool, writer: W, number_format: num_format::CustomFormat, ) -> Self { Self { columns, list_files, path_length: columns - NO_LANG_ROW_LEN_NO_SPACES, writer, row: "━".repeat(columns), subrow: "─".repeat(columns), number_format, } } } impl Printer { pub fn print_header(&mut self) -> io::Result<()> { self.print_row()?; let files_column_width: usize = FILES_COLUMN_WIDTH + 6; writeln!( self.writer, " {:<6$} {:>files_column_width$} {:>LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", "Language".bold().blue(), "Files".bold().blue(), "Lines".bold().blue(), "Code".bold().blue(), "Comments".bold().blue(), "Blanks".bold().blue(), self.columns - NO_LANG_HEADER_ROW_LEN )?; self.print_row() } 
pub fn print_inaccuracy_warning(&mut self) -> io::Result<()> { writeln!( self.writer, "Note: results can be inaccurate for languages marked with '{}'", IDENT_INACCURATE ) } pub fn print_language(&mut self, language: &Language, name: &str) -> io::Result<()> where W: Write, { self.print_language_name(language.inaccurate, name, None)?; write!(self.writer, " ")?; writeln!( self.writer, "{:>FILES_COLUMN_WIDTH$} {:>LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", language .reports .len() .to_formatted_string(&self.number_format), language.lines().to_formatted_string(&self.number_format), language.code.to_formatted_string(&self.number_format), language.comments.to_formatted_string(&self.number_format), language.blanks.to_formatted_string(&self.number_format), ) } fn print_language_in_print_total(&mut self, language: &Language) -> io::Result<()> where W: Write, { self.print_language_name(language.inaccurate, "Total", None)?; write!(self.writer, " ")?; writeln!( self.writer, "{:>FILES_COLUMN_WIDTH$} {:>LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", language .children .values() .map(Vec::len) .sum::() .to_formatted_string(&self.number_format) .blue(), language .lines() .to_formatted_string(&self.number_format) .blue(), language .code .to_formatted_string(&self.number_format) .blue(), language .comments .to_formatted_string(&self.number_format) .blue(), language .blanks .to_formatted_string(&self.number_format) .blue(), ) } pub fn print_language_name( &mut self, inaccurate: bool, name: &str, prefix: Option<&str>, ) -> io::Result<()> { let mut lang_section_len = self.columns - NO_LANG_ROW_LEN - prefix.map_or(0, str::len); if inaccurate { lang_section_len -= IDENT_INACCURATE.len(); } if let Some(prefix) = prefix { write!(self.writer, "{}", prefix)?; } // truncate and replace the last char with a `|` if the name is too long if lang_section_len < name.len() { write!(self.writer, " 
{:.len$}", name, len = lang_section_len - 1)?; write!(self.writer, "|")?; } else { write!( self.writer, " {: io::Result<()> { self.print_language_name(false, &language_type.to_string(), Some(" |-"))?; let mut code = 0; let mut comments = 0; let mut blanks = 0; for stats in stats.iter().map(tokei::CodeStats::summarise) { code += stats.code; comments += stats.comments; blanks += stats.blanks; } if stats.is_empty() { Ok(()) } else { writeln!( self.writer, " {:>FILES_COLUMN_WIDTH$} {:>LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", stats.len().to_formatted_string(&self.number_format), (code + comments + blanks).to_formatted_string(&self.number_format), code.to_formatted_string(&self.number_format), comments.to_formatted_string(&self.number_format), blanks.to_formatted_string(&self.number_format), ) } } fn print_language_total(&mut self, parent: &Language) -> io::Result<()> { for (language, reports) in &parent.children { self.print_code_stats( *language, &reports .iter() .map(|r| r.stats.summarise()) .collect::>(), )?; } let mut subtotal = tokei::Report::new("(Total)".into()); let summary = parent.summarise(); subtotal.stats.code += summary.code; subtotal.stats.comments += summary.comments; subtotal.stats.blanks += summary.blanks; self.print_report_with_name(&subtotal)?; Ok(()) } pub fn print_results<'a, I>( &mut self, languages: I, compact: bool, is_sorted: bool, ) -> io::Result<()> where I: Iterator, { let (a, b): (Vec<_>, Vec<_>) = languages .filter(|(_, v)| !v.is_empty()) .partition(|(_, l)| compact || l.children.is_empty()); let mut first = true; for languages in &[&a, &b] { for &(name, language) in *languages { let has_children = !(compact || language.children.is_empty()); if first { first = false; } else if has_children || self.list_files { self.print_subrow()?; } self.print_language(language, name.name())?; if has_children { self.print_language_total(language)?; } if self.list_files { self.print_subrow()?; let 
mut reports: Vec<&Report> = language.reports.iter().collect(); if !is_sorted { reports.sort_by(|&a, &b| a.name.cmp(&b.name)); } if compact { for &report in &reports { writeln!(self.writer, "{:1$}", report, self.path_length)?; } } else { let (a, b): (Vec<&Report>, Vec<&Report>) = reports.iter().partition(|&r| r.stats.blobs.is_empty()); for reports in &[&a, &b] { let mut first = true; for report in reports.iter() { if report.stats.blobs.is_empty() { writeln!(self.writer, "{:1$}", report, self.path_length)?; } else { if first && a.is_empty() { writeln!(self.writer, " {}", report.name.display())?; first = false; } else { writeln!( self.writer, "-- {} {}", report.name.display(), "-".repeat( self.columns - 4 - report.name.display().to_string().len() ) )?; } let mut new_report = (*report).clone(); new_report.name = name.to_string().into(); writeln!( self.writer, " |-{:1$}", new_report, self.path_length - 3 )?; self.print_report_total(report, language.inaccurate)?; } } } } } } } Ok(()) } fn print_row(&mut self) -> io::Result<()> { writeln!(self.writer, "{}", self.row) } fn print_subrow(&mut self) -> io::Result<()> { writeln!(self.writer, "{}", self.subrow.dimmed()) } fn print_report( &mut self, language_type: LanguageType, stats: &CodeStats, inaccurate: bool, ) -> io::Result<()> { self.print_language_name(inaccurate, &language_type.to_string(), Some(" |-"))?; writeln!( self.writer, " {:>FILES_COLUMN_WIDTH$} {:>LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", " ", stats.lines().to_formatted_string(&self.number_format), stats.code.to_formatted_string(&self.number_format), stats.comments.to_formatted_string(&self.number_format), stats.blanks.to_formatted_string(&self.number_format), ) } fn print_report_total(&mut self, report: &Report, inaccurate: bool) -> io::Result<()> { if report.stats.blobs.is_empty() { return Ok(()); } let mut subtotal = tokei::Report::new("|- (Total)".into()); subtotal.stats.code += report.stats.code; 
subtotal.stats.comments += report.stats.comments; subtotal.stats.blanks += report.stats.blanks; for (language_type, stats) in &report.stats.blobs { self.print_report(*language_type, stats, inaccurate)?; subtotal.stats += stats.summarise(); } self.print_report_with_name(report)?; Ok(()) } fn print_report_with_name(&mut self, report: &Report) -> io::Result<()> { let name = report.name.to_string_lossy(); let name_length = name.len(); if name_length > self.path_length { let mut formatted = String::from("|"); // Add 1 to the index to account for the '|' we add to the output string let from = find_char_boundary(&name, name_length + 1 - self.path_length); formatted.push_str(&name[from..]); } self.print_report_total_formatted(name, self.path_length, report)?; Ok(()) } fn print_report_total_formatted( &mut self, name: Cow<'_, str>, max_len: usize, report: &Report, ) -> io::Result<()> { let lines_column_width: usize = FILES_COLUMN_WIDTH + 6; writeln!( self.writer, " {: lines_column_width$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", name, report .stats .lines() .to_formatted_string(&self.number_format), report.stats.code.to_formatted_string(&self.number_format), report .stats .comments .to_formatted_string(&self.number_format), report.stats.blanks.to_formatted_string(&self.number_format), max = max_len ) } pub fn print_total(&mut self, languages: &tokei::Languages) -> io::Result<()> { let total = languages.total(); self.print_row()?; self.print_language_in_print_total(&total)?; self.print_row() } } tokei-13.0.0/src/config.rs000064400000000000000000000161221046102023000134240ustar 00000000000000use std::{env, fs, path::PathBuf}; use etcetera::BaseStrategy; use crate::language::LanguageType; use crate::sort::Sort; use crate::stats::Report; /// A configuration struct for how [`Languages::get_statistics`] searches and /// counts languages. 
/// /// ``` /// use tokei::Config; /// /// let config = Config { /// treat_doc_strings_as_comments: Some(true), /// ..Config::default() /// }; /// ``` /// /// [`Languages::get_statistics`]: struct.Languages.html#method.get_statistics #[derive(Debug, Default, Deserialize)] pub struct Config { /// Width of columns to be printed to the terminal. _This option is ignored /// in the library._ *Default:* Auto detected width of the terminal. pub columns: Option, /// Count hidden files and directories. *Default:* `false`. pub hidden: Option, /// Don't respect ignore files (.gitignore, .ignore, etc.). This implies --no-ignore-parent, /// --no-ignore-dot, and --no-ignore-vcs. *Default:* `false`. pub no_ignore: Option, /// Don't respect ignore files (.gitignore, .ignore, etc.) in parent directories. /// *Default:* `false`. pub no_ignore_parent: Option, /// Don't respect .ignore and .tokeignore files, including those in parent directories. /// *Default:* `false`. pub no_ignore_dot: Option, /// Don't respect VCS ignore files (.gitignore, .hgignore, etc.), including those in /// parent directories. *Default:* `false`. pub no_ignore_vcs: Option, /// Whether to treat doc strings in languages as comments. *Default:* /// `false`. pub treat_doc_strings_as_comments: Option, /// Sort languages. *Default:* `None`. pub sort: Option, /// Filters languages searched to just those provided. E.g. A directory /// containing `C`, `Cpp`, and `Rust` with a `Config.types` of `[Cpp, Rust]` /// will count only `Cpp` and `Rust`. *Default:* `None`. pub types: Option>, // /// A map of individual language configuration. // pub languages: Option>, /// Whether to output only the paths for downstream batch processing /// *Default:* false #[serde(skip)] /// Adds a closure for each function, e.g., print the result pub for_each_fn: Option, } impl Config { /// Constructs a new `Config` from either `$base/tokei.toml` or /// `$base/.tokeirc`. 
`tokei.toml` takes precedence over `.tokeirc` /// as the latter is a hidden file on Unix and not an idiomatic /// filename on Windows. fn get_config(base: PathBuf) -> Option { fs::read_to_string(base.join("tokei.toml")) .ok() .or_else(|| fs::read_to_string(base.join(".tokeirc")).ok()) .and_then(|s| toml::from_str(&s).ok()) } /// Creates a `Config` from three configuration files if they are available. /// Files can have two different names `tokei.toml` and `.tokeirc`. /// Firstly it will attempt to find a config in the configuration directory /// (see below), secondly from the home directory, `$HOME/`, /// and thirdly from the current directory, `./`. /// The current directory's configuration will take priority over the configuration /// directory. /// /// |Platform | Value | Example | /// | ------- | ------------------------------------- | ------------------------------ | /// | Linux | `$XDG_CONFIG_HOME` or `$HOME`/.config | /home/alice/.config | /// | macOS | `$XDG_CONFIG_HOME` or `$HOME`/.config | /Users/alice/.config | /// | Windows | `{FOLDERID_RoamingAppData}` | C:\Users\Alice\AppData\Roaming | /// /// # Example /// ```toml /// columns = 80 /// types = ["Python"] /// treat_doc_strings_as_comments = true // /// // /// [[languages.Python]] // /// extensions = ["py3"] /// ``` pub fn from_config_files() -> Self { let conf_dir = etcetera::choose_base_strategy() .ok() .map(|basedirs| basedirs.config_dir()) .and_then(Self::get_config) .unwrap_or_default(); let home_dir = etcetera::home_dir() .ok() .and_then(Self::get_config) .unwrap_or_default(); let current_dir = env::current_dir() .ok() .and_then(Self::get_config) .unwrap_or_default(); #[allow(clippy::or_fun_call)] Config { columns: current_dir .columns .or(home_dir.columns.or(conf_dir.columns)), hidden: current_dir.hidden.or(home_dir.hidden.or(conf_dir.hidden)), //languages: current_dir.languages.or(conf_dir.languages), treat_doc_strings_as_comments: current_dir.treat_doc_strings_as_comments.or(home_dir 
.treat_doc_strings_as_comments .or(conf_dir.treat_doc_strings_as_comments)), sort: current_dir.sort.or(home_dir.sort.or(conf_dir.sort)), types: current_dir.types.or(home_dir.types.or(conf_dir.types)), for_each_fn: current_dir .for_each_fn .or(home_dir.for_each_fn.or(conf_dir.for_each_fn)), no_ignore: current_dir .no_ignore .or(home_dir.no_ignore.or(conf_dir.no_ignore)), no_ignore_parent: current_dir .no_ignore_parent .or(home_dir.no_ignore_parent.or(conf_dir.no_ignore_parent)), no_ignore_dot: current_dir .no_ignore_dot .or(home_dir.no_ignore_dot.or(conf_dir.no_ignore_dot)), no_ignore_vcs: current_dir .no_ignore_vcs .or(home_dir.no_ignore_vcs.or(conf_dir.no_ignore_vcs)), } } } /* /// Configuration for an individual [`LanguageType`]. /// /// ``` /// use std::collections::HashMap; /// use tokei::{Config, LanguageConfig, LanguageType}; /// /// let config = Config { /// languages: { /// let cpp_conf = LanguageConfig { /// extensions: vec![String::from("c")], /// }; /// /// let mut languages_config = HashMap::new(); /// languages_config.insert(LanguageType::Cpp, cpp_conf); /// /// Some(languages_config) /// }, /// /// ..Config::default() /// }; /// /// ``` /// /// [`LanguageType`]: enum.LanguageType.html #[derive(Debug, Default, Deserialize)] pub struct LanguageConfig { /// Additional extensions for a language. Any extensions that overlap with /// already defined extensions from `tokei` will be ignored. pub extensions: Vec, } impl LanguageConfig { /// Creates a new empty configuration. By default this will not change /// anything from the default. pub fn new() -> Self { Self::default() } /// Accepts a `Vec` representing additional extensions for a /// language. Any extensions that overlap with already defined extensions /// from `tokei` will be ignored. pub fn extensions(&mut self, extensions: Vec) { self.extensions = extensions; } } */ tokei-13.0.0/src/consts.rs000064400000000000000000000011421046102023000134640ustar 00000000000000// Set of common pub consts. 
/// Fallback row length pub const FALLBACK_ROW_LEN: usize = 81; // Column widths used for console printing. /// Language column width pub const LANGUAGE_COLUMN_WIDTH: usize = 10; /// Path column width pub const PATH_COLUMN_WIDTH: usize = 80; /// Files column width pub const FILES_COLUMN_WIDTH: usize = 8; /// Lines column width pub const LINES_COLUMN_WIDTH: usize = 12; /// Code column width pub const CODE_COLUMN_WIDTH: usize = 12; /// Comments column width pub const COMMENTS_COLUMN_WIDTH: usize = 12; /// Blanks column width pub const BLANKS_COLUMN_WIDTH: usize = 12; tokei-13.0.0/src/input.rs000064400000000000000000000157361046102023000133300ustar 00000000000000use serde::{Deserialize, Serialize}; use std::{collections::BTreeMap, error::Error, str::FromStr}; use tokei::{Language, LanguageType, Languages}; type LanguageMap = BTreeMap; #[derive(Deserialize, Serialize, Debug)] struct Output { #[serde(flatten)] languages: LanguageMap, #[serde(rename = "Total")] totals: Language, } macro_rules! supported_formats { ($( ($name:ident, $feature:expr, $variant:ident [$($krate:ident),+]) => $parse_kode:expr, $print_kode:expr, )+) => ( $( // for each format $( // for each required krate #[cfg(feature = $feature)] extern crate $krate; )+ )+ /// Supported serialization formats. /// /// To enable all formats compile with the `all` feature. #[cfg_attr(test, derive(strum_macros::EnumIter))] #[derive(Debug, Clone)] pub enum Format { Json, $( #[cfg(feature = $feature)] $variant ),+ // TODO: Allow adding format at runtime when used as a lib? 
} impl Format { pub fn supported() -> &'static [&'static str] { &[ "json", $( #[cfg(feature = $feature)] stringify!($name) ),+ ] } pub fn all() -> &'static [&'static str] { &[ $( stringify!($name) ),+ ] } pub fn all_feature_names() -> &'static [&'static str] { &[ $( $feature ),+ ] } pub fn not_supported() -> &'static [&'static str] { &[ $( #[cfg(not(feature = $feature))] stringify!($name) ),+ ] } pub fn parse(input: &str) -> Option { if input.is_empty() { return None } if let Ok(Output { languages, .. }) = serde_json::from_str::(input) { return Some(languages); } $( // attributes are not yet allowed on `if` expressions #[cfg(feature = $feature)] { let parse = &{ $parse_kode }; if let Ok(Output { languages, .. }) = parse(input) { return Some(languages) } } )+ // Didn't match any of the compiled serialization formats None } pub fn print(&self, languages: &Languages) -> Result> { let output = Output { languages: (*languages).to_owned(), totals: languages.total() }; match *self { Format::Json => Ok(serde_json::to_string(&output)?), $( #[cfg(feature = $feature)] Format::$variant => { let print= &{ $print_kode }; Ok(print(&output)?) } ),+ } } } impl FromStr for Format { type Err = String; fn from_str(format: &str) -> Result { match format { "json" => Ok(Format::Json), $( stringify!($name) => { #[cfg(feature = $feature)] return Ok(Format::$variant); #[cfg(not(feature = $feature))] return Err(format!( "This version of tokei was compiled without \ any '{format}' serialization support, to enable serialization, \ reinstall tokei with the features flag. cargo install tokei --features {feature} If you want to enable all supported serialization formats, you can use the 'all' feature. cargo install tokei --features all\n", format = stringify!($name), feature = $feature) ); } ),+ format => Err(format!("{:?} is not a supported serialization format", format)), } } } ) } // The ordering of these determines the attempted order when parsing. 
supported_formats!( (cbor, "cbor", Cbor [serde_cbor, hex]) => |input| { hex::FromHex::from_hex(input) .map_err(|e: hex::FromHexError| >::from(e)) .and_then(|hex: Vec<_>| Ok(serde_cbor::from_slice(&hex)?)) }, |languages| serde_cbor::to_vec(&languages).map(hex::encode), (json, "json", Json [serde_json]) => serde_json::from_str, serde_json::to_string, (yaml, "yaml", Yaml [serde_yaml]) => serde_yaml::from_str, serde_yaml::to_string, ); pub fn add_input(input: &str, languages: &mut Languages) -> bool { use std::fs::File; use std::io::Read; let map = match File::open(input) { Ok(mut file) => { let contents = { let mut contents = String::new(); file.read_to_string(&mut contents) .expect("Couldn't read file"); contents }; convert_input(&contents) } Err(_) => { if input == "stdin" { let mut stdin = ::std::io::stdin(); let mut buffer = String::new(); let _ = stdin.read_to_string(&mut buffer); convert_input(&buffer) } else { convert_input(input) } } }; if let Some(map) = map { *languages += map; true } else { false } } fn convert_input(contents: &str) -> Option { self::Format::parse(contents) } #[cfg(test)] mod tests { use super::*; use strum::IntoEnumIterator; use tokei::Config; use std::path::Path; #[test] fn formatting_print_matches_parse() { // Get language results from sample dir let data_dir = Path::new("tests").join("data"); let mut langs = Languages::new(); langs.get_statistics(&[data_dir], &[], &Config::default()); // Check that the value matches after serializing and deserializing for variant in Format::iter() { let serialized = variant .print(&langs) .unwrap_or_else(|_| panic!("Failed serializing variant: {:?}", variant)); let deserialized = Format::parse(&serialized) .unwrap_or_else(|| panic!("Failed deserializing variant: {:?}", variant)); assert_eq!(*langs, deserialized); } } } tokei-13.0.0/src/language/embedding.rs000064400000000000000000000152771046102023000156720ustar 00000000000000#![allow(clippy::trivial_regex)] use crate::LanguageType; use 
once_cell::sync::Lazy; use regex::bytes::Regex; pub static START_SCRIPT: Lazy = Lazy::new(|| Regex::new(r#""#).unwrap()); pub static END_SCRIPT: Lazy = Lazy::new(|| Regex::new(r#""#).unwrap()); pub static START_STYLE: Lazy = Lazy::new(|| Regex::new(r#""#).unwrap()); pub static END_STYLE: Lazy = Lazy::new(|| Regex::new(r#""#).unwrap()); pub static START_TEMPLATE: Lazy = Lazy::new(|| Regex::new(r#""#).unwrap()); pub static END_TEMPLATE: Lazy = Lazy::new(|| Regex::new(r#""#).unwrap()); pub static STARTING_MARKDOWN_REGEX: Lazy = Lazy::new(|| Regex::new(r#"```\S+\s"#).unwrap()); pub static ENDING_MARKDOWN_REGEX: Lazy = Lazy::new(|| Regex::new(r#"```\s?"#).unwrap()); pub static STARTING_LF_BLOCK_REGEX: Lazy = Lazy::new(|| Regex::new(r#"\{="#).unwrap()); pub static ENDING_LF_BLOCK_REGEX: Lazy = Lazy::new(|| Regex::new(r#"=}"#).unwrap()); /// A memory of a regex matched. /// The values provided by `Self::start` and `Self::end` are in the same space as the /// start value supplied to `RegexCache::build` pub struct Capture<'a> { start: usize, text: &'a [u8], } impl Capture<'_> { #[inline(always)] fn start(&self) -> usize { self.start } #[inline(always)] pub fn end(&self) -> usize { self.start + self.text.len() } #[inline(always)] pub fn as_bytes(&self) -> &[u8] { self.text } } impl<'a> std::fmt::Debug for Capture<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Capture") .field("start", &self.start) .field("end", &self.end()) .field("text", &String::from_utf8_lossy(self.text)) .finish() } } pub(crate) struct RegexCache<'a> { inner: Option>, } /// Embedding regexes are similar between different sets of languages. 
/// `RegexFamily` records both which family the language belongs to, /// as well as the actual matches pub(crate) enum RegexFamily<'a> { HtmlLike(HtmlLike<'a>), LinguaFranca(SimpleCapture<'a>), Markdown(SimpleCapture<'a>), Rust, } pub(crate) struct HtmlLike<'a> { start_script: Option]>>, start_style: Option]>>, start_template: Option]>>, } pub(crate) struct SimpleCapture<'a> { starts: Option]>>, } impl<'a> HtmlLike<'a> { pub fn start_script_in_range<'this>( &'this self, start: usize, end: usize, ) -> Option>> { filter_range(self.start_script.as_ref()?, start, end) } pub fn start_style_in_range<'this>( &'this self, start: usize, end: usize, ) -> Option>> { filter_range(self.start_style.as_ref()?, start, end) } pub fn start_template_in_range<'this>( &'this self, start: usize, end: usize, ) -> Option>> { filter_range(self.start_template.as_ref()?, start, end) } } impl<'a> SimpleCapture<'a> { pub fn starts_in_range<'this>( &'this self, start: usize, end: usize, ) -> Option<&'this Capture<'a>> { filter_range(self.starts.as_ref()?, start, end).and_then(|mut it| it.next()) } fn make_capture( regex: &Regex, lines: &'a [u8], start: usize, end: usize, ) -> Option> { let capture = SimpleCapture { starts: save_captures(regex, lines, start, end), }; if capture.starts.is_some() { Some(capture) } else { None } } } fn filter_range<'dataset, 'cap>( dataset: &'dataset [Capture<'cap>], start: usize, end: usize, ) -> Option>> { let pos = dataset .binary_search_by_key(&start, |cap| cap.start()) .ok()?; if pos >= dataset.len() || dataset[pos].end() > end { None } else { Some( dataset[pos..] .iter() .take_while(move |cap| cap.end() <= end), ) } } impl<'a> RegexCache<'a> { /// Returns the language family for which regexes were matched, if any pub(crate) fn family(&self) -> Option<&RegexFamily> { self.inner.as_ref() } /// Tries to memoize any matches of embedding regexes that occur within lines[start..end] /// for the given language. 
Any `Capture` values eventually recovered will use the same /// zero for their start as the given `start` argument. pub(crate) fn build(lang: LanguageType, lines: &'a [u8], start: usize, end: usize) -> Self { let inner = match lang { LanguageType::Markdown | LanguageType::UnrealDeveloperMarkdown => { SimpleCapture::make_capture(&STARTING_MARKDOWN_REGEX, lines, start, end) .map(RegexFamily::Markdown) } LanguageType::Rust => Some(RegexFamily::Rust), LanguageType::LinguaFranca => { SimpleCapture::make_capture(&STARTING_LF_BLOCK_REGEX, lines, start, end) .map(RegexFamily::LinguaFranca) } LanguageType::Html | LanguageType::RubyHtml | LanguageType::Svelte | LanguageType::Vue | LanguageType::GlimmerJs | LanguageType::GlimmerTs => { let html = HtmlLike { start_script: save_captures(&START_SCRIPT, lines, start, end), start_style: save_captures(&START_STYLE, lines, start, end), start_template: save_captures(&START_TEMPLATE, lines, start, end), }; if html.start_script.is_some() || html.start_style.is_some() || html.start_template.is_some() { Some(RegexFamily::HtmlLike(html)) } else { None } } _ => None, }; Self { inner } } } fn save_captures<'a>( regex: &Regex, lines: &'a [u8], start: usize, end: usize, ) -> Option]>> { let v: Vec<_> = regex .captures(&lines[start..end])? 
.iter() .flatten() .map(|cap| Capture { start: start + cap.start(), text: cap.as_bytes(), }) .collect(); if v.is_empty() { None } else { Some(v.into()) } } tokei-13.0.0/src/language/language_type.rs000064400000000000000000000305501046102023000165670ustar 00000000000000use std::{ borrow::Cow, fmt, fs::File, io::{self, Read}, path::{Path, PathBuf}, str::FromStr, }; use crate::{ config::Config, language::syntax::{FileContext, LanguageContext, SyntaxCounter}, stats::{CodeStats, Report}, utils::{ext::SliceExt, fs as fsutils}, }; use encoding_rs_io::DecodeReaderBytesBuilder; use grep_searcher::{LineIter, LineStep}; use once_cell::sync::Lazy; use rayon::prelude::*; use serde::Serialize; use self::LanguageType::*; include!(concat!(env!("OUT_DIR"), "/language_type.rs")); impl Serialize for LanguageType { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, { serializer.serialize_str(self.name()) } } impl LanguageType { /// Parses a given [`Path`] using the [`LanguageType`]. Returning [`Report`] /// on success and giving back ownership of [`PathBuf`] on error. pub fn parse(self, path: PathBuf, config: &Config) -> Result { let text = { let f = match File::open(&path) { Ok(f) => f, Err(e) => return Err((e, path)), }; let mut s = Vec::new(); let mut reader = DecodeReaderBytesBuilder::new().build(f); if let Err(e) = reader.read_to_end(&mut s) { return Err((e, path)); } s }; let mut stats = Report::new(path); stats += self.parse_from_slice(text, config); Ok(stats) } /// Parses the text provided as the given [`LanguageType`]. pub fn parse_from_str>(self, text: A, config: &Config) -> CodeStats { self.parse_from_slice(text.as_ref().as_bytes(), config) } /// Parses the bytes provided as the given [`LanguageType`]. 
pub fn parse_from_slice>(self, text: A, config: &Config) -> CodeStats { let text = text.as_ref(); if self == Jupyter { return self .parse_jupyter(text.as_ref(), config) .unwrap_or_default(); } let syntax = { let mut syntax_mut = SyntaxCounter::new(self); if self == LinguaFranca { syntax_mut.lf_embedded_language = self.find_lf_target_language(text); } syntax_mut }; if let Some(end) = syntax.shared.important_syntax.find(text).and_then(|m| { // Get the position of the last line before the important // syntax. text[..=m.start()] .iter() .rev() .position(|&c| c == b'\n') .filter(|&p| p != 0) .map(|p| m.start() - p) }) { let (skippable_text, rest) = text.split_at(end + 1); let is_fortran = syntax.shared.is_fortran; let is_literate = syntax.shared.is_literate; let comments = syntax.shared.line_comments; trace!( "Using Simple Parse on {:?}", String::from_utf8_lossy(skippable_text) ); let parse_lines = move || self.parse_lines(config, rest, CodeStats::new(), syntax); let simple_parse = move || { LineIter::new(b'\n', skippable_text) .par_bridge() .map(|line| { // FORTRAN has a rule where it only counts as a comment if it's the // first character in the column, so removing starting whitespace // could cause a miscount. 
let line = if is_fortran { line } else { line.trim() }; if line.trim().is_empty() { (1, 0, 0) } else if is_literate || comments.iter().any(|c| line.starts_with(c.as_bytes())) { (0, 0, 1) } else { (0, 1, 0) } }) .reduce(|| (0, 0, 0), |a, b| (a.0 + b.0, a.1 + b.1, a.2 + b.2)) }; let (mut stats, (blanks, code, comments)) = rayon::join(parse_lines, simple_parse); stats.blanks += blanks; stats.code += code; stats.comments += comments; stats } else { self.parse_lines(config, text, CodeStats::new(), syntax) } } #[inline] fn parse_lines( self, config: &Config, lines: &[u8], mut stats: CodeStats, mut syntax: SyntaxCounter, ) -> CodeStats { let mut stepper = LineStep::new(b'\n', 0, lines.len()); while let Some((start, end)) = stepper.next(lines) { let line = &lines[start..end]; // FORTRAN has a rule where it only counts as a comment if it's the // first character in the column, so removing starting whitespace // could cause a miscount. let line = if syntax.shared.is_fortran { line } else { line.trim() }; trace!("{}", String::from_utf8_lossy(line)); if syntax.try_perform_single_line_analysis(line, &mut stats) { continue; } let started_in_comments = !syntax.stack.is_empty() || (config.treat_doc_strings_as_comments == Some(true) && syntax.quote.is_some() && syntax.quote_is_doc_quote); let ended_with_comments = match syntax.perform_multi_line_analysis(lines, start, end, config) { crate::language::syntax::AnalysisReport::Normal(end) => end, crate::language::syntax::AnalysisReport::ChildLanguage(FileContext { language, end, stats: blob, }) => { match language { LanguageContext::Markdown { balanced, language } => { // Add the lines for the code fences. stats.comments += if balanced { 2 } else { 1 }; // Add the code inside the fence to the stats. *stats.blobs.entry(language).or_default() += blob; } LanguageContext::Rust => { // Add all the markdown blobs. 
*stats.blobs.entry(LanguageType::Markdown).or_default() += blob; } LanguageContext::LinguaFranca => { let child_lang = syntax.get_lf_target_language(); *stats.blobs.entry(child_lang).or_default() += blob; } LanguageContext::Html { language } => { stats.code += 1; // Add all the markdown blobs. *stats.blobs.entry(language).or_default() += blob; } } // Advance to after the language code and the delimiter.. stepper = LineStep::new(b'\n', end, lines.len()); continue; } }; trace!("{}", String::from_utf8_lossy(line)); if syntax.shared.is_literate || syntax.line_is_comment(line, config, ended_with_comments, started_in_comments) { stats.comments += 1; trace!("Comment No.{}", stats.comments); trace!("Was the Comment stack empty?: {}", !started_in_comments); } else { stats.code += 1; trace!("Code No.{}", stats.code); } } stats } fn parse_jupyter(&self, json: &[u8], config: &Config) -> Option { #[derive(Deserialize)] struct Jupyter { cells: Vec, metadata: JupyterMetadata, } #[derive(Clone, Copy, Deserialize, PartialEq, Eq)] #[serde(rename_all = "lowercase")] enum CellType { Markdown, Code, } #[derive(Deserialize)] struct JupyterCell { cell_type: CellType, source: Vec, } #[derive(Deserialize)] struct JupyterMetadata { kernelspec: serde_json::Value, language_info: serde_json::Value, } let jupyter: Jupyter = serde_json::from_slice(json).ok()?; let mut jupyter_stats = CodeStats::new(); let language = jupyter .metadata .kernelspec .get("language") .and_then(serde_json::Value::as_str) .and_then(|v| LanguageType::from_str(v).ok()) .or_else(|| { jupyter .metadata .language_info .get("file_extension") .and_then(serde_json::Value::as_str) .and_then(LanguageType::from_file_extension) }) .unwrap_or(LanguageType::Python); let iter = jupyter .cells .par_iter() .map(|cell| match cell.cell_type { CellType::Markdown => ( LanguageType::Markdown, LanguageType::Markdown.parse_from_str(cell.source.join(""), config), ), CellType::Code => ( language, language.parse_from_str(cell.source.join(""), 
config), ), }) .collect::>(); for (language, stats) in iter { *jupyter_stats.blobs.entry(language).or_default() += &stats; jupyter_stats += &stats; } Some(jupyter_stats) } /// The embedded language in LF is declared in a construct that looks like this: `target C;`, `target Python`. /// This is the first thing in the file (although there may be comments before). fn find_lf_target_language(&self, bytes: &[u8]) -> Option { use regex::bytes::Regex; static LF_TARGET_REGEX: Lazy = Lazy::new(|| Regex::new(r#"(?m)\btarget\s+(\w+)\s*($|;|\{)"#).unwrap()); LF_TARGET_REGEX.captures(bytes).and_then(|captures| { let name = captures.get(1).unwrap().as_bytes(); if name == b"CCpp" { // this is a special alias for the C target in LF Some(C) } else { let name_str = &String::from_utf8_lossy(name); let by_name = LanguageType::from_name(name_str); if by_name.is_none() { trace!("LF target not recognized: {}", name_str); } by_name } }) } } #[cfg(test)] mod tests { use super::*; use std::{fs, path::Path}; #[test] fn rust_allows_nested() { assert!(LanguageType::Rust.allows_nested()); } fn assert_stats(stats: &CodeStats, blanks: usize, code: usize, comments: usize) { assert_eq!(stats.blanks, blanks, "expected {} blank lines", blanks); assert_eq!(stats.code, code, "expected {} code lines", code); assert_eq!( stats.comments, comments, "expected {} comment lines", comments ); } #[test] fn jupyter_notebook_has_correct_totals() { let sample_notebook = fs::read_to_string(Path::new("tests").join("data").join("jupyter.ipynb")).unwrap(); let stats = LanguageType::Jupyter .parse_jupyter(sample_notebook.as_bytes(), &Config::default()) .unwrap(); assert_stats(&stats, 115, 528, 333); } #[test] fn lf_embedded_language_is_counted() { let file_text = fs::read_to_string(Path::new("tests").join("data").join("linguafranca.lf")).unwrap(); let stats = LinguaFranca.parse_from_str(file_text, &Config::default()); assert_stats(&stats, 9, 11, 8); assert_eq!(stats.blobs.len(), 1, "num embedded languages"); let 
rust_stats = stats.blobs.get(&Rust).expect("should have a Rust entry"); assert_stats(rust_stats, 2, 5, 1); } } tokei-13.0.0/src/language/language_type.tera.rs000064400000000000000000000402711046102023000175220ustar 00000000000000use arbitrary::Arbitrary; /// Represents a individual programming language. Can be used to provide /// information about the language, such as multi line comments, single line /// comments, string literal syntax, whether a given language allows nesting /// comments. #[derive(Deserialize)] #[derive(Arbitrary, Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[non_exhaustive] #[allow(clippy::upper_case_acronyms)] pub enum LanguageType { {% for key, value in languages -%} #[allow(missing_docs)] {% if value.name is defined %} #[serde(alias = "{{value.name}}")] {% else %} #[serde(alias = "{{key}}")] {% endif %} {{key}}, {% endfor %} } impl LanguageType { /// Returns the display name of a language. /// /// ``` /// # use tokei::*; /// let bash = LanguageType::Bash; /// /// assert_eq!(bash.name(), "BASH"); /// ``` pub fn name(self) -> &'static str { match self { {% for key, value in languages -%} {{key}} => {% if value.name %}"{{value.name}}"{% else %}"{{key}}"{% endif %}, {% endfor %} } } pub(crate) fn _is_blank(self) -> bool { match self { {% for key, v in languages -%} {{key}} => {{ v.blank | default(value=false) }}, {% endfor %} } } pub(crate) fn is_fortran(self) -> bool { self == LanguageType::FortranModern || self == LanguageType::FortranLegacy } /// Returns whether the language is "literate", meaning that it considered /// to primarily be documentation and is counted primarily as comments /// rather than procedural code. 
pub fn is_literate(self) -> bool { match self { {% for key, v in languages -%} {{key}} => {{ v.literate | default(value=false) }}, {% endfor %} } } /// Provides every variant in a Vec pub fn list() -> &'static [(Self, &'static [&'static str])] { &[{% for key, val in languages -%} ({{key}}, {% if val.extensions %} &[{% for extension in val.extensions %}"{{extension}}", {% endfor %}], {% else %} &[], {% endif %}), {% endfor %}] } /// Returns the single line comments of a language. /// ``` /// use tokei::LanguageType; /// let lang = LanguageType::Rust; /// assert_eq!(lang.line_comments(), &["//"]); /// ``` pub fn line_comments(self) -> &'static [&'static str] { match self { {% for key, value in languages -%} {{key}} => &[{% for item in value.line_comment | default(value=[]) %}"{{item}}",{% endfor %}], {% endfor %} } } /// Returns the single line comments of a language. /// ``` /// use tokei::LanguageType; /// let lang = LanguageType::Rust; /// assert_eq!(lang.multi_line_comments(), &[("/*", "*/")]); /// ``` pub fn multi_line_comments(self) -> &'static [(&'static str, &'static str)] { match self { {% for key, value in languages -%} {{key}} => &[ {%- for items in value.multi_line_comments | default(value=[]) -%} ({% for item in items %}"{{item}}",{% endfor %}), {%- endfor -%} ], {% endfor %} } } /// Returns whether the language allows nested multi line comments. /// ``` /// use tokei::LanguageType; /// let lang = LanguageType::Rust; /// assert!(lang.allows_nested()); /// ``` pub fn allows_nested(self) -> bool { match self { {% for key, v in languages -%} {{key}} => {{ v.nested | default(value=false) }}, {% endfor %} } } /// Returns what nested comments the language has. (Currently only D has /// any of this type.) 
/// ``` /// use tokei::LanguageType; /// let lang = LanguageType::D; /// assert_eq!(lang.nested_comments(), &[("/+", "+/")]); /// ``` pub fn nested_comments(self) -> &'static [(&'static str, &'static str)] { match self { {% for key, value in languages -%} {{key}} => &[ {%- for items in value.nested_comments | default(value=[]) -%} ({% for item in items %}"{{item}}",{% endfor %}), {%- endfor -%} ], {% endfor %} } } /// Returns the quotes of a language. /// ``` /// use tokei::LanguageType; /// let lang = LanguageType::C; /// assert_eq!(lang.quotes(), &[("\"", "\"")]); /// ``` pub fn quotes(self) -> &'static [(&'static str, &'static str)] { match self { {% for key, value in languages -%} {{key}} => &[ {%- for items in value.quotes | default(value=[]) -%} ({% for item in items %}"{{item}}",{% endfor %}), {%- endfor -%} ], {% endfor %} } } /// Returns the verbatim quotes of a language. /// ``` /// use tokei::LanguageType; /// let lang = LanguageType::CSharp; /// assert_eq!(lang.verbatim_quotes(), &[("@\"", "\"")]); /// ``` pub fn verbatim_quotes(self) -> &'static [(&'static str, &'static str)] { match self { {% for key, value in languages -%} {{key}} => &[ {%- for items in value.verbatim_quotes | default(value=[]) -%} ({% for item in items %}"{{item}}",{% endfor %}), {%- endfor -%} ], {% endfor %} } } /// Returns the doc quotes of a language. /// ``` /// use tokei::LanguageType; /// let lang = LanguageType::Python; /// assert_eq!(lang.doc_quotes(), &[("\"\"\"", "\"\"\""), ("'''", "'''")]); /// ``` pub fn doc_quotes(self) -> &'static [(&'static str, &'static str)] { match self { {% for key, value in languages -%} {{key}} => &[ {% for items in value.doc_quotes | default(value=[])-%} ({% for item in items %}"{{item}}",{% endfor %}), {%- endfor %} ], {%- endfor %} } } /// Returns the shebang of a language. 
/// ``` /// use tokei::LanguageType; /// let lang = LanguageType::Bash; /// assert_eq!(lang.shebangs(), &["#!/bin/bash"]); /// ``` pub fn shebangs(self) -> &'static [&'static str] { match self { {% for key, lang in languages -%} {{key}} => &[{% for item in lang.shebangs | default(value=[]) %}"{{item}}",{% endfor %}], {% endfor %} } } pub(crate) fn any_multi_line_comments(self) -> &'static [(&'static str, &'static str)] { match self { {% for key, value in languages -%} {{key}} => &[ {%- set starting_multi_line_comments = value.multi_line_comments | default(value=[]) -%} {%- set starting_nested_comments = value.nested_comments | default(value=[]) -%} {%- for item in starting_multi_line_comments | concat(with=starting_nested_comments) -%} ("{{item.0}}", "{{item.1}}"), {%- endfor -%} ], {% endfor %} } } pub(crate) fn any_comments(self) -> &'static [&'static str] { match self { {% for key, value in languages -%} {{key}} => &[ {%- set starting_multi_line_comments = value.multi_line_comments | default(value=[]) -%} {%- set starting_nested_comments = value.nested_comments | default(value=[]) -%} {%- for item in starting_multi_line_comments | concat(with=starting_nested_comments) -%} "{{item.0}}", "{{item.1}}", {%- endfor -%} {%- for item in value.line_comment | default(value=[]) -%} "{{item}}", {%- endfor -%} ], {% endfor %} } } /// Returns the parts of syntax that determines whether tokei can skip large /// parts of analysis. 
pub fn important_syntax(self) -> &'static [&'static str] { match self { {% for key, value in languages -%} {%- set starting_quotes = value.quotes | default(value=[]) | map(attribute="0") -%} {%- set starting_doc_quotes = value.doc_quotes | default(value=[]) | map(attribute="0") -%} {%- set starting_multi_line_comments = value.multi_line_comments | default(value=[]) | map(attribute="0") -%} {%- set starting_nested_comments = value.nested_comments | default(value=[]) | map(attribute="0") -%} {%- set important_syntax = value.important_syntax | default(value=[]) -%} {{key}} => &[ {%- for item in starting_quotes | concat(with=starting_doc_quotes) | concat(with=starting_multi_line_comments) | concat(with=starting_nested_comments) | concat(with=important_syntax) -%} "{{item}}", {%- endfor -%} {%- for context in value.contexts | default(value=[]) -%} {% if value.kind == "html" %} "<{{context.tag}}", {% endif %} {%- endfor -%} ], {% endfor %} } } /// Get language from a file path. May open and read the file. /// /// ```no_run /// use tokei::{Config, LanguageType}; /// /// let rust = LanguageType::from_path("./main.rs", &Config::default()); /// /// assert_eq!(rust, Some(LanguageType::Rust)); /// ``` pub fn from_path>(entry: P, _config: &Config) -> Option { let entry = entry.as_ref(); if let Some(filename) = fsutils::get_filename(entry) { match &*filename { {% for key, value in languages -%} {%- if value.filenames -%} {%- for item in value.filenames -%} | "{{item}}" {%- endfor -%} => return Some({{key}}), {% endif -%} {%- endfor %} _ => () } } match fsutils::get_extension(entry) { Some(extension) => LanguageType::from_file_extension(extension.as_str()), None => LanguageType::from_shebang(entry), } } /// Get language from a file extension. 
/// /// ```no_run /// use tokei::LanguageType; /// /// let rust = LanguageType::from_file_extension("rs"); /// /// assert_eq!(rust, Some(LanguageType::Rust)); /// ``` #[must_use] pub fn from_file_extension(extension: &str) -> Option { match extension { {% for key, value in languages -%} {%- if value.extensions -%} {%- for item in value.extensions %}| "{{item}}" {% endfor %}=> Some({{key}}), {% endif -%} {%- endfor %} extension => { warn!("Unknown extension: {}", extension); None }, } } /// Get language from its name. /// /// ```no_run /// use tokei::LanguageType; /// /// let rust = LanguageType::from_name("Rust"); /// /// assert_eq!(rust, Some(LanguageType::Rust)); /// ``` #[must_use] pub fn from_name(name: &str) -> Option { match name { {% for key, value in languages -%} {% if value.name and value.name != key -%} | "{{value.name}}" {% endif -%} | "{{key}}" => Some({{key}}), {% endfor %} unknown => { warn!("Unknown language name: {}", unknown); None }, } } /// Get language from its MIME type if available. /// /// ```no_run /// use tokei::LanguageType; /// /// let lang = LanguageType::from_mime("application/javascript"); /// /// assert_eq!(lang, Some(LanguageType::JavaScript)); /// ``` #[must_use] pub fn from_mime(mime: &str) -> Option { match mime { {% for key, value in languages -%} {%- if value.mime -%} {%- for item in value.mime %}| "{{item}}" {% endfor %}=> Some({{key}}), {% endif -%} {%- endfor %} _ => { warn!("Unknown MIME: {}", mime); None }, } } /// Get language from a shebang. May open and read the file. /// /// ```no_run /// use tokei::LanguageType; /// /// let rust = LanguageType::from_shebang("./main.rs"); /// /// assert_eq!(rust, Some(LanguageType::Rust)); /// ``` pub fn from_shebang>(entry: P) -> Option { // Read at max `READ_LIMIT` bytes from the given file. // A typical shebang line has a length less than 32 characters; // e.g. 
'#!/bin/bash' - 11B / `#!/usr/bin/env python3` - 22B // It is *very* unlikely the file contains a valid shebang syntax // if we don't find a newline character after searching the first 128B. const READ_LIMIT: usize = 128; let mut file = File::open(entry).ok()?; let mut buf = [0; READ_LIMIT]; let len = file.read(&mut buf).ok()?; let buf = &buf[..len]; let first_line = buf.split(|b| *b == b'\n').next()?; let first_line = std::str::from_utf8(first_line).ok()?; let mut words = first_line.split_whitespace(); match words.next() { {# First match against any shebang paths, and then check if the language matches any found in the environment shebang path. #} {% for key, value in languages -%} {%- if value.shebangs %} {%- for item in value.shebangs %}| Some("{{item}}") {% endfor %}=> Some({{key}}), {% endif -%} {%- endfor %} Some("#!/usr/bin/env") => { if let Some(word) = words.next() { match word { {% for key, value in languages -%} {%- if value.env -%} {%- for item in value.env %} {% if loop.index == 1 %} _ if word.starts_with("{{item}}") {% else %} || word.starts_with("{{item}}") {% endif %} {% endfor %}=> Some({{key}}), {% endif -%} {%- endfor %} env => { warn!("Unknown environment: {:?}", env); None } } } else { None } } _ => None, } } } impl FromStr for LanguageType { type Err = &'static str; fn from_str(from: &str) -> Result { match &*from.to_lowercase() { {% for key, value in languages %} {% if value.name %}"{{value.name | lower}}"{% else %}"{{key | lower}}"{% endif %} => Ok({{key}}), {% endfor %} _ => Err("Language not found, please use `-l` to see all available\ languages."), } } } impl fmt::Display for LanguageType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name()) } } impl<'a> From for Cow<'a, LanguageType> { fn from(from: LanguageType) -> Self { Cow::Owned(from) } } impl<'a> From<&'a LanguageType> for Cow<'a, LanguageType> { fn from(from: &'a LanguageType) -> Self { Cow::Borrowed(from) } } 
tokei-13.0.0/src/language/languages.rs000064400000000000000000000107311046102023000157100ustar 00000000000000use std::{ collections::{btree_map, BTreeMap}, iter::IntoIterator, ops::{AddAssign, Deref, DerefMut}, path::Path, }; use rayon::prelude::*; use crate::{ config::Config, language::{Language, LanguageType}, utils, }; /// A newtype representing a list of languages counted in the provided /// directory. /// ([_List of /// Languages_](https://github.com/XAMPPRocky/tokei#supported-languages)) #[derive(Debug, Default, PartialEq)] pub struct Languages { inner: BTreeMap, } impl serde::Serialize for Languages { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, { self.inner.serialize(serializer) } } impl<'de> serde::Deserialize<'de> for Languages { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let map = <_>::deserialize(deserializer)?; Ok(Self::from_previous(map)) } } impl Languages { fn from_previous(map: BTreeMap) -> Self { use std::collections::btree_map::Entry; let mut me = Self::new(); for (name, input_language) in map { match me.entry(name) { Entry::Occupied(mut entry) => { *entry.get_mut() += input_language; } Entry::Vacant(entry) => { entry.insert(input_language); } } } me } /// Populates the `Languages` struct with statistics about languages /// provided by [`Language`]. /// /// Takes a `&[&str]` of paths to recursively traverse, paths can be /// relative, absolute or glob paths. A second `&[&str]` of paths to ignore, /// these strings use the `.gitignore` syntax, such as `target` /// or `**/*.bk`. 
/// /// ```no_run /// use tokei::{Config, Languages}; /// /// let mut languages = Languages::new(); /// languages.get_statistics(&["."], &[".git", "target"], &Config::default()); /// ``` /// /// [`Language`]: struct.Language.html pub fn get_statistics>( &mut self, paths: &[A], ignored: &[&str], config: &Config, ) { utils::fs::get_all_files(paths, ignored, &mut self.inner, config); self.inner.par_iter_mut().for_each(|(_, l)| l.total()); } /// Constructs a new, Languages struct. Languages is always empty and does /// not allocate. /// /// ```rust /// # use tokei::*; /// let languages = Languages::new(); /// ``` #[must_use] pub fn new() -> Self { Languages::default() } /// Summary of the Languages struct. #[must_use] pub fn total(self: &Languages) -> Language { let mut total = Language::new(); for (ty, l) in self { let language = l.summarise(); total.comments += language.comments; total.blanks += language.blanks; total.code += language.code; total.inaccurate |= language.inaccurate; total.children.insert(*ty, language.reports.clone()); } total } } impl IntoIterator for Languages { type Item = as IntoIterator>::Item; type IntoIter = as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { self.inner.into_iter() } } impl<'a> IntoIterator for &'a Languages { type Item = (&'a LanguageType, &'a Language); type IntoIter = btree_map::Iter<'a, LanguageType, Language>; fn into_iter(self) -> Self::IntoIter { self.inner.iter() } } impl<'a> IntoIterator for &'a mut Languages { type Item = (&'a LanguageType, &'a mut Language); type IntoIter = btree_map::IterMut<'a, LanguageType, Language>; fn into_iter(self) -> Self::IntoIter { self.inner.iter_mut() } } impl AddAssign> for Languages { fn add_assign(&mut self, rhs: BTreeMap) { for (name, language) in rhs { if let Some(result) = self.inner.get_mut(&name) { *result += language; } } } } impl Deref for Languages { type Target = BTreeMap; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for Languages { fn 
deref_mut(&mut self) -> &mut BTreeMap { &mut self.inner } } tokei-13.0.0/src/language/mod.rs000064400000000000000000000123441046102023000145230ustar 00000000000000mod embedding; pub mod language_type; pub mod languages; mod syntax; use std::{collections::BTreeMap, mem, ops::AddAssign}; pub use self::{language_type::*, languages::Languages}; use crate::{sort::Sort, stats::Report}; /// A struct representing statistics about a single Language. #[derive(Clone, Debug, Deserialize, Default, PartialEq, Serialize)] pub struct Language { /// The total number of blank lines. pub blanks: usize, /// The total number of lines of code. pub code: usize, /// The total number of comments(both single, and multi-line) pub comments: usize, /// A collection of statistics of individual files. pub reports: Vec, /// A map of any languages found in the reports. pub children: BTreeMap>, /// Whether this language had problems with file parsing pub inaccurate: bool, } impl Language { /// Constructs a new empty Language with the comments provided. /// /// ``` /// # use tokei::*; /// let mut rust = Language::new(); /// ``` #[must_use] pub fn new() -> Self { Self::default() } /// Returns the total number of lines. #[inline] #[must_use] pub fn lines(&self) -> usize { self.blanks + self.code + self.comments } /// Add a `Report` to the Language. This will not update the totals in the /// Language struct. pub fn add_report(&mut self, report: Report) { for (lang, stats) in &report.stats.blobs { let mut new_report = Report::new(report.name.clone()); new_report.stats = stats.clone(); self.children.entry(*lang).or_default().push(new_report); } self.reports.push(report); } /// Marks this language as possibly not reflecting correct stats. #[inline] pub fn mark_inaccurate(&mut self) { self.inaccurate = true; } /// Creates a new `Language` from `self`, which is a summarised version /// of the language that doesn't contain any children. 
It will count /// non-blank lines in child languages as code unless the child language is /// considered "literate" then it will be counted as comments. #[must_use] pub fn summarise(&self) -> Language { let mut summary = self.clone(); for reports in self.children.values() { for stats in reports.iter().map(|r| r.stats.summarise()) { summary.comments += stats.comments; summary.code += stats.code; summary.blanks += stats.blanks; } } summary } /// Totals up the statistics of the `Stat` structs currently contained in /// the language. /// /// ```no_run /// use std::{collections::BTreeMap, path::PathBuf}; /// use tokei::Language; /// /// let mut language = Language::new(); /// /// // Add stats... /// /// assert_eq!(0, language.lines()); /// /// language.total(); /// /// assert_eq!(10, language.lines()); /// ``` pub fn total(&mut self) { let mut blanks = 0; let mut code = 0; let mut comments = 0; for report in &self.reports { blanks += report.stats.blanks; code += report.stats.code; comments += report.stats.comments; } self.blanks = blanks; self.code = code; self.comments = comments; } /// Checks if the language is empty. Empty meaning it doesn't have any /// statistics. /// /// ``` /// # use tokei::*; /// let rust = Language::new(); /// /// assert!(rust.is_empty()); /// ``` #[must_use] pub fn is_empty(&self) -> bool { self.code == 0 && self.comments == 0 && self.blanks == 0 && self.children.is_empty() } /// Sorts each of the `Report`s contained in the language based /// on what category is provided. /// /// ```no_run /// use std::{collections::BTreeMap, path::PathBuf}; /// use tokei::{Language, Sort}; /// /// let mut language = Language::new(); /// /// // Add stats... 
/// /// language.sort_by(Sort::Lines); /// assert_eq!(20, language.reports[0].stats.lines()); /// /// language.sort_by(Sort::Code); /// assert_eq!(8, language.reports[0].stats.code); /// ``` pub fn sort_by(&mut self, category: Sort) { match category { Sort::Blanks => self .reports .sort_by(|a, b| b.stats.blanks.cmp(&a.stats.blanks)), Sort::Comments => self .reports .sort_by(|a, b| b.stats.comments.cmp(&a.stats.comments)), Sort::Code => self.reports.sort_by(|a, b| b.stats.code.cmp(&a.stats.code)), Sort::Files => self.reports.sort_by(|a, b| a.name.cmp(&b.name)), Sort::Lines => self .reports .sort_by(|a, b| b.stats.lines().cmp(&a.stats.lines())), } } } impl AddAssign for Language { fn add_assign(&mut self, mut rhs: Self) { self.comments += rhs.comments; self.blanks += rhs.blanks; self.code += rhs.code; self.reports.extend(mem::take(&mut rhs.reports)); self.children.extend(mem::take(&mut rhs.children)); self.inaccurate |= rhs.inaccurate; } } tokei-13.0.0/src/language/syntax.rs000064400000000000000000000573631046102023000153040ustar 00000000000000use std::sync::Arc; use aho_corasick::AhoCorasick; use dashmap::DashMap; use grep_searcher::LineStep; use log::Level::Trace; use once_cell::sync::Lazy; use super::embedding::{ RegexCache, RegexFamily, ENDING_LF_BLOCK_REGEX, ENDING_MARKDOWN_REGEX, END_SCRIPT, END_STYLE, END_TEMPLATE, }; use crate::LanguageType::LinguaFranca; use crate::{stats::CodeStats, utils::ext::SliceExt, Config, LanguageType}; /// Tracks the syntax of the language as well as the current state in the file. /// Current has what could be consider three types of mode. /// - `plain` mode: This is the normal state, blanks are counted as blanks, /// string literals can trigger `string` mode, and comments can trigger /// `comment` mode. /// - `string` mode: This when the state machine is current inside a string /// literal for a given language, comments cannot trigger `comment` mode while /// in `string` mode. 
/// - `comment` mode: This when the state machine is current inside a comment /// for a given language, strings cannot trigger `string` mode while in /// `comment` mode. #[derive(Clone, Debug)] pub(crate) struct SyntaxCounter { pub(crate) shared: Arc, pub(crate) quote: Option<&'static str>, pub(crate) quote_is_doc_quote: bool, pub(crate) stack: Vec<&'static str>, pub(crate) quote_is_verbatim: bool, pub(crate) lf_embedded_language: Option, } #[derive(Clone, Debug)] pub(crate) struct FileContext { pub(crate) language: LanguageContext, pub(crate) stats: CodeStats, pub(crate) end: usize, } impl FileContext { pub fn new(language: LanguageContext, end: usize, stats: CodeStats) -> Self { Self { language, stats, end, } } } #[derive(Clone, Debug)] pub(crate) enum LanguageContext { Html { language: LanguageType, }, LinguaFranca, Markdown { balanced: bool, language: LanguageType, }, Rust, } #[derive(Clone, Debug)] pub(crate) struct SharedMatchers { pub language: LanguageType, pub allows_nested: bool, pub doc_quotes: &'static [(&'static str, &'static str)], pub important_syntax: AhoCorasick, #[allow(dead_code)] pub any_comments: &'static [&'static str], pub is_fortran: bool, pub is_literate: bool, pub line_comments: &'static [&'static str], pub any_multi_line_comments: &'static [(&'static str, &'static str)], pub multi_line_comments: &'static [(&'static str, &'static str)], pub nested_comments: &'static [(&'static str, &'static str)], pub string_literals: &'static [(&'static str, &'static str)], pub verbatim_string_literals: &'static [(&'static str, &'static str)], } impl SharedMatchers { pub fn new(language: LanguageType) -> Arc { static MATCHERS: Lazy>> = Lazy::new(DashMap::new); MATCHERS .entry(language) .or_insert_with(|| Arc::new(Self::init(language))) .value() .clone() } pub fn init(language: LanguageType) -> Self { fn init_corasick(pattern: &[&'static str]) -> AhoCorasick { AhoCorasick::builder() .match_kind(aho_corasick::MatchKind::LeftmostLongest) 
.start_kind(aho_corasick::StartKind::Unanchored) .prefilter(true) .kind(Some(aho_corasick::AhoCorasickKind::DFA)) .build(pattern) .unwrap() } Self { language, allows_nested: language.allows_nested(), doc_quotes: language.doc_quotes(), is_fortran: language.is_fortran(), is_literate: language.is_literate(), important_syntax: init_corasick(language.important_syntax()), any_comments: language.any_comments(), line_comments: language.line_comments(), multi_line_comments: language.multi_line_comments(), any_multi_line_comments: language.any_multi_line_comments(), nested_comments: language.nested_comments(), string_literals: language.quotes(), verbatim_string_literals: language.verbatim_quotes(), } } } #[derive(Debug)] pub(crate) enum AnalysisReport { /// No child languages were found, contains a boolean representing whether /// the line ended with comments or not. Normal(bool), ChildLanguage(FileContext), } impl SyntaxCounter { pub(crate) fn new(language: LanguageType) -> Self { Self { shared: SharedMatchers::new(language), quote_is_doc_quote: false, quote_is_verbatim: false, stack: Vec::with_capacity(1), lf_embedded_language: None, quote: None, } } /// Returns whether the syntax is currently in plain mode. pub(crate) fn is_plain_mode(&self) -> bool { self.quote.is_none() && self.stack.is_empty() } /// Returns whether the syntax is currently in string mode. pub(crate) fn _is_string_mode(&self) -> bool { self.quote.is_some() } /// Returns whether the syntax is currently in comment mode. 
pub(crate) fn _is_comment_mode(&self) -> bool { !self.stack.is_empty() } pub(crate) fn get_lf_target_language(&self) -> LanguageType { // in case the target declaration was not found, default it to that language const DEFAULT_LANG: LanguageType = LinguaFranca; self.lf_embedded_language.unwrap_or(DEFAULT_LANG) } #[inline] pub(crate) fn parse_line_comment(&self, window: &[u8]) -> bool { if self.quote.is_some() || !self.stack.is_empty() { false } else if let Some(comment) = self .shared .line_comments .iter() .find(|c| window.starts_with(c.as_bytes())) { trace!("Start {:?}", comment); true } else { false } } /// Try to see if we can determine what a line is from examining the whole /// line at once. Returns `true` if successful. pub(crate) fn try_perform_single_line_analysis( &self, line: &[u8], stats: &mut crate::stats::CodeStats, ) -> bool { if !self.is_plain_mode() { false } else if line.trim().is_empty() { stats.blanks += 1; trace!("Blank No.{}", stats.blanks); true } else if self.shared.important_syntax.is_match(line) { false } else { trace!("^ Skippable"); if self.shared.is_literate || self .shared .line_comments .iter() .any(|c| line.starts_with(c.as_bytes())) { stats.comments += 1; trace!("Comment No.{}", stats.comments); } else { stats.code += 1; trace!("Code No.{}", stats.code); } true } } pub(crate) fn perform_multi_line_analysis( &mut self, lines: &[u8], start: usize, end: usize, config: &Config, ) -> AnalysisReport { let mut ended_with_comments = false; let mut skip = 0; macro_rules! 
skip { ($skip:expr) => {{ skip = $skip - 1; }}; } let regex_cache = RegexCache::build(self.shared.language, lines, start, end); for i in start..end { if skip != 0 { skip -= 1; continue; } let window = &lines[i..]; if window.trim().is_empty() { break; } ended_with_comments = false; let is_end_of_quote_or_multi_line = self .parse_end_of_quote(window) .or_else(|| self.parse_end_of_multi_line(window)); if let Some(skip_amount) = is_end_of_quote_or_multi_line { ended_with_comments = true; skip!(skip_amount); continue; } else if self.quote.is_some() { continue; } if let Some(child) = self.parse_context(lines, i, end, config, ®ex_cache) { return AnalysisReport::ChildLanguage(child); } let is_quote_or_multi_line = self .parse_quote(window) .or_else(|| self.parse_multi_line_comment(window)); if let Some(skip_amount) = is_quote_or_multi_line { skip!(skip_amount); continue; } if self.parse_line_comment(window) { ended_with_comments = true; break; } } AnalysisReport::Normal(ended_with_comments) } /// Performs a set of heuristics to determine whether a line is a comment or /// not. The procedure is as follows. /// /// - Yes/No: Counted as Comment /// /// 1. Check if we're in string mode /// 1. Check if string literal is a doc string and whether tokei has /// been configured to treat them as comments. /// - Yes: When the line starts with the doc string or when we are /// continuing from a previous line. /// - No: The string is a normal string literal or tokei isn't /// configured to count them as comments. /// 2. If we're not in string mode, check if we left it this on this line. /// - Yes: When we found a doc quote and we started in comments. /// 3. Yes: When the whole line is a comment e.g. `/* hello */` /// 4. Yes: When the previous line started a multi-line comment. /// 5. Yes: When the line starts with a comment. /// 6. No: Any other input. 
pub(crate) fn line_is_comment( &self, line: &[u8], config: &crate::Config, _ended_with_comments: bool, started_in_comments: bool, ) -> bool { let trimmed = line.trim(); let whole_line_is_comment = || { self.shared .line_comments .iter() .any(|c| trimmed.starts_with(c.as_bytes())) || self .shared .any_multi_line_comments .iter() .any(|(start, end)| { trimmed.starts_with(start.as_bytes()) && trimmed.ends_with(end.as_bytes()) }) }; let starts_with_comment = || { let quote = match self.stack.last() { Some(q) => q, _ => return false, }; self.shared .any_multi_line_comments .iter() .any(|(start, end)| end == quote && trimmed.starts_with(start.as_bytes())) }; // `Some(true)` in order to respect the current configuration. #[allow(clippy::if_same_then_else)] if self.quote.is_some() { if self.quote_is_doc_quote && config.treat_doc_strings_as_comments == Some(true) { self.quote.map_or(false, |q| line.starts_with(q.as_bytes())) || (self.quote.is_some()) } else { false } } else if self .shared .doc_quotes .iter() .any(|(_, e)| line.contains_slice(e.as_bytes())) && started_in_comments { true } else if (whole_line_is_comment)() { true } else if started_in_comments { true } else { (starts_with_comment)() } } #[inline] pub(crate) fn parse_context( &mut self, lines: &[u8], start: usize, end: usize, config: &Config, regex_cache: &RegexCache, ) -> Option { use std::str::FromStr; // static TYPE_REGEX: Lazy = Lazy::new(|| Regex::new(r#"type="(.*)".*>"#).unwrap()); if self.quote.is_some() || !self.stack.is_empty() { return None; } match regex_cache.family()? 
{ RegexFamily::Markdown(md) => { if !lines[start..end].contains_slice(b"```") { return None; } let opening_fence = md.starts_in_range(start, end)?; let start_of_code = opening_fence.end(); let closing_fence = ENDING_MARKDOWN_REGEX.find(&lines[start_of_code..]); if let Some(m) = &closing_fence { trace!("{:?}", String::from_utf8_lossy(m.as_bytes())); } let end_of_code = closing_fence .map_or_else(|| lines.len(), |fence| start_of_code + fence.start()); let end_of_code_block = closing_fence.map_or_else(|| lines.len(), |fence| start_of_code + fence.end()); let balanced = closing_fence.is_some(); let identifier = &opening_fence.as_bytes().trim()[3..]; let language = identifier .split(|&b| b == b',') .find_map(|s| LanguageType::from_str(&String::from_utf8_lossy(s)).ok())?; trace!( "{} BLOCK: {:?}", language, String::from_utf8_lossy(&lines[start_of_code..end_of_code]) ); let stats = language.parse_from_slice(lines[start_of_code..end_of_code].trim(), config); Some(FileContext::new( LanguageContext::Markdown { balanced, language }, end_of_code_block, stats, )) } RegexFamily::Rust => { let rest = &lines[start..]; let comment_syntax = if rest.trim_start().starts_with(b"///") { b"///" } else if rest.trim_start().starts_with(b"//!") { b"//!" 
} else { return None; }; let mut stepper = LineStep::new(b'\n', start, lines.len()); let mut markdown = Vec::new(); let mut end_of_block = lines.len(); while let Some((start, end)) = stepper.next(lines) { if lines[start..].trim().starts_with(comment_syntax) { trace!("{}", String::from_utf8_lossy(&lines[start..end])); let line = lines[start..end].trim_start(); let stripped_line = &line[3.min(line.len())..]; markdown.extend_from_slice(stripped_line); end_of_block = end; } else { end_of_block = start; break; } } trace!("Markdown found: {:?}", String::from_utf8_lossy(&markdown)); let doc_block = LanguageType::Markdown.parse_from_slice(markdown.trim(), config); Some(FileContext::new( LanguageContext::Rust, end_of_block, doc_block, )) } RegexFamily::LinguaFranca(lf) => { let opening_fence = lf.starts_in_range(start, end)?; let start_of_code = opening_fence.end(); let closing_fence = ENDING_LF_BLOCK_REGEX.find(&lines[start_of_code..]); let end_of_code = closing_fence .map_or_else(|| lines.len(), |fence| start_of_code + fence.start()); let block_contents = &lines[start_of_code..end_of_code]; trace!("LF block: {:?}", String::from_utf8_lossy(block_contents)); let stats = self.get_lf_target_language().parse_from_slice( block_contents.trim_first_and_last_line_of_whitespace(), config, ); trace!("-> stats: {:?}", stats); Some(FileContext::new( LanguageContext::LinguaFranca, end_of_code, stats, )) } RegexFamily::HtmlLike(html) => { if let Some(mut captures) = html.start_script_in_range(start, end) { let start_of_code = captures.next().unwrap().end(); let closing_tag = END_SCRIPT.find(&lines[start_of_code..])?; let end_of_code = start_of_code + closing_tag.start(); let language = captures .next() .and_then(|m| { LanguageType::from_mime(&String::from_utf8_lossy(m.as_bytes().trim())) }) .unwrap_or(LanguageType::JavaScript); let script_contents = &lines[start_of_code..end_of_code]; if script_contents.trim().is_empty() { return None; } let stats = language.parse_from_slice( 
script_contents.trim_first_and_last_line_of_whitespace(), config, ); Some(FileContext::new( LanguageContext::Html { language }, end_of_code, stats, )) } else if let Some(mut captures) = html.start_style_in_range(start, end) { let start_of_code = captures.next().unwrap().end(); let closing_tag = END_STYLE.find(&lines[start_of_code..])?; let end_of_code = start_of_code + closing_tag.start(); let language = captures .next() .and_then(|m| { LanguageType::from_str( &String::from_utf8_lossy(m.as_bytes().trim()).to_lowercase(), ) .ok() }) .unwrap_or(LanguageType::Css); let style_contents = &lines[start_of_code..end_of_code]; if style_contents.trim().is_empty() { return None; } let stats = language.parse_from_slice( style_contents.trim_first_and_last_line_of_whitespace(), config, ); Some(FileContext::new( LanguageContext::Html { language }, end_of_code, stats, )) } else if let Some(mut captures) = html.start_template_in_range(start, end) { let start_of_code = captures.next().unwrap().end(); let closing_tag = END_TEMPLATE.find(&lines[start_of_code..])?; let end_of_code = start_of_code + closing_tag.start(); let language = captures .next() .and_then(|m| { LanguageType::from_str( &String::from_utf8_lossy(m.as_bytes().trim()).to_lowercase(), ) .ok() }) .unwrap_or(LanguageType::Html); let template_contents = &lines[start_of_code..end_of_code]; if template_contents.trim().is_empty() { return None; } let stats = language.parse_from_slice( template_contents.trim_first_and_last_line_of_whitespace(), config, ); Some(FileContext::new( LanguageContext::Html { language }, end_of_code, stats, )) } else { None } } } } #[inline] pub(crate) fn parse_quote(&mut self, window: &[u8]) -> Option { if !self.stack.is_empty() { return None; } if let Some((start, end)) = self .shared .doc_quotes .iter() .find(|(s, _)| window.starts_with(s.as_bytes())) { trace!("Start Doc {:?}", start); self.quote = Some(end); self.quote_is_verbatim = false; self.quote_is_doc_quote = true; return Some(start.len()); 
} if let Some((start, end)) = self .shared .verbatim_string_literals .iter() .find(|(s, _)| window.starts_with(s.as_bytes())) { trace!("Start verbatim {:?}", start); self.quote = Some(end); self.quote_is_verbatim = true; self.quote_is_doc_quote = false; return Some(start.len()); } if let Some((start, end)) = self .shared .string_literals .iter() .find(|(s, _)| window.starts_with(s.as_bytes())) { trace!("Start {:?}", start); self.quote = Some(end); self.quote_is_verbatim = false; self.quote_is_doc_quote = false; return Some(start.len()); } None } #[inline] pub(crate) fn parse_end_of_quote(&mut self, window: &[u8]) -> Option { #[allow(clippy::if_same_then_else)] if self._is_string_mode() && window.starts_with(self.quote?.as_bytes()) { let quote = self.quote.take().unwrap(); trace!("End {:?}", quote); Some(quote.len()) } else if !self.quote_is_verbatim && window.starts_with(br"\\") { Some(2) } else if !self.quote_is_verbatim && window.starts_with(br"\") && self .shared .string_literals .iter() .any(|(start, _)| window[1..].starts_with(start.as_bytes())) { // Tell the state machine to skip the next character because it // has been escaped if the string isn't a verbatim string. 
Some(2) } else { None } } #[inline] pub(crate) fn parse_multi_line_comment(&mut self, window: &[u8]) -> Option { if self.quote.is_some() { return None; } let iter = self .shared .multi_line_comments .iter() .chain(self.shared.nested_comments); for &(start, end) in iter { if window.starts_with(start.as_bytes()) { if self.stack.is_empty() || self.shared.allows_nested || self.shared.nested_comments.contains(&(start, end)) { self.stack.push(end); if log_enabled!(Trace) && self.shared.allows_nested { trace!("Start nested {:?}", start); } else { trace!("Start {:?}", start); } } return Some(start.len()); } } None } #[inline] pub(crate) fn parse_end_of_multi_line(&mut self, window: &[u8]) -> Option { if self .stack .last() .map_or(false, |l| window.starts_with(l.as_bytes())) { let last = self.stack.pop().unwrap(); if log_enabled!(Trace) { if self.stack.is_empty() { trace!("End {:?}", last); } else { trace!("End {:?}. Still in comments.", last); } } Some(last.len()) } else { None } } } tokei-13.0.0/src/lib.rs000064400000000000000000000032351046102023000127260ustar 00000000000000//! # Tokei: Count your code quickly. //! //! A simple, efficient library for counting code in directories. This //! functionality is also provided as a //! [CLI utility](//github.com/XAMPPRocky/tokei). Tokei uses a small state //! machine rather than regular expressions found in other code counters. Tokei //! can accurately count a lot more edge cases such as nested comments, or //! comment syntax inside string literals. //! //! # Examples //! //! Gets the total lines of code from all rust files in current directory, //! and all subdirectories. //! //! ```no_run //! use std::collections::BTreeMap; //! use std::fs::File; //! use std::io::Read; //! //! use tokei::{Config, Languages, LanguageType}; //! //! // The paths to search. Accepts absolute, relative, and glob paths. //! let paths = &["src", "tests"]; //! // Exclude any path that contains any of these strings. //! let excluded = &["target"]; //! 
// `Config` allows you to configure what is searched and counted. //! let config = Config::default(); //! //! let mut languages = Languages::new(); //! languages.get_statistics(paths, excluded, &config); //! let rust = &languages[&LanguageType::Rust]; //! //! println!("Lines of code: {}", rust.code); //! ``` #![deny( trivial_casts, trivial_numeric_casts, unused_variables, unstable_features, unused_import_braces, missing_docs )] #[macro_use] extern crate log; #[macro_use] extern crate serde; #[macro_use] mod utils; mod config; mod consts; mod language; mod sort; mod stats; pub use self::{ config::Config, consts::*, language::{Language, LanguageType, Languages}, sort::Sort, stats::{find_char_boundary, CodeStats, Report}, }; tokei-13.0.0/src/main.rs000064400000000000000000000075701046102023000131120ustar 00000000000000#[macro_use] extern crate log; mod cli; mod cli_utils; mod consts; mod input; use std::{error::Error, io, process}; use tokei::{Config, Languages, Sort}; use crate::{ cli::Cli, cli_utils::Printer, consts::{ BLANKS_COLUMN_WIDTH, CODE_COLUMN_WIDTH, COMMENTS_COLUMN_WIDTH, FALLBACK_ROW_LEN, LANGUAGE_COLUMN_WIDTH, LINES_COLUMN_WIDTH, PATH_COLUMN_WIDTH, }, input::add_input, }; fn main() -> Result<(), Box> { let mut cli = Cli::from_args(); if cli.print_languages { Cli::print_supported_languages()?; process::exit(0); } let config = cli.override_config(Config::from_config_files()); let mut languages = Languages::new(); if let Some(input) = cli.file_input() { if !add_input(input, &mut languages) { Cli::print_input_parse_failure(input); process::exit(1); } } let input = cli.input(); for path in &input { if ::std::fs::metadata(path).is_err() { eprintln!("Error: '{}' not found.", path); process::exit(1); } } let columns = cli .columns .or(config.columns) .or_else(|| { if cli.files { term_size::dimensions().map(|(w, _)| w) } else { None } }) .unwrap_or(FALLBACK_ROW_LEN) .max(FALLBACK_ROW_LEN); if cli.streaming == Some(crate::cli::Streaming::Simple) { println!( 
"#{:^LANGUAGE_COLUMN_WIDTH$} {:^PATH_COLUMN_WIDTH$} {:^LINES_COLUMN_WIDTH$} {:^CODE_COLUMN_WIDTH$} {:^COMMENTS_COLUMN_WIDTH$} {:^BLANKS_COLUMN_WIDTH$}", "language", "path", "lines", "code", "comments", "blanks" ); println!( "{:>LANGUAGE_COLUMN_WIDTH$} {:LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", (0..10).map(|_| "#").collect::(), (0..80).map(|_| "#").collect::(), (0..12).map(|_| "#").collect::(), (0..12).map(|_| "#").collect::(), (0..12).map(|_| "#").collect::(), (0..12).map(|_| "#").collect::() ); } languages.get_statistics(&input, &cli.ignored_directories(), &config); if config.for_each_fn.is_some() { process::exit(0); } if let Some(format) = cli.output { print!("{}", format.print(&languages).unwrap()); process::exit(0); } let mut printer = Printer::new( columns, cli.files, io::BufWriter::new(io::stdout()), cli.number_format, ); if languages.iter().any(|(_, lang)| lang.inaccurate) { printer.print_inaccuracy_warning()?; } printer.print_header()?; let mut is_sorted = false; if let Some(sort_category) = cli.sort.or(config.sort) { for (_, ref mut language) in &mut languages { language.sort_by(sort_category); } let mut languages: Vec<_> = languages.iter().collect(); match sort_category { Sort::Blanks => languages.sort_by(|a, b| b.1.blanks.cmp(&a.1.blanks)), Sort::Comments => languages.sort_by(|a, b| b.1.comments.cmp(&a.1.comments)), Sort::Code => languages.sort_by(|a, b| b.1.code.cmp(&a.1.code)), Sort::Files => languages.sort_by(|a, b| b.1.reports.len().cmp(&a.1.reports.len())), Sort::Lines => languages.sort_by(|a, b| b.1.lines().cmp(&a.1.lines())), } is_sorted = true; if cli.sort_reverse { printer.print_results(languages.into_iter().rev(), cli.compact, is_sorted)?; } else { printer.print_results(languages.into_iter(), cli.compact, is_sorted)?; } } else { printer.print_results(languages.iter(), cli.compact, is_sorted)?; } printer.print_total(&languages)?; Ok(()) } 
tokei-13.0.0/src/sort.rs000064400000000000000000000027571046102023000131570ustar 00000000000000use std::{borrow::Cow, str::FromStr}; use serde::de::{self, Deserialize, Deserializer}; /// Used for sorting languages. #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] pub enum Sort { /// Sort by number blank lines. Blanks, /// Sort by number comments lines. Comments, /// Sort by number code lines. Code, /// Sort by number files lines. Files, /// Sort by number of lines. Lines, } impl FromStr for Sort { type Err = String; fn from_str(s: &str) -> Result { Ok(if s.eq_ignore_ascii_case("blanks") { Sort::Blanks } else if s.eq_ignore_ascii_case("comments") { Sort::Comments } else if s.eq_ignore_ascii_case("code") { Sort::Code } else if s.eq_ignore_ascii_case("files") { Sort::Files } else if s.eq_ignore_ascii_case("lines") { Sort::Lines } else { return Err(format!("Unsupported sorting option: {}", s)); }) } } impl<'de> Deserialize<'de> for Sort { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { String::deserialize(deserializer)? .parse() .map_err(de::Error::custom) } } impl<'a> From for Cow<'a, Sort> { fn from(from: Sort) -> Self { Cow::Owned(from) } } impl<'a> From<&'a Sort> for Cow<'a, Sort> { fn from(from: &'a Sort) -> Self { Cow::Borrowed(from) } } tokei-13.0.0/src/stats.rs000064400000000000000000000077061046102023000133250ustar 00000000000000use crate::consts::{ BLANKS_COLUMN_WIDTH, CODE_COLUMN_WIDTH, COMMENTS_COLUMN_WIDTH, LINES_COLUMN_WIDTH, }; use crate::LanguageType; use std::{collections::BTreeMap, fmt, ops, path::PathBuf}; /// A struct representing stats about a single blob of code. #[derive(Clone, Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)] #[non_exhaustive] pub struct CodeStats { /// The blank lines in the blob. pub blanks: usize, /// The lines of code in the blob. pub code: usize, /// The lines of comments in the blob. pub comments: usize, /// Language blobs that were contained inside this blob. 
pub blobs: BTreeMap, } impl CodeStats { /// Creates a new blank `CodeStats`. #[must_use] pub fn new() -> Self { Self::default() } /// Get the total lines in a blob of code. #[must_use] pub fn lines(&self) -> usize { self.blanks + self.code + self.comments } /// Creates a new `CodeStats` from an existing one with all of the child /// blobs merged. #[must_use] pub fn summarise(&self) -> Self { let mut summary = self.clone(); for (_, stats) in std::mem::take(&mut summary.blobs) { let child_summary = stats.summarise(); summary.blanks += child_summary.blanks; summary.comments += child_summary.comments; summary.code += child_summary.code; } summary } } impl ops::AddAssign for CodeStats { fn add_assign(&mut self, rhs: Self) { self.add_assign(&rhs); } } impl ops::AddAssign<&'_ CodeStats> for CodeStats { fn add_assign(&mut self, rhs: &'_ CodeStats) { self.blanks += rhs.blanks; self.code += rhs.code; self.comments += rhs.comments; for (language, stats) in &rhs.blobs { *self.blobs.entry(*language).or_default() += stats; } } } /// A struct representing the statistics of a file. #[derive(Deserialize, Serialize, Clone, Debug, Default, PartialEq)] #[non_exhaustive] pub struct Report { /// The code statistics found in the file. pub stats: CodeStats, /// File name. pub name: PathBuf, } impl Report { /// Create a new `Report` from a [`PathBuf`]. /// /// [`PathBuf`]: //doc.rust-lang.org/std/path/struct.PathBuf.html #[must_use] pub fn new(name: PathBuf) -> Self { Self { name, ..Self::default() } } } impl ops::AddAssign for Report { fn add_assign(&mut self, rhs: CodeStats) { self.stats += rhs; } } #[doc(hidden)] #[must_use] pub fn find_char_boundary(s: &str, index: usize) -> usize { for i in 0..4 { if s.is_char_boundary(index + i) { return index + i; } } unreachable!(); } macro_rules! 
display_stats { ($f:expr, $this:expr, $name:expr, $max:expr) => { write!( $f, " {: LINES_COLUMN_WIDTH$} {:>CODE_COLUMN_WIDTH$} {:>COMMENTS_COLUMN_WIDTH$} {:>BLANKS_COLUMN_WIDTH$}", $name, $this.stats.lines(), $this.stats.code, $this.stats.comments, $this.stats.blanks, max = $max ) }; } impl fmt::Display for Report { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let name = self.name.to_string_lossy(); let name_length = name.len(); // Added 2 to max length to cover wider Files column (see https://github.com/XAMPPRocky/tokei/issues/891). let max_len = f.width().unwrap_or(27) + 2; if name_length <= max_len { display_stats!(f, self, name, max_len) } else { let mut formatted = String::from("|"); // Add 1 to the index to account for the '|' we add to the output string let from = find_char_boundary(&name, name_length + 1 - max_len); formatted.push_str(&name[from..]); display_stats!(f, self, formatted, max_len) } } } tokei-13.0.0/src/utils/ext.rs000064400000000000000000000103541046102023000141200ustar 00000000000000//! Various extensions to Rust std types. 
pub(crate) trait AsciiExt { fn is_whitespace(&self) -> bool; fn is_line_ending_whitespace(&self) -> bool; } impl AsciiExt for u8 { fn is_whitespace(&self) -> bool { *self == b' ' || (b'\x09'..=b'\x0d').contains(self) } fn is_line_ending_whitespace(&self) -> bool { *self == b'\n' } } pub(crate) trait SliceExt { fn trim_first_and_last_line_of_whitespace(&self) -> &Self; fn trim_start(&self) -> &Self; fn trim(&self) -> &Self; fn contains_slice(&self, needle: &Self) -> bool; } impl SliceExt for [u8] { fn trim_first_and_last_line_of_whitespace(&self) -> &Self { let start = self .iter() .position(|c| c.is_line_ending_whitespace() || !c.is_whitespace()) .map_or(0, |i| (i + 1).min(self.len().saturating_sub(1))); let end = self .iter() .rposition(|c| c.is_line_ending_whitespace() || !c.is_whitespace()) .map_or_else( || self.len().saturating_sub(1), |i| { // Remove the entire `\r\n` in the case that it was the line ending whitespace if self[i.saturating_sub(1)] == b'\r' && self[i] == b'\n' { i - 1 } else { i } }, ); if self[start..].is_empty() { return &[]; } &self[start..=end] } fn trim_start(&self) -> &Self { let length = self.len(); if length == 0 { return self; } let start = match self.iter().position(|c| !c.is_whitespace()) { Some(start) => start, None => return &[], }; &self[start..] 
} fn trim(&self) -> &Self { let length = self.len(); if length == 0 { return self; } let start = match self.iter().position(|c| !c.is_whitespace()) { Some(start) => start, None => return &[], }; let end = match self.iter().rposition(|c| !c.is_whitespace()) { Some(end) => end.max(start), _ => length, }; &self[start..=end] } fn contains_slice(&self, needle: &Self) -> bool { let self_length = self.len(); let needle_length = needle.len(); if needle_length == 0 || needle_length > self_length { return false; } else if needle_length == self_length { return self == needle; } for window in self.windows(needle_length) { if needle == window { return true; } } false } } #[cfg(test)] mod tests { use super::*; use proptest::prelude::*; #[test] fn is_whitespace() { assert!(b' '.is_whitespace()); assert!(b'\r'.is_whitespace()); assert!(b'\n'.is_whitespace()); } #[test] fn trim() { assert!([b' ', b' ', b' '].trim().is_empty()); assert!([b' ', b'\r', b'\n'].trim().is_empty()); assert!([b'\n'].trim().is_empty()); assert!([].trim().is_empty()); assert_eq!([b'a', b'b'], [b'a', b'b'].trim()); assert_eq!([b'h', b'i'], [b' ', b'h', b'i'].trim()); assert_eq!([b'h', b'i'], [b'h', b'i', b' '].trim()); assert_eq!([b'h', b'i'], [b' ', b'h', b'i', b' '].trim()); } #[test] fn contains() { assert!([1, 2, 3, 4, 5].contains_slice(&[1, 2, 3, 4, 5])); assert!([1, 2, 3, 4, 5].contains_slice(&[1, 2, 3])); assert!([1, 2, 3, 4, 5].contains_slice(&[3, 4, 5])); assert!([1, 2, 3, 4, 5].contains_slice(&[2, 3, 4])); assert!(![1, 2, 3, 4, 5].contains_slice(&[])); } #[test] fn trim_first_and_last_line_of_whitespace_edge_cases() { assert_eq!(b"", b"\ra ".trim_first_and_last_line_of_whitespace()); assert_eq!(b"a", b"\r\na ".trim_first_and_last_line_of_whitespace()); assert_eq!(b" ", b" ".trim_first_and_last_line_of_whitespace()); } proptest! 
{ #[test] fn trim_first_and_last_line_of_whitespace_doesnt_panic(input: Vec) { let _ = &input.trim_first_and_last_line_of_whitespace(); } } } tokei-13.0.0/src/utils/fs.rs000064400000000000000000000334021046102023000137270ustar 00000000000000use std::{collections::BTreeMap, path::Path}; use ignore::{overrides::OverrideBuilder, DirEntry, WalkBuilder, WalkState::Continue}; use rayon::prelude::*; use crate::{ config::Config, language::{Language, LanguageType}, }; const IGNORE_FILE: &str = ".tokeignore"; pub fn get_all_files>( paths: &[A], ignored_directories: &[&str], languages: &mut BTreeMap, config: &Config, ) { let languages = parking_lot::Mutex::new(languages); let (tx, rx) = crossbeam_channel::unbounded(); let mut paths = paths.iter(); let mut walker = WalkBuilder::new(paths.next().unwrap()); for path in paths { walker.add(path); } if !ignored_directories.is_empty() { let mut overrides = OverrideBuilder::new("."); for ignored in ignored_directories { rs_error!(overrides.add(&format!("!{}", ignored))); } walker.overrides(overrides.build().expect("Excludes provided were invalid")); } let ignore = config.no_ignore.map(|b| !b).unwrap_or(true); let ignore_dot = ignore && config.no_ignore_dot.map(|b| !b).unwrap_or(true); let ignore_vcs = ignore && config.no_ignore_vcs.map(|b| !b).unwrap_or(true); // Custom ignore files always work even if the `ignore` option is false, // so we only add if that option is not present. if ignore_dot { walker.add_custom_ignore_filename(IGNORE_FILE); } walker .git_exclude(ignore_vcs) .git_global(ignore_vcs) .git_ignore(ignore_vcs) .hidden(config.hidden.map(|b| !b).unwrap_or(true)) .ignore(ignore_dot) .parents(ignore && config.no_ignore_parent.map(|b| !b).unwrap_or(true)); walker.build_parallel().run(move || { let tx = tx.clone(); Box::new(move |entry| { let entry = match entry { Ok(entry) => entry, Err(error) => { use ignore::Error; if let Error::WithDepth { err: ref error, .. 
} = error { if let Error::WithPath { ref path, err: ref error, } = **error { error!("{} reading {}", error, path.display()); return Continue; } } error!("{}", error); return Continue; } }; if entry.file_type().map_or(false, |ft| ft.is_file()) { tx.send(entry).unwrap(); } Continue }) }); let rx_iter = rx .into_iter() .par_bridge() .filter_map(|e| LanguageType::from_path(e.path(), config).map(|l| (e, l))); let process = |(entry, language): (DirEntry, LanguageType)| { let result = language.parse(entry.into_path(), config); let mut lock = languages.lock(); let entry = lock.entry(language).or_insert_with(Language::new); match result { Ok(stats) => { let func = config.for_each_fn; if let Some(f) = func { f(language, stats.clone()) }; entry.add_report(stats) } Err((error, path)) => { entry.mark_inaccurate(); error!("Error reading {}:\n{}", path.display(), error); } } }; if let Some(types) = config.types.as_deref() { rx_iter.filter(|(_, l)| types.contains(l)).for_each(process) } else { rx_iter.for_each(process) } } pub(crate) fn get_extension(path: &Path) -> Option { path.extension().map(|e| e.to_string_lossy().to_lowercase()) } pub(crate) fn get_filename(path: &Path) -> Option { path.file_name().map(|e| e.to_string_lossy().to_lowercase()) } #[cfg(test)] mod tests { use std::fs; use tempfile::TempDir; use super::IGNORE_FILE; use crate::{ config::Config, language::{languages::Languages, LanguageType}, }; const FILE_CONTENTS: &[u8] = b"fn main() {}"; const FILE_NAME: &str = "main.rs"; const IGNORE_PATTERN: &str = "*.rs"; const LANGUAGE: &LanguageType = &LanguageType::Rust; #[test] fn ignore_directory_with_extension() { let mut languages = Languages::new(); let tmp_dir = TempDir::new().expect("Couldn't create temp dir"); let path_name = tmp_dir.path().join("directory.rs"); fs::create_dir(path_name).expect("Couldn't create directory.rs within temp"); super::get_all_files( &[tmp_dir.into_path().to_str().unwrap()], &[], &mut languages, &Config::default(), ); 
assert!(languages.get(LANGUAGE).is_none()); } #[test] fn hidden() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); fs::write(dir.path().join(".hidden.rs"), FILE_CONTENTS).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.hidden = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn no_ignore_implies_dot() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); fs::write(dir.path().join(".ignore"), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.no_ignore = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn no_ignore_implies_vcs_gitignore() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); git2::Repository::init(dir.path()).expect("Couldn't create git repo."); fs::write(dir.path().join(".gitignore"), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.no_ignore = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn no_ignore_parent() { let parent_dir = TempDir::new().expect("Couldn't create temp dir."); let child_dir = parent_dir.path().join("child/"); let 
mut config = Config::default(); let mut languages = Languages::new(); fs::create_dir_all(&child_dir) .unwrap_or_else(|_| panic!("Couldn't create {:?}", child_dir)); fs::write(parent_dir.path().join(".ignore"), IGNORE_PATTERN) .expect("Couldn't create .gitignore."); fs::write(child_dir.join(FILE_NAME), FILE_CONTENTS).expect("Couldn't create child.rs"); super::get_all_files( &[child_dir.as_path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.no_ignore_parent = Some(true); super::get_all_files( &[child_dir.as_path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn no_ignore_dot() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); fs::write(dir.path().join(".ignore"), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.no_ignore_dot = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn no_ignore_dot_still_vcs_gitignore() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); git2::Repository::init(dir.path()).expect("Couldn't create git repo."); fs::write(dir.path().join(".gitignore"), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); config.no_ignore_dot = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); } #[test] fn no_ignore_dot_includes_custom_ignore() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = 
Languages::new(); fs::write(dir.path().join(IGNORE_FILE), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.no_ignore_dot = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn no_ignore_vcs_gitignore() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); git2::Repository::init(dir.path()).expect("Couldn't create git repo."); fs::write(dir.path().join(".gitignore"), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.no_ignore_vcs = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn no_ignore_vcs_gitignore_still_dot() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); fs::write(dir.path().join(".ignore"), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); config.no_ignore_vcs = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); } #[test] fn no_ignore_vcs_gitexclude() { let dir = TempDir::new().expect("Couldn't create temp dir."); let mut config = Config::default(); let mut languages = Languages::new(); git2::Repository::init(dir.path()).expect("Couldn't create git repo."); fs::write(dir.path().join(".git/info/exclude"), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); 
super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); config.no_ignore_vcs = Some(true); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } #[test] fn custom_ignore() { let dir = TempDir::new().expect("Couldn't create temp dir."); let config = Config::default(); let mut languages = Languages::new(); git2::Repository::init(dir.path()).expect("Couldn't create git repo."); fs::write(dir.path().join(IGNORE_FILE), IGNORE_PATTERN).unwrap(); fs::write(dir.path().join(FILE_NAME), FILE_CONTENTS).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_none()); fs::remove_file(dir.path().join(IGNORE_FILE)).unwrap(); super::get_all_files( &[dir.path().to_str().unwrap()], &[], &mut languages, &config, ); assert!(languages.get(LANGUAGE).is_some()); } } tokei-13.0.0/src/utils/macros.rs000064400000000000000000000041321046102023000146010ustar 00000000000000#![allow(unused_macros)] macro_rules! opt_warn { ($option:expr, $message:expr) => { match $option { Some(result) => result, None => { warn!($message); continue; } } }; } macro_rules! rs_warn { ($result:expr, $message: expr) => { match $result { Ok(result) => result, Err(error) => { warn!("{}", error); continue; } } }; } macro_rules! opt_error { ($option:expr, $message:expr) => { match $option { Some(result) => result, None => { error!($message); continue; } } }; } macro_rules! rs_error { ($result:expr) => { match $result { Ok(result) => result, Err(error) => { error!("{}", error); continue; } } }; } macro_rules! opt_ret_warn { ($option:expr, $message:expr) => { match $option { Some(result) => result, None => { warn!($message); return None; } } }; } macro_rules! 
rs_ret_warn { ($result:expr, $message: expr) => { match $result { Ok(result) => result, Err(error) => { warn!("{}", error); return None; } } }; } macro_rules! opt_ret_error { ($option:expr, $message:expr) => { match $option { Some(result) => result, None => { error!($message); return None; } } }; } macro_rules! rs_ret_error { ($result:expr) => { match $result { Ok(result) => result, Err(error) => { error!("{}", error); return None; } } }; } macro_rules! debug { ($fmt:expr) => (if cfg!(debug_assertions) {println!($fmt)}); ($fmt:expr, $($arg:tt)*) => (if cfg!(debug_assertions) {println!($fmt, $($arg)*)}); } tokei-13.0.0/src/utils/mod.rs000064400000000000000000000000711046102023000140720ustar 00000000000000#[macro_use] mod macros; pub(crate) mod ext; pub mod fs;