git-absorb-0.6.11/Cargo.lock0000644000000363510000000000100111350ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "ansi_term" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" dependencies = [ "winapi", ] [[package]] name = "anyhow" version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1fd36ffbb1fb7c834eac128ea8d0e310c5aeb635548f9d58861e1308d46e71c" [[package]] name = "arrayref" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" [[package]] name = "arrayvec" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" [[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ "hermit-abi", "libc", "winapi", ] [[package]] name = "autocfg" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "base64" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" [[package]] name = "bitflags" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" [[package]] name = "bitflags" version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] name = "blake2b_simd" version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8fb2d74254a3a0b5cac33ac9f8ed0e44aa50378d9dbb2e5d83bd21ed1dc2c8a" dependencies = [ "arrayref", "arrayvec", "constant_time_eq", ] [[package]] name = "cc" version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef611cc68ff783f18535d77ddd080185275713d852c4f5cbb6122c462a7a825c" dependencies = [ "jobserver", ] [[package]] name = "cfg-if" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" [[package]] name = "chrono" version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" dependencies = [ "libc", "num-integer", "num-traits", "time", "winapi", ] [[package]] name = "clap" version = "2.33.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" dependencies = [ "ansi_term", "atty", "bitflags 1.2.1", "strsim", "textwrap", "unicode-width", "vec_map", ] [[package]] name = "constant_time_eq" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" [[package]] name = "crossbeam-channel" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" dependencies = [ "crossbeam-utils", "maybe-uninit", ] [[package]] name = "crossbeam-utils" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" dependencies = [ "autocfg", "cfg-if", "lazy_static", ] [[package]] name = "dirs" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3" dependencies = [ "cfg-if", "dirs-sys", ] [[package]] name = "dirs-sys" version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a" dependencies = [ "libc", "redox_users", "winapi", ] [[package]] name = "getrandom" version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" dependencies = [ "cfg-if", "libc", "wasi 0.9.0+wasi-snapshot-preview1", ] [[package]] name = "git-absorb" version = "0.6.11" dependencies = [ "anyhow", "clap", "git2", "memchr", "slog", "slog-async", "slog-term", "tempfile", ] [[package]] name = "git2" version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf97ba92db08df386e10c8ede66a2a0369bd277090afd8710e19e38de9ec0cd" dependencies = [ "bitflags 2.4.1", "libc", "libgit2-sys", "log", "url", ] [[package]] name = "hermit-abi" version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" dependencies = [ "libc", ] [[package]] name = "idna" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" dependencies = [ "matches", "unicode-bidi", "unicode-normalization", ] [[package]] name = "jobserver" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" dependencies = [ "libc", ] [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" version = "0.2.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2448f6066e80e3bfc792e9c98bf705b4b0fc6e8ef5b43e5889aff0eaa9c58743" [[package]] name = "libgit2-sys" version = "0.16.1+1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2a2bb3680b094add03bb3732ec520ece34da31a8cd2d633d1389d0f0fb60d0c" dependencies = [ "cc", "libc", "libz-sys", "pkg-config", ] [[package]] name = "libz-sys" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "log" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" dependencies = [ "cfg-if", ] [[package]] name = "matches" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" [[package]] name = "maybe-uninit" 
version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" [[package]] name = "memchr" version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" [[package]] name = "num-integer" version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d59457e662d541ba17869cf51cf177c0b5f0cbf476c66bdc90bf1edac4f875b" dependencies = [ "autocfg", "num-traits", ] [[package]] name = "num-traits" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac267bcc07f48ee5f8935ab0d24f316fb722d7a1292e2913f0cc196b29ffd611" dependencies = [ "autocfg", ] [[package]] name = "once_cell" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" [[package]] name = "percent-encoding" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pkg-config" version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d36492546b6af1463394d46f0c834346f31548646f6ba10849802c9c9a27ac33" [[package]] name = "ppv-lite86" version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c36fa947111f5c62a733b652544dd0016a43ce89619538a8ef92724a6f501a20" [[package]] name = "rand" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" dependencies = [ "getrandom", "libc", "rand_chacha", "rand_core", "rand_hc", ] [[package]] name = "rand_chacha" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" dependencies = [ "ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" dependencies = [ "getrandom", ] [[package]] name = "rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" dependencies = [ "rand_core", ] [[package]] name = "redox_syscall" version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "redox_users" version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" dependencies = [ "getrandom", "redox_syscall", "rust-argon2", ] [[package]] name = "remove_dir_all" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" dependencies = [ "winapi", ] [[package]] name = "rust-argon2" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dab61250775933275e84053ac235621dfb739556d5c54a2f2e9313b7cf43a19" dependencies = [ "base64", "blake2b_simd", "constant_time_eq", "crossbeam-utils", ] [[package]] name = "slog" version = "2.5.2" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cc9c640a4adbfbcc11ffb95efe5aa7af7309e002adab54b185507dbf2377b99" [[package]] name = "slog-async" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51b3336ce47ce2f96673499fc07eb85e3472727b9a7a2959964b002c2ce8fbbb" dependencies = [ "crossbeam-channel", "slog", "take_mut", "thread_local", ] [[package]] name = "slog-term" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bab1d807cf71129b05ce36914e1dbb6fbfbdecaf686301cb457f4fa967f9f5b6" dependencies = [ "atty", "chrono", "slog", "term", "thread_local", ] [[package]] name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" [[package]] name = "take_mut" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" [[package]] name = "tempfile" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" dependencies = [ "cfg-if", "libc", "rand", "redox_syscall", "remove_dir_all", "winapi", ] [[package]] name = "term" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0863a3345e70f61d613eab32ee046ccd1bcc5f9105fe402c61fcd0c13eeb8b5" dependencies = [ "dirs", "winapi", ] [[package]] name = "textwrap" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ "unicode-width", ] [[package]] name = "thread_local" version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" dependencies = [ "once_cell", ] [[package]] name = "time" version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" dependencies = [ "libc", "wasi 0.10.0+wasi-snapshot-preview1", "winapi", ] [[package]] name = "tinyvec" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "238ce071d267c5710f9d31451efec16c5ee22de34df17cc05e56cbc92e967117" [[package]] name = "unicode-bidi" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" dependencies = [ "matches", ] [[package]] name = "unicode-normalization" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977" dependencies = [ "tinyvec", ] [[package]] name = "unicode-width" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" [[package]] name = "url" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb" dependencies = [ "idna", "matches", "percent-encoding", ] [[package]] name = "vcpkg" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6454029bf181f092ad1b853286f23e2c507d8e8194d01d92da4a55c274a5508c" 
[[package]] name = "vec_map" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" version = "0.10.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" git-absorb-0.6.11/Cargo.toml0000644000000023410000000000100111500ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2018" name = "git-absorb" version = "0.6.11" authors = ["Stephen Jung "] include = [ "**/*.rs", "Cargo.*", "*.md", ] description = "git commit --fixup, but automatic" homepage = "https://github.com/tummychow/git-absorb" readme = "README.md" license = "BSD-3-Clause" repository = "https://github.com/tummychow/git-absorb" [[bin]] name = "git-absorb" path = "src/main.rs" [dependencies.anyhow] version = "~1.0" [dependencies.clap] version = "~2.33" [dependencies.git2] version = "~0.18" default-features = false [dependencies.memchr] version = "~2.3" [dependencies.slog] version = "~2.5" [dependencies.slog-async] version = "~2.5" [dependencies.slog-term] version = "~2.6" [dev-dependencies.tempfile] version = "~3.1" git-absorb-0.6.11/Cargo.toml.orig000064400000000000000000000012101046102023000146230ustar 00000000000000[package] name = "git-absorb" version = "0.6.11" authors = ["Stephen Jung "] description = "git commit --fixup, but automatic" homepage = "https://github.com/tummychow/git-absorb" repository = "https://github.com/tummychow/git-absorb" readme = "README.md" license = "BSD-3-Clause" edition = "2018" include = [ "**/*.rs", "Cargo.*", "*.md", ] [[bin]] name = "git-absorb" path = "src/main.rs" [dependencies.git2] version = "~0.18" default-features = false [dependencies] clap = "~2.33" slog = "~2.5" slog-term = "~2.6" slog-async = "~2.5" memchr = "~2.3" anyhow = "~1.0" [dev-dependencies] tempfile = "~3.1" git-absorb-0.6.11/Documentation/README.md000064400000000000000000000004301046102023000160270ustar 00000000000000git-absorb manual ================= This project's man page `git-absorb.1.gz` can be generated from `git-absorb.txt` by running `make`. 
Build dependencies ------------------ - [asciidoc][] (tested with version 8.6.10) - GNU Make [asciidoc]: http://www.methods.co.nz/asciidoc/ git-absorb-0.6.11/LICENSE.md000064400000000000000000000027531046102023000133550ustar 00000000000000# BSD 3-Clause License Copyright (c) 2018, Stephen Jung All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. git-absorb-0.6.11/README.md000064400000000000000000000175171046102023000132340ustar 00000000000000# git absorb This is a port of Facebook's [`hg absorb`](https://www.mercurial-scm.org/repo/hg/rev/5111d11b8719), which I first read about on [mozilla.dev.version-control](https://groups.google.com/forum/#!msg/mozilla.dev.version-control/nh4fITFlEMk/ZNXgnAzxAQAJ): > * Facebook demoed `hg absorb` which is probably the coolest workflow enhancement I've seen to version control in years. Essentially, when your working directory has uncommitted changes on top of draft changesets, you can run `hg absorb` and the uncommitted modifications are automagically folded ("absorbed") into the appropriate draft ancestor changesets. This is essentially doing `hg histedit` + "roll" actions without having to make a commit or manually make history modification rules. The command essentially looks at the lines that were modified, finds a changeset modifying those lines, and amends that changeset to include your uncommitted changes. If the changes can't be made without conflicts, they remain uncommitted. This workflow is insanely useful for things like applying review feedback. You just make file changes, run `hg absorb` and the mapping of changes to commits sorts itself out. It is magical. ## Elevator Pitch You have a feature branch with a few commits. Your teammate reviewed the branch and pointed out a few bugs. You have fixes for the bugs, but you don't want to shove them all into an opaque commit that says `fixes`, because you believe in atomic commits. 
Instead of manually finding commit SHAs for `git commit --fixup`, or running a
manual interactive rebase, do this:

```
git add $FILES_YOU_FIXED
git absorb --and-rebase
```

`git absorb` will automatically identify which commits are safe to modify, and
which staged changes belong to each of those commits. It will then write
`fixup!` commits for each of those changes. With the `--and-rebase` flag, these
fixup commits will be automatically integrated into the commits they fix up.

Alternatively, you can check its output manually if you don't trust it, and
then fold the fixups into your feature branch with git's built-in
[autosquash](https://git-scm.com/docs/git-rebase#Documentation/git-rebase.txt---autosquash)
functionality:

```
git add $FILES_YOU_FIXED
git absorb
git log # check the auto-generated fixup commits
git rebase -i --autosquash master
```

## Installing

The easiest way to install `git absorb` is to download an artifact from the
latest [tagged release](https://github.com/tummychow/git-absorb/releases).
Artifacts are available for Windows, macOS, and Linux (built on Ubuntu with
statically linked libgit2). If you need a commit that hasn't been released yet,
check the [latest CI artifact](https://github.com/tummychow/git-absorb/actions/workflows/build.yml?query=event%3Apush+branch%3Amaster)
or file an issue.

Alternatively, `git absorb` is available in the following system package
managers:

| Repository                  | Command                                       |
| --------------------------- | --------------------------------------------- |
| Arch Linux                  | `pacman -S git-absorb`                        |
| Debian                      | `apt install git-absorb`                      |
| DPorts                      | `pkg install git-absorb`                      |
| Fedora                      | `dnf install git-absorb`                      |
| FreeBSD Ports               | `pkg install git-absorb`                      |
| Homebrew and Linuxbrew      | `brew install git-absorb`                     |
| nixpkgs stable and unstable | `nix-env -iA nixpkgs.git-absorb`              |
| Ubuntu                      | `apt install git-absorb`                      |
| Void Linux                  | `xbps-install -S git-absorb`                  |
| GNU Guix                    | `guix install git-absorb`                     |

## Compiling from Source

[![crates.io badge](https://img.shields.io/crates/v/git-absorb.svg)](https://crates.io/crates/git-absorb)
[![Build](https://github.com/tummychow/git-absorb/actions/workflows/build.yml/badge.svg?branch=master&event=push)](https://github.com/tummychow/git-absorb/actions/workflows/build.yml)

You will need the following:

- [cargo](https://github.com/rust-lang/cargo)

Then `cargo install git-absorb`. Make sure that `$CARGO_HOME/bin` is on your
`$PATH` so that git can find the command. (`$CARGO_HOME` defaults to
`~/.cargo`.)

Note that `git absorb` does _not_ use the system libgit2. This means you do not
need to have libgit2 installed to build or run it. However, this does mean you
have to be able to build libgit2. (Due to
[recent changes](https://github.com/alexcrichton/git2-rs/commit/76f4b74aef2bc2a54906ddcbf7fbe0018936a69d)
in the git2 crate, CMake is no longer needed to build it.)

Note: `cargo install` does not currently know how to install manpages
([cargo#2729](https://github.com/rust-lang/cargo/issues/2729)), so if you use
`cargo` for installation then `git absorb --help` will not work. Here is a
manual workaround, assuming your system has a `~/.local/share/man/man1`
directory that `man --path` knows about:

```
wget https://raw.githubusercontent.com/tummychow/git-absorb/master/Documentation/git-absorb.1
mv git-absorb.1 ~/.local/share/man/man1
```

## Usage

1. `git add` any changes that you want to absorb. By design, `git absorb` will
   only consider content in the git index (staging area).
2. `git absorb`.
   This will create a sequence of commits on `HEAD`. Each commit will have a
   `fixup!` message indicating the message (if unique) or SHA of the commit it
   should be squashed into.
3. If you are satisfied with the output, `git rebase -i --autosquash` to squash
   the `fixup!` commits into their predecessors. You can set the
   [`GIT_SEQUENCE_EDITOR`](https://stackoverflow.com/a/29094904) environment
   variable if you don't need to edit the rebase TODO file.
4. If you are not satisfied (or if something bad happened), `git reset --soft`
   to the pre-absorption commit to recover your old state. (You can find the
   commit in question with `git reflog`.) And if you think `git absorb` is at
   fault, please [file an issue](https://github.com/tummychow/git-absorb/issues/new).

## How it works (roughly)

`git absorb` works by checking whether two patches P1 and P2 *commute*, that
is, whether applying P1 before P2 gives the same result as applying P2 before
P1.

`git absorb` considers a range of commits ending at HEAD. The first commit can
be specified explicitly with `--base <base>`. By default the last 10 commits
will be considered (see [Configuration](#configuration) below for how to change
this).

For each hunk in the index, `git absorb` checks whether that hunk commutes with
the last commit, then the one before that, and so on. When it finds a commit
that does not commute with the hunk, it infers that this is the right parent
commit for this change, and the hunk is turned into a fixup commit. If the hunk
commutes with all commits in the range, no suitable parent commit was found for
this change; a warning is displayed, and the hunk remains uncommitted in the
index.

## Configuration

### Stack size

When run without `--base`, git-absorb will only search for candidate commits to
fix up within a certain range (by default 10). If you get a warning like this:

```
WARN stack limit reached, limit: 10
```

edit your local or global `.gitconfig` and add the following section:

```ini
[absorb]
    maxStack=50 # Or any other reasonable value for your project
```

## TODO

- implement force flag
- implement remote default branch check
- add smaller force flags to disable individual safety checks
- stop using `failure::err_msg` and ensure all error output is actionable by the user
- slightly more log output in the success case
- more tests (esp main module and integration tests)
- document stack and commute details
- more commutation cases (esp copy/rename detection)
- don't load all hunks in memory simultaneously because they could be huge
- implement some kind of index locking to protect against concurrent modifications

git-absorb-0.6.11/src/commute.rs000064400000000000000000000200521046102023000145500ustar 00000000000000
use crate::owned;

/// Tests if all elements of the iterator are equal to each other.
///
/// An empty iterator returns `true`.
///
/// `uniform()` is short-circuiting. It will stop processing as soon
/// as it finds two pairwise unequal elements.
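///
/// Illustrative examples (shown as plain text rather than doctests, since
/// this helper is private to the module):
///
/// ```text
/// uniform(vec![1, 1, 1])       // true: every element equals the first
/// uniform(Vec::<i32>::new())   // true: empty iterator
/// uniform(vec![1, 2, 1])       // false: stops at the first mismatch
/// ```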
fn uniform(iter: I) -> bool where I: IntoIterator, E: Eq, { let mut iter = iter.into_iter(); match iter.next() { Some(first) => iter.all(|e| e == first), None => true, } } pub fn commute(first: &owned::Hunk, second: &owned::Hunk) -> Option<(owned::Hunk, owned::Hunk)> { let (_, _, first_upper, first_lower) = first.anchors(); let (second_upper, second_lower, _, _) = second.anchors(); // represent hunks in content order rather than application order let (first_above, above, below) = { if first_lower <= second_upper { (true, first, second) } else if second_lower <= first_upper { (false, second, first) } else { // if both hunks are exclusively adding or removing, and // both hunks are composed entirely of the same line being // repeated, then they commute no matter what their // offsets are, because they can be interleaved in any // order without changing the final result if (first.added.lines.is_empty() && second.added.lines.is_empty() && uniform(first.removed.lines.iter().chain(&*second.removed.lines))) || (first.removed.lines.is_empty() && second.removed.lines.is_empty() && uniform(first.added.lines.iter().chain(&*second.added.lines))) { // TODO: removed/added start positions probably need to be // tweaked here return Some((second.clone(), first.clone())); } // these hunks overlap and cannot be interleaved, so they // do not commute return None; } }; let above = above.clone(); let mut below = below.clone(); let above_change_offset = (above.added.lines.len() as i64 - above.removed.lines.len() as i64) * if first_above { -1 } else { 1 }; below.added.start = (below.added.start as i64 + above_change_offset) as usize; below.removed.start = (below.removed.start as i64 + above_change_offset) as usize; Some(if first_above { (below, above) } else { (above, below) }) } pub fn commute_diff_before<'a, I>(after: &owned::Hunk, before: I) -> Option where I: IntoIterator, ::IntoIter: DoubleEndedIterator, { before .into_iter() // the patch's hunks must be iterated in reverse application // order (last applied to first applied), which also happens // to be reverse line order (bottom to top), which also // happens to be reverse of the order they're stored .rev() .fold(Some(after.clone()), |after, next| { after .and_then(|after| commute(next, &after)) .map(|(commuted_after, _)| commuted_after) }) } #[cfg(test)] mod tests { use super::*; use std::rc::Rc; #[test] fn test_commute() { // example init: <>()), trailing_newline: true, }, removed: owned::Block { start: 0, lines: Rc::new(vec![]), trailing_newline: true, }, }; let hunk2 = owned::Hunk { added: owned::Block { start: 1, lines: Rc::new((&mut line).take(2).collect::>()), trailing_newline: true, }, removed: owned::Block { start: 0, lines: Rc::new(vec![]), trailing_newline: true, }, }; let (new1, new2) = commute(&hunk1, &hunk2).unwrap(); assert_eq!(new1.added.lines.len(), 2); assert_eq!(new2.added.lines.len(), 4); } #[test] fn test_commute_trivial_remove() { let mut line = ::std::iter::repeat(b"bar\n".to_vec()); let hunk1 = owned::Hunk { added: owned::Block { start: 1, lines: Rc::new(vec![]), trailing_newline: true, }, removed: owned::Block { start: 4, lines: Rc::new((&mut line).take(4).collect::>()), trailing_newline: true, }, }; let hunk2 = owned::Hunk { added: owned::Block { start: 1, lines: Rc::new(vec![]), trailing_newline: true, }, removed: owned::Block { start: 2, lines: Rc::new((&mut line).take(2).collect::>()), trailing_newline: true, }, }; let (new1, new2) = commute(&hunk1, &hunk2).unwrap(); assert_eq!(new1.removed.lines.len(), 2); 
assert_eq!(new2.removed.lines.len(), 4); } #[test] fn test_commute_patch() { // example init: < { pub dry_run: bool, pub force: bool, pub base: Option<&'a str>, pub and_rebase: bool, pub whole_file: bool, pub one_fixup_per_commit: bool, pub logger: &'a slog::Logger, } pub fn run(config: &Config) -> Result<()> { let repo = git2::Repository::open_from_env()?; debug!(config.logger, "repository found"; "path" => repo.path().to_str()); run_with_repo(config, &repo) } fn run_with_repo(config: &Config, repo: &git2::Repository) -> Result<()> { let stack = stack::working_stack(&repo, config.base, config.force, config.logger)?; if stack.is_empty() { crit!(config.logger, "No commits available to fix up, exiting"); return Ok(()); } let mut diff_options = Some({ let mut ret = git2::DiffOptions::new(); ret.context_lines(0) .id_abbrev(40) .ignore_filemode(true) .ignore_submodules(true); ret }); let (stack, summary_counts): (Vec<_>, _) = { let mut diffs = Vec::with_capacity(stack.len()); for commit in &stack { let diff = owned::Diff::new( &repo.diff_tree_to_tree( if commit.parents().len() == 0 { None } else { Some(commit.parent(0)?.tree()?) } .as_ref(), Some(&commit.tree()?), diff_options.as_mut(), )?, )?; trace!(config.logger, "parsed commit diff"; "commit" => commit.id().to_string(), "diff" => format!("{:?}", diff), ); diffs.push(diff); } let summary_counts = stack::summary_counts(&stack); ( stack.into_iter().zip(diffs.into_iter()).collect(), summary_counts, ) }; let mut head_tree = repo.head()?.peel_to_tree()?; let index = owned::Diff::new(&repo.diff_tree_to_index( Some(&head_tree), None, diff_options.as_mut(), )?)?; trace!(config.logger, "parsed index"; "index" => format!("{:?}", index), ); let signature = repo .signature() .or_else(|_| git2::Signature::now("nobody", "nobody@example.com"))?; let mut head_commit = repo.head()?.peel_to_commit()?; let mut hunks_with_commit = vec![]; let mut patches_considered = 0usize; 'patch: for index_patch in index.iter() { let old_path = index_patch.new_path.as_slice(); if index_patch.status != git2::Delta::Modified { debug!(config.logger, "skipped non-modified hunk"; "path" => String::from_utf8_lossy(old_path).into_owned(), "status" => format!("{:?}", index_patch.status), ); continue 'patch; } patches_considered += 1; let mut preceding_hunks_offset = 0isize; let mut applied_hunks_offset = 0isize; 'hunk: for index_hunk in &index_patch.hunks { debug!(config.logger, "next hunk"; "header" => index_hunk.header(), "path" => String::from_utf8_lossy(old_path).into_owned(), ); // To properly handle files ("patches" in libgit2 lingo) with multiple hunks, we // need to find the updated line coordinates (`header`) of the current hunk in // two cases: // 1) As if it were the only hunk in the index. This only involves shifting the // "added" side *up* by the offset introduced by the preceding hunks: let isolated_hunk = index_hunk .clone() .shift_added_block(-preceding_hunks_offset); // 2) When applied on top of the previously committed hunks. This requires shifting // both the "added" and the "removed" sides of the previously isolated hunk *down* // by the offset of the committed hunks: let hunk_to_apply = isolated_hunk .clone() .shift_both_blocks(applied_hunks_offset); // The offset is the number of lines added minus the number of lines removed by a hunk: let hunk_offset = index_hunk.changed_offset(); // To aid in understanding these arithmetic, here's an illustration. // There are two hunks in the original patch, each adding one line ("line2" and // "line5"). 
Assuming the first hunk (with offset = -1) was already processed // and applied, the table shows the three versions of the patch, with line numbers // on the dded and emoved sides for each: // |----------------|-----------|------------------| // | | | applied on top | // | original patch | isolated | of the preceding | // |----------------|-----------|------------------| // | | | | // |----------------|-----------|------------------| // | 1 1 line1 | 1 1 | 1 1 line1 | // | 2 line2 | 2 2 | 2 2 line3 | // | 3 2 line3 | 3 3 | 3 3 line4 | // | 4 3 line4 | 4 4 | 4 line5 | // | 5 line5 | 5 | | // |----------------|-----------|------------------| // | So the second hunk's `header` is: | // | -5,1 +3,0 | -5,1 +4,0 | -4,1 +3,0 | // |----------------|-----------|------------------| debug!(config.logger, ""; "to apply" => hunk_to_apply.header(), "to commute" => isolated_hunk.header(), "preceding hunks" => format!("{}/{}", applied_hunks_offset, preceding_hunks_offset), ); preceding_hunks_offset += hunk_offset; // find the newest commit that the hunk cannot commute with let mut dest_commit = None; let mut commuted_old_path = old_path; let mut commuted_index_hunk = isolated_hunk; 'commit: for &(ref commit, ref diff) in &stack { let c_logger = config.logger.new(o!( "commit" => commit.id().to_string(), )); let next_patch = match diff.by_new(commuted_old_path) { Some(patch) => patch, // this commit doesn't touch the hunk's file, so // they trivially commute, and the next commit // should be considered None => { debug!(c_logger, "skipped commit with no path"); continue 'commit; } }; // sometimes we just forget some change (eg: intializing some object) that // happens in a completely unrelated place with the current hunks. In those // cases, might be helpful to just match the first commit touching the same // file as the current hunk. Use this option with care! if config.whole_file { debug!( c_logger, "Commit touches the hunk file and match whole file is enabled" ); dest_commit = Some(commit); break 'commit; } if next_patch.status == git2::Delta::Added { debug!(c_logger, "found noncommutative commit by add"); dest_commit = Some(commit); break 'commit; } if commuted_old_path != next_patch.old_path.as_slice() { debug!(c_logger, "changed commute path"; "path" => String::from_utf8_lossy(&next_patch.old_path).into_owned(), ); commuted_old_path = next_patch.old_path.as_slice(); } commuted_index_hunk = match commute::commute_diff_before( &commuted_index_hunk, &next_patch.hunks, ) { Some(hunk) => { debug!(c_logger, "commuted hunk with commit"; "offset" => (hunk.added.start as i64) - (commuted_index_hunk.added.start as i64), ); hunk } // this commit contains a hunk that cannot // commute with the hunk being absorbed None => { debug!(c_logger, "found noncommutative commit by conflict"); dest_commit = Some(commit); break 'commit; } }; } let dest_commit = match dest_commit { Some(commit) => commit, // the hunk commutes with every commit in the stack, // so there is no commit to absorb it into None => { warn!( config.logger, "Could not find a commit to fix up, use \ --base to increase the search range." 
); continue 'hunk; } }; let hunk_with_commit = HunkWithCommit { hunk_to_apply, dest_commit, index_patch, }; hunks_with_commit.push(hunk_with_commit); applied_hunks_offset += hunk_offset; } } hunks_with_commit.sort_by_key(|h| h.dest_commit.id()); // * apply all hunks that are going to be fixed up into `dest_commit` // * commit the fixup // * repeat for all `dest_commit`s // // the `.zip` here will gives us something similar to `.windows`, but with // an extra iteration for the last element (otherwise we would have to // special case the last element and commit it separately) for (current, next) in hunks_with_commit .iter() .zip(hunks_with_commit.iter().skip(1).map(Some).chain([None])) { let new_head_tree = apply_hunk_to_tree( &repo, &head_tree, ¤t.hunk_to_apply, ¤t.index_patch.old_path, )?; // whether there are no more hunks to apply to `dest_commit` let commit_fixup = next.map_or(true, |next| { // if the next hunk is for a different commit -- commit what we have so far !config.one_fixup_per_commit || next.dest_commit.id() != current.dest_commit.id() }); if commit_fixup { // TODO: the git2 api only supports utf8 commit messages, // so it's okay to use strings instead of bytes here // https://docs.rs/git2/0.7.5/src/git2/repo.rs.html#998 // https://libgit2.org/libgit2/#HEAD/group/commit/git_commit_create let dest_commit_id = current.dest_commit.id().to_string(); let dest_commit_locator = current .dest_commit .summary() .filter(|&msg| summary_counts[msg] == 1) .unwrap_or(&dest_commit_id); let diff = repo .diff_tree_to_tree(Some(&head_commit.tree()?), Some(&new_head_tree), None)? .stats()?; if !config.dry_run { head_tree = new_head_tree; head_commit = repo.find_commit(repo.commit( Some("HEAD"), &signature, &signature, &format!("fixup! {}\n", dest_commit_locator), &head_tree, &[&head_commit], )?)?; info!(config.logger, "committed"; "commit" => head_commit.id().to_string(), "header" => format!("+{},-{}", diff.insertions(), diff.deletions()), ); } else { info!(config.logger, "would have committed"; "fixup" => dest_commit_locator, "header" => format!("+{},-{}", diff.insertions(), diff.deletions()), ); } } else { // we didn't commit anything, but we applied a hunk head_tree = new_head_tree; } } if patches_considered == 0 { warn!( config.logger, "No additions staged, try adding something to the index." ); } else if config.and_rebase { use std::process::Command; // unwrap() is safe here, as we exit early if the stack is empty let last_commit_in_stack = &stack.last().unwrap().0; // The stack isn't supposed to have any merge commits, per the check in working_stack() let number_of_parents = last_commit_in_stack.parents().len(); assert!(number_of_parents <= 1); let mut command = Command::new("git"); command.args(&["rebase", "--interactive", "--autosquash"]); if number_of_parents == 0 { command.arg("--root"); } else { // Use a range that is guaranteed to include all the commits we might have // committed "fixup!" commits for. let base_commit_sha = last_commit_in_stack.parent(0)?.id().to_string(); command.arg(&base_commit_sha); } // Don't check that we have successfully absorbed everything, nor git's // exit code -- as git will print helpful messages on its own. 
command.status().expect("could not run git rebase"); } Ok(()) } struct HunkWithCommit<'c, 'r, 'p> { hunk_to_apply: owned::Hunk, dest_commit: &'c git2::Commit<'r>, index_patch: &'p owned::Patch, } fn apply_hunk_to_tree<'repo>( repo: &'repo git2::Repository, base: &git2::Tree, hunk: &owned::Hunk, path: &[u8], ) -> Result> { let mut treebuilder = repo.treebuilder(Some(base))?; // recurse into nested tree if applicable if let Some(slash) = path.iter().position(|&x| x == b'/') { let (first, rest) = path.split_at(slash); let rest = &rest[1..]; let (subtree, submode) = { let entry = treebuilder .get(first)? .ok_or_else(|| anyhow!("couldn't find tree entry in tree for path"))?; (repo.find_tree(entry.id())?, entry.filemode()) }; // TODO: loop instead of recursing to avoid potential stack overflow let result_subtree = apply_hunk_to_tree(repo, &subtree, hunk, rest)?; treebuilder.insert(first, result_subtree.id(), submode)?; return Ok(repo.find_tree(treebuilder.write()?)?); } let (blob, mode) = { let entry = treebuilder .get(path)? .ok_or_else(|| anyhow!("couldn't find blob entry in tree for path"))?; (repo.find_blob(entry.id())?, entry.filemode()) }; // TODO: convert path to OsStr and pass it during blob_writer // creation, to get gitattributes handling (note that converting // &[u8] to &std::path::Path is only possible on unixy platforms) let mut blobwriter = repo.blob_writer(None)?; let old_content = blob.content(); let (old_start, _, _, _) = hunk.anchors(); // first, write the lines from the old content that are above the // hunk let old_content = { let (pre, post) = split_lines_after(old_content, old_start); blobwriter.write_all(pre)?; post }; // next, write the added side of the hunk for line in &*hunk.added.lines { blobwriter.write_all(line)?; } // if this hunk removed lines from the old content, those must be // skipped let (_, old_content) = split_lines_after(old_content, hunk.removed.lines.len()); // finally, write the remaining lines of the old content blobwriter.write_all(old_content)?; treebuilder.insert(path, blobwriter.commit()?, mode)?; Ok(repo.find_tree(treebuilder.write()?)?) } /// Return slices for lines [1..n] and [n+1; ...] fn split_lines_after(content: &[u8], n: usize) -> (&[u8], &[u8]) { let split_index = if n > 0 { memchr::Memchr::new(b'\n', content) .fuse() // TODO: is fuse necessary here? .nth(n - 1) // the position of '\n' ending the `n`-th line .map(|x| x + 1) .unwrap_or_else(|| content.len()) } else { 0 }; content.split_at(split_index) } #[cfg(test)] mod tests { use std::path::{Path, PathBuf}; use super::*; struct Context { repo: git2::Repository, dir: tempfile::TempDir, } impl Context { fn join(&self, p: &Path) -> PathBuf { self.dir.path().join(p) } } /// Prepare a fresh git repository with an initial commit and a file. fn prepare_repo() -> (Context, PathBuf) { let dir = tempfile::tempdir().unwrap(); let repo = git2::Repository::init(dir.path()).unwrap(); let path = PathBuf::from("test-file.txt"); std::fs::write( dir.path().join(&path), br#" line line more lines "#, ) .unwrap(); // make the borrow-checker happy by introducing a new scope { let tree = add(&repo, &path); let signature = repo .signature() .or_else(|_| git2::Signature::now("nobody", "nobody@example.com")) .unwrap(); repo.commit( Some("HEAD"), &signature, &signature, "Initial commit.", &tree, &[], ) .unwrap(); } (Context { repo, dir }, path) } /// Stage the changes made to `path`. 
fn add<'r>(repo: &'r git2::Repository, path: &Path) -> git2::Tree<'r> { let mut index = repo.index().unwrap(); index.add_path(&path).unwrap(); index.write().unwrap(); let tree_id = index.write_tree_to(&repo).unwrap(); repo.find_tree(tree_id).unwrap() } /// Prepare an empty repo, and stage some changes. fn prepare_and_stage() -> Context { let (ctx, file_path) = prepare_repo(); // add some lines to our file let path = ctx.join(&file_path); let contents = std::fs::read_to_string(&path).unwrap(); let modifications = format!("new_line1\n{contents}\nnew_line2"); std::fs::write(&path, &modifications).unwrap(); // stage it add(&ctx.repo, &file_path); ctx } fn nothing_left_in_index(ctx: Context) { let head = ctx.repo.head().unwrap().peel_to_tree().unwrap(); let diff = ctx .repo .diff_tree_to_index(Some(&head), Some(&ctx.repo.index().unwrap()), None) .unwrap(); let stats = diff.stats().unwrap(); assert_eq!(stats.files_changed(), 0); assert_eq!(stats.insertions(), 0); assert_eq!(stats.deletions(), 0); } #[test] fn multiple_fixups_per_commit() { let ctx = prepare_and_stage(); // run 'git-absorb' let drain = slog::Discard; let logger = slog::Logger::root(drain, o!()); let config = Config { dry_run: false, force: false, base: None, and_rebase: false, whole_file: false, one_fixup_per_commit: false, logger: &logger, }; run_with_repo(&config, &ctx.repo).unwrap(); let mut revwalk = ctx.repo.revwalk().unwrap(); revwalk.push_head().unwrap(); assert_eq!(revwalk.count(), 3); nothing_left_in_index(ctx); } #[test] fn one_fixup_per_commit() { let ctx = prepare_and_stage(); // run 'git-absorb' let drain = slog::Discard; let logger = slog::Logger::root(drain, o!()); let config = Config { dry_run: false, force: false, base: None, and_rebase: false, whole_file: false, one_fixup_per_commit: true, logger: &logger, }; run_with_repo(&config, &ctx.repo).unwrap(); let mut revwalk = ctx.repo.revwalk().unwrap(); revwalk.push_head().unwrap(); assert_eq!(revwalk.count(), 2); nothing_left_in_index(ctx); } } git-absorb-0.6.11/src/main.rs000064400000000000000000000102371046102023000140260ustar 00000000000000#[macro_use] extern crate clap; #[macro_use] extern crate slog; use clap::Shell; use slog::Drain; use std::io; fn main() { let args = app_from_crate!() .about("Automatically absorb staged changes into your current branch") .arg( clap::Arg::with_name("base") .help("Use this commit as the base of the absorb stack") .short("b") .long("base") .takes_value(true), ) .arg( clap::Arg::with_name("dry-run") .help("Don't make any actual changes") .short("n") .long("dry-run") .takes_value(false), ) .arg( clap::Arg::with_name("force") .help("Skip safety checks") .short("f") .long("force") .takes_value(false), ) .arg( clap::Arg::with_name("verbose") .help("Display more output") .short("v") .long("verbose") .takes_value(false), ) .arg( clap::Arg::with_name("and-rebase") .help("Run rebase if successful") .short("r") .long("and-rebase") .takes_value(false), ) .arg( clap::Arg::with_name("gen-completions") .help("Generate completions") .long("gen-completions") .takes_value(true) .possible_values(&["bash", "fish", "zsh", "powershell", "elvish"]), ) .arg( clap::Arg::with_name("whole-file") .help("Match the change against the complete file ") .short("w") .long("whole-file") .takes_value(false), ) .arg( clap::Arg::with_name("one-fixup-per-commit") .help("Only generate one fixup per commit") .short("F") .long("one-fixup-per-commit") .takes_value(false), ); let mut args_clone = args.clone(); let args = args.get_matches(); if let Some(shell) = 
args.value_of("gen-completions") { let app_name = "git-absorb"; match shell { "bash" => { args_clone.gen_completions_to(app_name, Shell::Bash, &mut io::stdout()); } "fish" => { args_clone.gen_completions_to(app_name, Shell::Fish, &mut io::stdout()); } "zsh" => { args_clone.gen_completions_to(app_name, Shell::Zsh, &mut io::stdout()); } "powershell" => { args_clone.gen_completions_to(app_name, Shell::PowerShell, &mut io::stdout()); } "elvish" => { args_clone.gen_completions_to(app_name, Shell::Elvish, &mut io::stdout()); } _ => unreachable!(), } return; } let decorator = slog_term::TermDecorator::new().build(); let drain = slog_term::FullFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); let drain = slog::LevelFilter::new( drain, if args.is_present("verbose") { slog::Level::Debug } else { slog::Level::Info }, ) .fuse(); let mut logger = slog::Logger::root(drain, o!()); if args.is_present("verbose") { logger = logger.new(o!( "module" => slog::FnValue(|record| record.module()), "line" => slog::FnValue(|record| record.line()), )); } if let Err(e) = git_absorb::run(&git_absorb::Config { dry_run: args.is_present("dry-run"), force: args.is_present("force"), base: args.value_of("base"), and_rebase: args.is_present("and-rebase"), whole_file: args.is_present("whole-file"), one_fixup_per_commit: args.is_present("one-fixup-per-commit"), logger: &logger, }) { crit!(logger, "absorb failed"; "err" => e.to_string()); // wait for async logger to finish writing messages drop(logger); ::std::process::exit(1); } } git-absorb-0.6.11/src/owned.rs000064400000000000000000000202361046102023000142160ustar 00000000000000use anyhow::{anyhow, Result}; use std::collections::hash_map::HashMap; use std::rc::Rc; #[derive(Debug)] pub struct Diff { patches: Vec, by_new: HashMap, usize>, by_old: HashMap, usize>, } impl ::std::ops::Deref for Diff { type Target = [Patch]; fn deref(&self) -> &[Patch] { self.patches.as_slice() } } impl Diff { pub fn new(diff: &git2::Diff) -> Result { let mut ret = Diff { patches: Vec::new(), by_old: HashMap::new(), by_new: HashMap::new(), }; for (delta_idx, _delta) in diff.deltas().enumerate() { let patch = Patch::new( &mut git2::Patch::from_diff(diff, delta_idx)? .ok_or_else(|| anyhow!("got empty delta"))?, )?; if ret.by_old.contains_key(&patch.old_path) { // TODO: would this case be hit if the diff was put through copy detection? return Err(anyhow!("old path already occupied")); } ret.by_old.insert(patch.old_path.clone(), ret.patches.len()); if ret.by_new.contains_key(&patch.new_path) { return Err(anyhow!("new path already occupied")); } ret.by_new.insert(patch.new_path.clone(), ret.patches.len()); ret.patches.push(patch); } Ok(ret) } pub fn by_new(&self, path: &[u8]) -> Option<&Patch> { self.by_new.get(path).map(|&idx| &self.patches[idx]) } } #[derive(Debug, Clone)] pub struct Block { pub start: usize, pub lines: Rc>>, pub trailing_newline: bool, } #[derive(Debug, Clone)] pub struct Hunk { pub added: Block, pub removed: Block, } impl Hunk { pub fn new(patch: &mut git2::Patch, idx: usize) -> Result { let (added_start, removed_start, mut added_lines, mut removed_lines) = { let (hunk, _size) = patch.hunk(idx)?; ( hunk.new_start() as usize, hunk.old_start() as usize, Vec::with_capacity(hunk.new_lines() as usize), Vec::with_capacity(hunk.old_lines() as usize), ) }; let mut added_trailing_newline = true; let mut removed_trailing_newline = true; for line_idx in 0..patch.num_lines_in_hunk(idx)? 
{ let line = patch.line_in_hunk(idx, line_idx)?; match line.origin() { '+' => { if line.num_lines() > 1 { return Err(anyhow!("wrong number of lines in hunk")); } if line .new_lineno() .ok_or_else(|| anyhow!("added line did not have lineno"))? as usize != added_start + added_lines.len() { return Err(anyhow!("added line did not reach expected lineno")); } added_lines.push(Vec::from(line.content())) } '-' => { if line.num_lines() > 1 { return Err(anyhow!("wrong number of lines in hunk")); } if line .old_lineno() .ok_or_else(|| anyhow!("removed line did not have lineno"))? as usize != removed_start + removed_lines.len() { return Err(anyhow!("removed line did not reach expected lineno",)); } removed_lines.push(Vec::from(line.content())) } '>' => { if !removed_trailing_newline { return Err(anyhow!("removed nneof was already detected")); }; removed_trailing_newline = false } '<' => { if !added_trailing_newline { return Err(anyhow!("added nneof was already detected")); }; added_trailing_newline = false } _ => return Err(anyhow!("unknown line type {:?}", line.origin())), }; } { let (hunk, _size) = patch.hunk(idx)?; if added_lines.len() != hunk.new_lines() as usize { return Err(anyhow!("hunk added block size mismatch")); } if removed_lines.len() != hunk.old_lines() as usize { return Err(anyhow!("hunk removed block size mismatch")); } } Ok(Hunk { added: Block { start: added_start, lines: Rc::new(added_lines), trailing_newline: added_trailing_newline, }, removed: Block { start: removed_start, lines: Rc::new(removed_lines), trailing_newline: removed_trailing_newline, }, }) } /// Returns the unchanged lines around this hunk. /// /// Any given hunk has four anchor points: /// /// - the last unchanged line before it, on the removed side /// - the first unchanged line after it, on the removed side /// - the last unchanged line before it, on the added side /// - the first unchanged line after it, on the added side /// /// This function returns those four line numbers, in that order. 
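    ///
    /// Worked example (illustrative): a hunk with header `-3,2 +3,1` (two
    /// lines removed starting at old line 3, one line added starting at new
    /// line 3) has anchors `(2, 5, 2, 4)`: old lines 2 and 5 and new lines 2
    /// and 4 are the nearest unchanged lines surrounding the hunk.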
pub fn anchors(&self) -> (usize, usize, usize, usize) { match (self.removed.lines.len(), self.added.lines.len()) { (0, 0) => (0, 1, 0, 1), (removed_len, 0) => ( self.removed.start - 1, self.removed.start + removed_len, self.removed.start - 1, self.removed.start, ), (0, added_len) => ( self.added.start - 1, self.added.start, self.added.start - 1, self.added.start + added_len, ), (removed_len, added_len) => ( self.removed.start - 1, self.removed.start + removed_len, self.added.start - 1, self.added.start + added_len, ), } } pub fn changed_offset(&self) -> isize { self.added.lines.len() as isize - self.removed.lines.len() as isize } pub fn header(&self) -> String { format!( "-{},{} +{},{}", self.removed.start, self.removed.lines.len(), self.added.start, self.added.lines.len() ) } pub fn shift_added_block(mut self, by: isize) -> Self { self.added.start = (self.added.start as isize + by) as usize; self } pub fn shift_both_blocks(mut self, by: isize) -> Self { self.removed.start = (self.removed.start as isize + by) as usize; self.added.start = (self.added.start as isize + by) as usize; self } } #[derive(Debug)] pub struct Patch { pub old_path: Vec, pub old_id: git2::Oid, pub new_path: Vec, pub new_id: git2::Oid, pub status: git2::Delta, pub hunks: Vec, } impl Patch { pub fn new(patch: &mut git2::Patch) -> Result { let mut ret = Patch { old_path: patch .delta() .old_file() .path_bytes() .map(Vec::from) .ok_or_else(|| anyhow!("delta with empty old path"))?, old_id: patch.delta().old_file().id(), new_path: patch .delta() .new_file() .path_bytes() .map(Vec::from) .ok_or_else(|| anyhow!("delta with empty new path"))?, new_id: patch.delta().new_file().id(), status: patch.delta().status(), hunks: Vec::with_capacity(patch.num_hunks()), }; if patch.delta().nfiles() < 1 || patch.delta().nfiles() > 2 { return Err(anyhow!("delta with multiple files")); } for idx in 0..patch.num_hunks() { ret.hunks.push(Hunk::new(patch, idx)?); } Ok(ret) } } git-absorb-0.6.11/src/stack.rs000064400000000000000000000215501046102023000142070ustar 00000000000000use anyhow::{anyhow, Result}; use std::collections::HashMap; pub const MAX_STACK_CONFIG_NAME: &str = "absorb.maxStack"; pub const MAX_STACK: usize = 10; fn max_stack(repo: &git2::Repository) -> usize { match repo .config() .and_then(|config| config.get_i64(MAX_STACK_CONFIG_NAME)) { Ok(max_stack) if max_stack > 0 => max_stack as usize, _ => MAX_STACK, } } pub fn working_stack<'repo>( repo: &'repo git2::Repository, user_provided_base: Option<&str>, force: bool, logger: &slog::Logger, ) -> Result>> { let head = repo.head()?; debug!(logger, "head found"; "head" => head.name()); if !head.is_branch() { if !force { return Err(anyhow!("HEAD is not a branch, use --force to override")); } else { warn!( logger, "HEAD is not a branch, but --force used to continue." ); } } let mut revwalk = repo.revwalk()?; revwalk.set_sorting(git2::Sort::TOPOLOGICAL)?; revwalk.push_head()?; revwalk.simplify_first_parent()?; debug!(logger, "head pushed"; "head" => head.name()); let base_commit = match user_provided_base { // https://github.com/rust-lang/rfcs/issues/1815 Some(commitish) => Some(repo.find_commit(repo.revparse_single(commitish)?.id())?), None => None, }; if let Some(base_commit) = base_commit { revwalk.hide(base_commit.id())?; debug!(logger, "commit hidden"; "commit" => base_commit.id().to_string()); } else { for branch in repo.branches(Some(git2::BranchType::Local))? 
{ let (branch, _) = branch?; let branch = branch.get().name(); match branch { Some(name) if Some(name) != head.name() => { revwalk.hide_ref(name)?; debug!(logger, "branch hidden"; "branch" => branch); } _ => { debug!(logger, "branch not hidden"; "branch" => branch); } }; } } let mut ret = Vec::new(); let mut commits_considered = 0usize; let sig = repo.signature(); for rev in revwalk { commits_considered += 1; let commit = repo.find_commit(rev?)?; if commit.parents().len() > 1 { warn!(logger, "Will not fix up past the merge commit"; "commit" => commit.id().to_string()); break; } if let Ok(ref sig) = sig { if !force && (commit.author().name_bytes() != sig.name_bytes() || commit.author().email_bytes() != sig.email_bytes()) { warn!(logger, "Will not fix up past commits not authored by you, use --force to override"; "commit" => commit.id().to_string()); break; } } if ret.len() == max_stack(repo) && user_provided_base.is_none() { warn!(logger, "stack limit reached, use --base or configure absorb.maxStack to override"; "limit" => ret.len()); break; } debug!(logger, "commit pushed onto stack"; "commit" => commit.id().to_string()); ret.push(commit); } if commits_considered == 0 { if user_provided_base.is_none() { warn!(logger, "Please use --base to specify a base commit."); } else { warn!(logger, "Please try a different --base"); } } Ok(ret) } pub fn summary_counts<'repo, 'a, I>(commits: I) -> HashMap where I: IntoIterator>, // TODO: we have to use a hashmap of owned strings because the // commit summary has the 'a lifetime (the commit outlives this // function, but the reference to the commit does not), it would // be nice if the commit summary had the 'repo lifetime instead 'repo: 'a, { let mut ret = HashMap::new(); for commit in commits { let count = ret // TODO: unnecessary allocation if key already exists .entry(commit.summary().unwrap_or("").to_owned()) .or_insert(0); *count += 1; } ret } #[cfg(test)] mod tests { use tempfile; use super::*; fn empty_slog() -> slog::Logger { slog::Logger::root(slog::Discard, o!()) } fn init_repo() -> (tempfile::TempDir, git2::Repository) { // the repo will be deleted when the tempdir gets dropped let dir = tempfile::TempDir::new().unwrap(); // TODO: use in-memory ODB instead (blocked on git2 support) let repo = git2::Repository::init(&dir).unwrap(); let mut config = repo.config().unwrap(); config.set_str("user.name", "nobody").unwrap(); config.set_str("user.email", "nobody@example.com").unwrap(); (dir, repo) } fn empty_commit<'repo>( repo: &'repo git2::Repository, update_ref: &str, message: &str, parents: &[&git2::Commit], ) -> git2::Commit<'repo> { let sig = repo.signature().unwrap(); let tree = repo .find_tree(repo.treebuilder(None).unwrap().write().unwrap()) .unwrap(); repo.find_commit( repo.commit(Some(update_ref), &sig, &sig, message, &tree, parents) .unwrap(), ) .unwrap() } fn empty_commit_chain<'repo>( repo: &'repo git2::Repository, update_ref: &str, initial_parents: &[&git2::Commit], length: usize, ) -> Vec> { let mut ret = Vec::with_capacity(length); for idx in 0..length { let next = if let Some(last) = ret.last() { // TODO: how to deduplicate the rest of this call if last doesn't live long enough? 
empty_commit(repo, update_ref, &idx.to_string(), &[last]) } else { empty_commit(repo, update_ref, &idx.to_string(), initial_parents) }; ret.push(next) } assert_eq!(ret.len(), length); ret } fn assert_stack_matches_chain(length: usize, stack: &[git2::Commit], chain: &[git2::Commit]) { assert_eq!(stack.len(), length); for (chain_commit, stack_commit) in chain.iter().rev().take(length).zip(stack) { assert_eq!(stack_commit.id(), chain_commit.id()); } } #[test] fn test_stack_hides_other_branches() { let (_dir, repo) = init_repo(); let commits = empty_commit_chain(&repo, "HEAD", &[], 2); repo.branch("hide", &commits[0], false).unwrap(); assert_stack_matches_chain( 1, &working_stack(&repo, None, false, &empty_slog()).unwrap(), &commits, ); } #[test] fn test_stack_uses_custom_base() { let (_dir, repo) = init_repo(); let commits = empty_commit_chain(&repo, "HEAD", &[], 3); repo.branch("hide", &commits[1], false).unwrap(); assert_stack_matches_chain( 2, &working_stack( &repo, Some(&commits[0].id().to_string()), false, &empty_slog(), ) .unwrap(), &commits, ); } #[test] fn test_stack_stops_at_configured_limit() { let (_dir, repo) = init_repo(); let commits = empty_commit_chain(&repo, "HEAD", &[], MAX_STACK + 2); repo.config() .unwrap() .set_i64(MAX_STACK_CONFIG_NAME, (MAX_STACK + 1) as i64) .unwrap(); assert_stack_matches_chain( MAX_STACK + 1, &working_stack(&repo, None, false, &empty_slog()).unwrap(), &commits, ); } #[test] fn test_stack_stops_at_foreign_author() { let (_dir, repo) = init_repo(); let old_commits = empty_commit_chain(&repo, "HEAD", &[], 3); repo.config() .unwrap() .set_str("user.name", "nobody2") .unwrap(); let new_commits = empty_commit_chain(&repo, "HEAD", &[old_commits.last().unwrap()], 2); assert_stack_matches_chain( 2, &working_stack(&repo, None, false, &empty_slog()).unwrap(), &new_commits, ); } #[test] fn test_stack_stops_at_merges() { let (_dir, repo) = init_repo(); let first = empty_commit(&repo, "HEAD", "first", &[]); // equivalent to checkout --orphan repo.set_head("refs/heads/new").unwrap(); let second = empty_commit(&repo, "HEAD", "second", &[]); // the current commit must be the first parent let merge = empty_commit(&repo, "HEAD", "merge", &[&second, &first]); let commits = empty_commit_chain(&repo, "HEAD", &[&merge], 2); assert_stack_matches_chain( 2, &working_stack(&repo, None, false, &empty_slog()).unwrap(), &commits, ); } }
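
// An illustrative sketch, not part of the original test suite: the module and
// test names below are invented for exposition. It exercises `max_stack`'s
// handling of the `absorb.maxStack` setting -- a positive configured value is
// used as the stack limit, while a non-positive value falls back to MAX_STACK.
#[cfg(test)]
mod max_stack_config_example {
    use super::*;

    #[test]
    fn positive_value_wins_nonpositive_falls_back() {
        let dir = tempfile::TempDir::new().unwrap();
        let repo = git2::Repository::init(&dir).unwrap();
        let mut config = repo.config().unwrap();

        // a positive absorb.maxStack in the repository config takes effect
        config.set_i64(MAX_STACK_CONFIG_NAME, 42).unwrap();
        assert_eq!(max_stack(&repo), 42);

        // a non-positive value is ignored and the built-in default applies
        config.set_i64(MAX_STACK_CONFIG_NAME, 0).unwrap();
        assert_eq!(max_stack(&repo), MAX_STACK);
    }
}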