breezyshim-0.7.5/.cargo_vcs_info.json

{
  "git": {
    "sha1": "23ca73b0b4e243168ea3efadc9a1d4392744975f"
  },
  "path_in_vcs": ""
}

breezyshim-0.7.5/COPYING.txt

GNU GENERAL PUBLIC LICENSE
Version 2, June 1991

Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

Preamble

The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.

When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.

To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.

For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.

We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.

Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.

Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.

The precise terms and conditions for copying, distribution and modification follow.

GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License.
The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. 
In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. 
You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. 
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>

This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

Also add information on how to contact you by electronic and paper mail.

If the program is interactive, make it output a short notice like this when it starts in an interactive mode:

Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.

You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:

Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice

This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License.

breezyshim-0.7.5/Cargo.lock

# This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "aho-corasick" version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] [[package]] name = "allocator-api2" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "atoi" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" dependencies = [ "num-traits", ] [[package]] name = "atomic-waker" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" dependencies = [ "serde_core", ] [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "breezyshim" version = "0.7.5" dependencies = [ "chrono", "ctor", "deb822-lossless", "debian-changelog", "debian-control", "debversion", "difflib", "dirty-tracker", "launchpadlib", "lazy-regex", "lazy_static", "log", "maplit", "patchkit", "percent-encoding", "pyo3", "pyo3-filelike", "regex", "serde", "serial_test", "sqlx", "tempfile", "url", "whoami", ] [[package]] name = "bumpalo" version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "cc" version = "1.2.48" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a" dependencies = [ "find-msvc-tools", "shlex", ] [[package]] name = "cesu8" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" [[package]] name = "cfg-if" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "chrono" version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ "iana-time-zone", "js-sys", "num-traits", "serde", "wasm-bindgen", "windows-link", ] [[package]] name = "combine" version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ "bytes", "memchr", ] [[package]] name = "concurrent-queue" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ "crossbeam-utils", ] [[package]] name = "const-oid" version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "core-foundation" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crc" version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" dependencies = [ "crc-catalog", ] [[package]] name = "crc-catalog" version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crossbeam-channel" version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-queue" version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" dependencies = [ 
"crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto-common" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array", "typenum", ] [[package]] name = "ctor" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb230974aaf0aca4d71665bed0aca156cf43b764fcb9583b69c6c3e686f35e72" dependencies = [ "ctor-proc-macro", "dtor", ] [[package]] name = "ctor-proc-macro" version = "0.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52560adf09603e58c9a7ee1fe1dcb95a16927b17c127f0ac02d6e768a0e25bc1" [[package]] name = "deb822-derive" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86bf2d0fa4ce2457e94bd7efb15aeadc115297f04b660bd0da706729e0d91442" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "deb822-fast" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f410ccb5cbd9b81d56b290131bad4350ecf8b46416fb901e759dc1e6916a8198" dependencies = [ "deb822-derive", ] [[package]] name = "deb822-lossless" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdcadf12851ddb37dc938e724beeb50e83bfe1a1fda3c15b997dc1105ec49e3d" dependencies = [ "pyo3", "regex", "rowan", "serde", ] [[package]] name = "debian-changelog" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c77ec2b64e3c7baf5413982f17c78933713069eba0601064d78a75bbde8b5b4d" dependencies = [ "chrono", "debversion", "lazy-regex", "log", "rowan", "textwrap", "whoami", ] [[package]] name = "debian-control" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfc2596d8356567e2fdd8653210d94dd06ca8c4ab9679ec6edf443f9efaeb9c3" dependencies = [ "chrono", "deb822-fast", "deb822-lossless", "debversion", "pyo3", "regex", "rowan", "url", ] [[package]] name = "debversion" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4f5cc9ce1d5067bee8060dd75208525dd0133ffea0b2960fef64ab85d58c4c5" dependencies = [ "chrono", "lazy-regex", "num-bigint", "pyo3", "serde", ] [[package]] name = "der" version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" dependencies = [ "const-oid", "pem-rfc7468", "zeroize", ] [[package]] name = "difflib" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "const-oid", "crypto-common", "subtle", ] [[package]] name = "dirty-tracker" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57f673af5cabab0d10b822fae4b348c2f5fdc56d90474e26f5dcde0f94fce488" dependencies = [ "notify", "tempfile", ] [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "dotenvy" version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "dtor" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "404d02eeb088a82cfd873006cb713fe411306c7d182c344905e101fb1167d301" dependencies = [ "dtor-proc-macro", ] [[package]] name = "dtor-proc-macro" version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f678cf4a922c215c63e0de95eb1ff08a958a81d47e485cf9da1e27bf6305cfa5" [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" dependencies = [ "serde", ] [[package]] name = "encoding_rs" version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] [[package]] name = "equivalent" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", "windows-sys 0.61.2", ] [[package]] name = "etcetera" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ "cfg-if", "home", "windows-sys 0.48.0", ] [[package]] name = "event-listener" version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" dependencies = [ "concurrent-queue", "parking", "pin-project-lite", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "filetime" version = "0.2.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.60.2", ] [[package]] name = "find-msvc-tools" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" [[package]] name = "flume" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" dependencies = [ "futures-core", "futures-sink", "spin", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "foreign-types" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" dependencies = [ "foreign-types-shared", ] [[package]] name = "foreign-types-shared" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] [[package]] name = "fsevent-sys" version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" dependencies = [ "libc", ] [[package]] name = "futf" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" dependencies = [ "mac", "new_debug_unreachable", ] [[package]] name = "futures-channel" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", ] [[package]] name = "futures-core" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", "futures-util", ] [[package]] name = "futures-intrusive" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" dependencies = [ "futures-core", "lock_api", "parking_lot", ] [[package]] name = "futures-io" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-sink" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", "futures-io", "futures-sink", "futures-task", "memchr", "pin-project-lite", "pin-utils", "slab", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "getrandom" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "getrandom" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "libc", "r-efi", "wasip2", ] [[package]] name = "h2" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", "http", "indexmap", "slab", "tokio", "tokio-util", "tracing", ] [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "hashbrown" version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", "foldhash", ] [[package]] name = "hashbrown" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" [[package]] name = "hashlink" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ "hashbrown 0.15.5", ] [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hkdf" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] [[package]] name = "hmac" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ "digest", ] [[package]] name = "home" version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "html2md" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8cff9891f2e0d9048927fbdfc28b11bf378f6a93c7ba70b23d0fbee9af6071b4" dependencies = [ "html5ever", "jni", "lazy_static", "markup5ever_rcdom", "percent-encoding", "regex", ] [[package]] name = "html5ever" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4" dependencies = [ "log", "mac", "markup5ever", "proc-macro2", "quote", "syn", ] [[package]] name = "http" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", "itoa", ] [[package]] name = "http-body" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http", ] [[package]] name = "http-body-util" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", "http", "http-body", "pin-project-lite", ] [[package]] name = "httparse" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "hyper" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", "bytes", "futures-channel", "futures-core", "h2", "http", "http-body", "httparse", "itoa", "pin-project-lite", "pin-utils", "smallvec", "tokio", "want", ] [[package]] name = "hyper-rustls" version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ "http", "hyper", "hyper-util", "rustls", "rustls-pki-types", "tokio", "tokio-rustls", "tower-service", ] [[package]] name = "hyper-tls" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", "hyper", "hyper-util", "native-tls", "tokio", "tokio-native-tls", "tower-service", ] [[package]] name = "hyper-util" version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" dependencies = [ "base64", "bytes", "futures-channel", "futures-core", "futures-util", "http", "http-body", "hyper", "ipnet", "libc", "percent-encoding", "pin-project-lite", "socket2", "system-configuration", "tokio", "tower-service", "tracing", "windows-registry", ] [[package]] name = "iana-time-zone" version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "log", "wasm-bindgen", "windows-core", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = "icu_collections" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_locale_core" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", "tinystr", "writeable", "zerovec", ] [[package]] name = "icu_normalizer" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "zerovec", ] [[package]] name = "icu_normalizer_data" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" version = "2.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" dependencies = [ "icu_collections", "icu_locale_core", "icu_properties_data", "icu_provider", "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" [[package]] name = "icu_provider" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", "icu_locale_core", "writeable", "yoke", "zerofrom", "zerotrie", "zerovec", ] [[package]] name = "idna" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", "utf8_iter", ] [[package]] name = "idna_adapter" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", ] [[package]] name = "indexmap" version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", "hashbrown 0.16.1", ] [[package]] name = "indoc" version = "2.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" dependencies = [ "rustversion", ] [[package]] name = "inotify" version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" dependencies = [ "bitflags 1.3.2", "inotify-sys", "libc", ] [[package]] name = "inotify-sys" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" dependencies = [ "libc", ] [[package]] name = "ipnet" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "iri-string" version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" dependencies = [ "memchr", "serde", ] [[package]] name = "itoa" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jni" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" dependencies = [ "cesu8", "combine", "jni-sys", "log", "thiserror 1.0.69", "walkdir", ] [[package]] name = "jni-sys" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "js-sys" version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] name = "kqueue" version = "1.1.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" dependencies = [ "kqueue-sys", "libc", ] [[package]] name = "kqueue-sys" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" dependencies = [ "bitflags 1.3.2", "libc", ] [[package]] name = "launchpadlib" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d7ea8dcc3b9f05d1bb422a4b0041ad79994faa5000e806ba1576dd9b9df7bca" dependencies = [ "chrono", "debversion", "form_urlencoded", "lazy_static", "log", "mime", "percent-encoding", "rand 0.9.2", "reqwest", "serde", "serde_json", "url", "wadl", ] [[package]] name = "lazy-regex" version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "191898e17ddee19e60bccb3945aa02339e81edd4a8c50e21fd4d48cdecda7b29" dependencies = [ "lazy-regex-proc_macros", "once_cell", "regex", ] [[package]] name = "lazy-regex-proc_macros" version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c35dc8b0da83d1a9507e12122c80dea71a9c7c613014347392483a83ea593e04" dependencies = [ "proc-macro2", "quote", "regex", "syn", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ "spin", ] [[package]] name = "libc" version = "0.2.178" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" [[package]] name = "libm" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libredox" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ "bitflags 2.10.0", "libc", "redox_syscall", ] [[package]] name = "libsqlite3-sys" version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ "pkg-config", "vcpkg", ] [[package]] name = "linux-raw-sys" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "lock_api" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ "scopeguard", ] [[package]] name = "log" version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "mac" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "maplit" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "markup5ever" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45" dependencies = [ "log", "phf", "phf_codegen", "string_cache", "string_cache_codegen", "tendril", ] [[package]] name = "markup5ever_rcdom" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edaa21ab3701bfee5099ade5f7e1f84553fd19228cf332f13cd6e964bf59be18" dependencies = [ "html5ever", "markup5ever", "tendril", "xml5ever", ] [[package]] name = "md-5" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ "cfg-if", "digest", ] [[package]] name = "memchr" version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "memoffset" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] [[package]] name = "mime" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", ] [[package]] name = "mio" version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", "wasi", "windows-sys 0.48.0", ] [[package]] name = "mio" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" dependencies = [ "libc", "wasi", "windows-sys 0.61.2", ] [[package]] name = "native-tls" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" dependencies = [ "libc", "log", "openssl", "openssl-probe", "openssl-sys", "schannel", "security-framework", "security-framework-sys", "tempfile", ] [[package]] name = "new_debug_unreachable" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "notify" version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ "bitflags 2.10.0", "crossbeam-channel", "filetime", "fsevent-sys", "inotify", "kqueue", "libc", "log", "mio 0.8.11", "walkdir", "windows-sys 0.48.0", ] [[package]] name = "num-bigint" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", ] [[package]] name = "num-bigint-dig" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" dependencies = [ 
"lazy_static", "libm", "num-integer", "num-iter", "num-traits", "rand 0.8.5", "smallvec", "zeroize", ] [[package]] name = "num-integer" version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ "num-traits", ] [[package]] name = "num-iter" version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" dependencies = [ "autocfg", "num-integer", "num-traits", ] [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", ] [[package]] name = "once_cell" version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "openssl" version = "0.10.75" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" dependencies = [ "bitflags 2.10.0", "cfg-if", "foreign-types", "libc", "once_cell", "openssl-macros", "openssl-sys", ] [[package]] name = "openssl-macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" version = "0.9.111" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "parking" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-link", ] [[package]] name = "patchkit" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f21e87e02a475262c3166d32fea34710510448b37117cc448c1be03975816baf" dependencies = [ "chrono", "lazy-regex", "lazy_static", "once_cell", "proc-macro2", "regex", "rowan", ] [[package]] name = "pem-rfc7468" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" dependencies = [ "base64ct", ] [[package]] name = "percent-encoding" version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "phf" version = "0.11.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ "phf_shared", ] [[package]] name = "phf_codegen" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ "phf_generator", "phf_shared", ] [[package]] name = "phf_generator" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared", "rand 0.8.5", ] [[package]] name = "phf_shared" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher", ] [[package]] name = "pin-project-lite" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkcs1" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" dependencies = [ "der", "pkcs8", "spki", ] [[package]] name = "pkcs8" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", "spki", ] [[package]] name = "pkg-config" version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "portable-atomic" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" [[package]] name = "potential_utf" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" dependencies = [ "zerovec", ] [[package]] name = "ppv-lite86" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] [[package]] name = "precomputed-hash" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "proc-macro2" version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] [[package]] name = "pyo3" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab53c047fcd1a1d2a8820fe84f05d6be69e9526be40cb03b73f86b6b03e6d87d" dependencies = [ "chrono", "indoc", "libc", "memoffset", "once_cell", "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", "serde", "unindent", ] [[package]] name = "pyo3-build-config" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b455933107de8642b4487ed26d912c2d899dec6114884214a0b3bb3be9261ea6" 
dependencies = [ "target-lexicon", ] [[package]] name = "pyo3-ffi" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c85c9cbfaddf651b1221594209aed57e9e5cff63c4d11d1feead529b872a089" dependencies = [ "libc", "pyo3-build-config", ] [[package]] name = "pyo3-filelike" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57429f455b9811f2a8af73d8bae91e028fbf6f62ad4011073c2248bb028a2288" dependencies = [ "pyo3", ] [[package]] name = "pyo3-macros" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a5b10c9bf9888125d917fb4d2ca2d25c8df94c7ab5a52e13313a07e050a3b02" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", "syn", ] [[package]] name = "pyo3-macros-backend" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03b51720d314836e53327f5871d4c0cfb4fb37cc2c4a11cc71907a86342c40f9" dependencies = [ "heck", "proc-macro2", "pyo3-build-config", "quote", "syn", ] [[package]] name = "quote" version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] [[package]] name = "r-efi" version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha 0.3.1", "rand_core 0.6.4", ] [[package]] name = "rand" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core 0.6.4", ] [[package]] name = "rand_chacha" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", "rand_core 0.9.3", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.16", ] [[package]] name = "rand_core" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ "getrandom 0.3.4", ] [[package]] name = "redox_syscall" version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ "bitflags 2.10.0", ] [[package]] name = "regex" version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" version = "0.12.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64", "bytes", "encoding_rs", "futures-channel", "futures-core", "futures-util", "h2", "http", "http-body", "http-body-util", "hyper", "hyper-rustls", "hyper-tls", "hyper-util", "js-sys", "log", "mime", "mime_guess", "native-tls", "percent-encoding", "pin-project-lite", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "tokio", "tokio-native-tls", "tower", "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", ] [[package]] name = "ring" version = "0.17.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", "getrandom 0.2.16", "libc", "untrusted", "windows-sys 0.52.0", ] [[package]] name = "rowan" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "417a3a9f582e349834051b8a10c8d71ca88da4211e4093528e36b9845f6b5f21" dependencies = [ "countme", "hashbrown 0.14.5", "rustc-hash", "text-size", ] [[package]] name = "rsa" version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" dependencies = [ "const-oid", "digest", "num-bigint-dig", "num-integer", "num-traits", "pkcs1", "pkcs8", "rand_core 0.6.4", "signature", "spki", "subtle", "zeroize", ] [[package]] name = "rustc-hash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustix" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ "bitflags 2.10.0", "errno", "libc", "linux-raw-sys", "windows-sys 0.61.2", ] [[package]] name = "rustls" version = "0.23.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "once_cell", "rustls-pki-types", "rustls-webpki", "subtle", "zeroize", ] [[package]] name = "rustls-pki-types" version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "708c0f9d5f54ba0272468c1d306a52c495b31fa155e91bc25371e6df7996908c" dependencies = [ "zeroize", ] [[package]] name = "rustls-webpki" version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ "ring", "rustls-pki-types", "untrusted", ] [[package]] name = "rustversion" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" 
[[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "scc" version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46e6f046b7fef48e2660c57ed794263155d713de679057f2d0c169bfc6e756cc" dependencies = [ "sdd", ] [[package]] name = "schannel" version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sdd" version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "490dcfcbfef26be6800d11870ff2df8774fa6e86d047e3e8c8a76b25655e41ca" [[package]] name = "security-framework" version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.10.0", "core-foundation", "core-foundation-sys", "libc", "security-framework-sys", ] [[package]] name = "security-framework-sys" version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "serde" version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ "serde_core", "serde_derive", ] [[package]] name = "serde_core" version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", "serde_core", ] [[package]] name = "serde_urlencoded" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", "itoa", "ryu", "serde", ] [[package]] name = "serial_test" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b258109f244e1d6891bf1053a55d63a5cd4f8f4c30cf9a1280989f80e7a1fa9" dependencies = [ "once_cell", "parking_lot", "scc", "serial_test_derive", ] [[package]] name = "serial_test_derive" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "sha2" version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signature" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", "rand_core 0.6.4", ] [[package]] name = "siphasher" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" dependencies = [ "serde", ] [[package]] name = "smawk" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "socket2" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" dependencies = [ "libc", "windows-sys 0.60.2", ] [[package]] name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" dependencies = [ "lock_api", ] [[package]] name = "spki" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", ] [[package]] name = "sqlx" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" dependencies = [ "sqlx-core", "sqlx-macros", "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", ] [[package]] name = "sqlx-core" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" dependencies = [ "base64", "bytes", "crc", "crossbeam-queue", "either", "event-listener", "futures-core", "futures-intrusive", "futures-io", "futures-util", "hashbrown 0.15.5", "hashlink", "indexmap", "log", "memchr", "once_cell", "percent-encoding", "serde", "serde_json", "sha2", "smallvec", "thiserror 2.0.17", "tracing", "url", ] [[package]] name = "sqlx-macros" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" dependencies = [ "proc-macro2", "quote", "sqlx-core", "sqlx-macros-core", "syn", ] [[package]] name = "sqlx-macros-core" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" 
dependencies = [ "dotenvy", "either", "heck", "hex", "once_cell", "proc-macro2", "quote", "serde", "serde_json", "sha2", "sqlx-core", "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", "syn", "url", ] [[package]] name = "sqlx-mysql" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" dependencies = [ "atoi", "base64", "bitflags 2.10.0", "byteorder", "bytes", "crc", "digest", "dotenvy", "either", "futures-channel", "futures-core", "futures-io", "futures-util", "generic-array", "hex", "hkdf", "hmac", "itoa", "log", "md-5", "memchr", "once_cell", "percent-encoding", "rand 0.8.5", "rsa", "serde", "sha1", "sha2", "smallvec", "sqlx-core", "stringprep", "thiserror 2.0.17", "tracing", "whoami", ] [[package]] name = "sqlx-postgres" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" dependencies = [ "atoi", "base64", "bitflags 2.10.0", "byteorder", "crc", "dotenvy", "etcetera", "futures-channel", "futures-core", "futures-util", "hex", "hkdf", "hmac", "home", "itoa", "log", "md-5", "memchr", "once_cell", "rand 0.8.5", "serde", "serde_json", "sha2", "smallvec", "sqlx-core", "stringprep", "thiserror 2.0.17", "tracing", "whoami", ] [[package]] name = "sqlx-sqlite" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" dependencies = [ "atoi", "flume", "futures-channel", "futures-core", "futures-executor", "futures-intrusive", "futures-util", "libsqlite3-sys", "log", "percent-encoding", "serde", "serde_urlencoded", "sqlx-core", "thiserror 2.0.17", "tracing", "url", ] [[package]] name = "stable_deref_trait" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "string_cache" version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot", "phf_shared", "precomputed-hash", "serde", ] [[package]] name = "string_cache_codegen" version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ "phf_generator", "phf_shared", "proc-macro2", "quote", ] [[package]] name = "stringprep" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" dependencies = [ "unicode-bidi", "unicode-normalization", "unicode-properties", ] [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" version = "2.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] [[package]] name = "synstructure" 
version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "system-configuration" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags 2.10.0", "core-foundation", "system-configuration-sys", ] [[package]] name = "system-configuration-sys" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "target-lexicon" version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df7f62577c25e07834649fc3b39fafdc597c0a3527dc1c60129201ccfcbaa50c" [[package]] name = "tempfile" version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.4", "once_cell", "rustix", "windows-sys 0.61.2", ] [[package]] name = "tendril" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" dependencies = [ "futf", "mac", "utf-8", ] [[package]] name = "text-size" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "textwrap" version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" dependencies = [ "smawk", "unicode-linebreak", "unicode-width", ] [[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ "thiserror-impl 2.0.17", ] [[package]] name = "thiserror-impl" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "thiserror-impl" version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tinystr" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", ] [[package]] name = "tinyvec" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" 
[[package]] name = "tokio" version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ "bytes", "libc", "mio 1.1.0", "pin-project-lite", "socket2", "windows-sys 0.61.2", ] [[package]] name = "tokio-native-tls" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", "tokio", ] [[package]] name = "tokio-rustls" version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ "rustls", "tokio", ] [[package]] name = "tokio-util" version = "0.7.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", ] [[package]] name = "tower" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", "sync_wrapper", "tokio", "tower-layer", "tower-service", ] [[package]] name = "tower-http" version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cf146f99d442e8e68e585f5d798ccd3cad9a7835b917e09728880a862706456" dependencies = [ "bitflags 2.10.0", "bytes", "futures-util", "http", "http-body", "iri-string", "pin-project-lite", "tower", "tower-layer", "tower-service", ] [[package]] name = "tower-layer" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647" dependencies = [ "log", "pin-project-lite", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tracing-core" version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c" dependencies = [ "once_cell", ] [[package]] name = "try-lock" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "unicase" version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-bidi" version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" [[package]] name = "unicode-ident" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "unicode-linebreak" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-normalization" version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" dependencies = [ "tinyvec", ] [[package]] name = "unicode-properties" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d" [[package]] name = "unicode-width" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" [[package]] name = "unindent" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", ] [[package]] name = "utf-8" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8_iter" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wadl" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "289ef5a1ae1bfce466d46663945372ae57744357b2b47c69fa91335ce0c9e939" dependencies = [ "form_urlencoded", "html2md", "iri-string", "lazy_static", "log", "mime", "proc-macro2", "quote", "reqwest", "serde_json", "syn", "url", "xmltree", ] [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "want" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ "try-lock", ] [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = 
"wasip2" version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ "wit-bindgen", ] [[package]] name = "wasite" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" version = "0.4.56" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" dependencies = [ "cfg-if", "js-sys", "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" dependencies = [ "bumpalo", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" dependencies = [ "unicode-ident", ] [[package]] name = "web-sys" version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "whoami" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" dependencies = [ "libredox", "wasite", "web-sys", ] [[package]] name = "winapi-util" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ "windows-sys 0.61.2", ] [[package]] name = "windows-core" version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", "windows-link", "windows-result", "windows-strings", ] [[package]] name = "windows-implement" version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "windows-interface" version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-registry" version = 
"0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ "windows-link", "windows-result", "windows-strings", ] [[package]] name = "windows-result" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ "windows-link", ] [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ "windows-targets 0.48.5", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ "windows-targets 0.53.5", ] [[package]] name = "windows-sys" version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ "windows-link", ] [[package]] name = "windows-targets" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm 0.48.5", "windows_aarch64_msvc 0.48.5", "windows_i686_gnu 0.48.5", "windows_i686_msvc 0.48.5", "windows_x86_64_gnu 0.48.5", "windows_x86_64_gnullvm 0.48.5", "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows-targets" version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ "windows-link", "windows_aarch64_gnullvm 0.53.1", "windows_aarch64_msvc 0.53.1", "windows_i686_gnu 0.53.1", "windows_i686_gnullvm 0.53.1", "windows_i686_msvc 0.53.1", "windows_x86_64_gnu 0.53.1", "windows_x86_64_gnullvm 0.53.1", "windows_x86_64_msvc 0.53.1", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = 
"windows_x86_64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "wit-bindgen" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "xml" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2df5825faced2427b2da74d9100f1e2e93c533fff063506a81ede1cf517b2e7e" [[package]] name = "xml5ever" version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bbb26405d8e919bc1547a5aa9abc95cbfa438f04844f5fdd9dc7596b748bf69" dependencies = [ "log", "mac", "markup5ever", ] [[package]] name = "xmltree" version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbc04313cab124e498ab1724e739720807b6dc405b9ed0edc5860164d2e4ff70" dependencies = [ "xml", ] [[package]] name = "yoke" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ "stable_deref_trait", "yoke-derive", "zerofrom", ] [[package]] name = "yoke-derive" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zerocopy" version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "zerofrom" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zeroize" version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" [[package]] name = "zerotrie" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" dependencies = [ "displaydoc", "yoke", "zerofrom", ] [[package]] 
name = "zerovec" version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", "syn", ] breezyshim-0.7.5/Cargo.toml0000644000000056260000000000100112350ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.83" name = "breezyshim" version = "0.7.5" authors = [ "Martin Packman ", "Jelmer Vernooij ", ] build = false include = [ "src/*.rs", "Cargo.toml", "COPYING.txt", "src/*/*.rs", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Rust shim around the Breezy Python API" homepage = "https://www.breezy-vcs.org/" documentation = "https://docs.rs/breezyshim" readme = "README.md" license = "GPL-2.0+" repository = "https://github.com/breezy-team/breezyshim" [features] auto-initialize = ["dep:ctor"] debian = [ "dep:debversion", "dep:debian-control", "dep:debian-changelog", "dep:difflib", "dep:deb822-lossless", ] default = ["auto-initialize"] dirty-tracker = ["dep:dirty-tracker"] launchpad = ["dep:launchpadlib"] sqlx = ["sqlx/postgres"] [lib] name = "breezyshim" path = "src/lib.rs" [dependencies.chrono] version = "0.4" [dependencies.ctor] version = ">=0.1,<0.7" optional = true [dependencies.deb822-lossless] version = ">=0.4,<0.6" optional = true [dependencies.debian-changelog] version = ">=0.2.3,<0.3" optional = true [dependencies.debian-control] version = ">=0.1.44,<0.3" features = ["python-debian"] optional = true [dependencies.debversion] version = ">=0.5.0,<0.6" features = ["python-debian"] optional = true [dependencies.difflib] version = "0.4.0" optional = true [dependencies.dirty-tracker] version = "0.3" optional = true [dependencies.launchpadlib] version = ">=0.4.5,<0.6" optional = true default-features = false [dependencies.lazy-regex] version = ">=3.4,<4" [dependencies.lazy_static] version = "1.4.0" [dependencies.log] version = "0.4" [dependencies.patchkit] version = ">=0.2.2,<0.3" [dependencies.percent-encoding] version = "2.3.1" [dependencies.pyo3] version = ">=0.27, <0.28" features = [ "chrono", "serde", ] [dependencies.pyo3-filelike] version = ">=0.5.1,<0.6" [dependencies.regex] version = ">=1.10,<2" [dependencies.serde] version = "1" features = ["derive"] [dependencies.sqlx] version = ">=0.8,<0.9" optional = true [dependencies.tempfile] version = "3.20.0" [dependencies.url] version = "2" features = ["serde"] [dependencies.whoami] version = ">=1.5,<2" [dev-dependencies.maplit] version = "1" [dev-dependencies.pyo3] version = ">=0.27, <0.28" features = ["chrono"] [dev-dependencies.serial_test] version = ">=2,<4" default-features = false breezyshim-0.7.5/Cargo.toml.orig000064400000000000000000000037101046102023000147060ustar 00000000000000[package] name = "breezyshim" version = "0.7.5" 
edition = "2021" rust-version = "1.83" license = "GPL-2.0+" description = "Rust shim around the Breezy Python API" homepage = "https://www.breezy-vcs.org/" repository = "https://github.com/breezy-team/breezyshim" documentation = "https://docs.rs/breezyshim" authors = [ "Martin Packman ", "Jelmer Vernooij ",] include = ["src/*.rs", "Cargo.toml", "COPYING.txt", "src/*/*.rs"] [dependencies] pyo3-filelike = ">=0.5.1,<0.6" tempfile = "3.20.0" lazy_static = "1.4.0" chrono = "0.4" ctor = { version = ">=0.1,<0.7", optional = true } lazy-regex = ">=3.4,<4" regex = ">=1.10,<2" patchkit = ">=0.2.2,<0.3" log = "0.4" whoami = ">=1.5,<2" debian-control = { version = ">=0.1.44,<0.3", optional = true, features = ["python-debian"] } deb822-lossless = { version = ">=0.4,<0.6", optional = true } dirty-tracker = { version = "0.3", optional = true } debian-changelog = { version = ">=0.2.3,<0.3", optional = true } difflib = { version = "0.4.0", optional = true } percent-encoding = "2.3.1" sqlx = { version = ">=0.8,<0.9", optional = true } launchpadlib = { version = ">=0.4.5,<0.6", optional = true, default-features = false } [features] default = ["auto-initialize"] debian = ["dep:debversion", "dep:debian-control", "dep:debian-changelog", "dep:difflib", "dep:deb822-lossless"] dirty-tracker = ["dep:dirty-tracker"] auto-initialize = ["dep:ctor"] sqlx = ["sqlx/postgres"] launchpad = ["dep:launchpadlib"] [dependencies.debversion] version = ">=0.5.0,<0.6" optional = true features = [ "python-debian",] [dependencies.url] version = "2" features = [ "serde",] [dependencies.serde] version = "1" features = [ "derive",] [dependencies.pyo3] workspace = true features = [ "serde",] [workspace.dependencies] pyo3 = { version = ">=0.27, <0.28", features = ["chrono"] } [dev-dependencies] maplit = "1" serial_test = { version = ">=2,<4", default-features = false } [dev-dependencies.pyo3] workspace = true breezyshim-0.7.5/README.md000064400000000000000000000011471046102023000133000ustar 00000000000000# Rust wrapper for Breezy This crate contains a rust wrapper for the Breezy API, which is written in Python. Breezy itself is being ported to Rust, but until that port has completed, this crate allows access to the most important Breezy APIs via Rust. The Rust API here will follow the Breezy 4.0 Rust API as much as possible, to make porting easier. ## prelude This crate provides a prelude module that re-exports the most important types and traits from the Breezy API. This allows you to use the Breezy API without having to import each type and trait individually. ```rust use breezyshim::prelude::*; ``` breezyshim-0.7.5/src/bazaar/mod.rs000064400000000000000000000117301046102023000151740ustar 00000000000000//! Bazaar-specific functionality. //! //! This module provides types and functions for working with Bazaar repositories. //! Bazaar was the original version control system that Breezy evolved from. use pyo3::exceptions::PyModuleNotFoundError; use pyo3::prelude::*; pub mod tree; /// A Bazaar file identifier. /// /// Bazaar uses unique identifiers for files, which allow it to track files across /// renames and other operations. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct FileId(Vec); impl Default for FileId { fn default() -> Self { Self::new() } } impl FileId { /// Create a new empty file identifier. /// /// # Returns /// /// A new FileId instance with an empty identifier. 
pub fn new() -> Self { Self(vec![]) } } impl From<&str> for FileId { fn from(s: &str) -> Self { Self(s.as_bytes().to_vec()) } } impl From<String> for FileId { fn from(s: String) -> Self { Self(s.into_bytes()) } } impl From<&[u8]> for FileId { fn from(s: &[u8]) -> Self { Self(s.to_vec()) } } impl From<Vec<u8>> for FileId { fn from(s: Vec<u8>) -> Self { Self(s) } } impl From<FileId> for Vec<u8> { fn from(s: FileId) -> Self { s.0 } } impl From<FileId> for String { fn from(s: FileId) -> Self { String::from_utf8(s.0).unwrap() } } impl<'py> pyo3::IntoPyObject<'py> for FileId { type Target = pyo3::PyAny; type Output = pyo3::Bound<'py, Self::Target>; type Error = pyo3::PyErr; fn into_pyobject(self, py: pyo3::Python<'py>) -> Result<Self::Output, Self::Error> { self.0.into_pyobject(py) } } impl<'a, 'py> pyo3::FromPyObject<'a, 'py> for FileId { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult<Self> { let bytes = ob.extract::<Vec<u8>>()?; Ok(Self(bytes)) } } /// Generate a Bazaar revision identifier. /// /// # Parameters /// /// * `username` - The username to associate with the revision. /// * `timestamp` - Optional timestamp for the revision, in seconds since the epoch. /// /// # Returns /// /// A byte vector containing the generated revision identifier. pub fn gen_revision_id(username: &str, timestamp: Option<u64>) -> Vec<u8> { Python::attach(|py| { let m = py.import("breezy.bzr.generate_ids").unwrap(); let gen_revision_id = m.getattr("gen_revision_id").unwrap(); gen_revision_id .call1((username, timestamp)) .unwrap() .extract() .unwrap() }) } #[test] fn test_gen_revision_id() { crate::init(); gen_revision_id("user", None); } /// Generate a Bazaar file identifier from a name. /// /// # Parameters /// /// * `name` - The name to use for generating the file identifier. /// /// # Returns /// /// A byte vector containing the generated file identifier. pub fn gen_file_id(name: &str) -> Vec<u8> { Python::attach(|py| { let m = py.import("breezy.bzr.generate_ids").unwrap(); let gen_file_id = m.getattr("gen_file_id").unwrap(); gen_file_id.call1((name,)).unwrap().extract().unwrap() }) } #[test] fn test_file_id() { crate::init(); gen_file_id("somename"); } /// A prober for remote Bazaar repositories. /// /// This prober can detect whether a remote location contains a Bazaar repository. pub struct RemoteBzrProber(Py<PyAny>); impl RemoteBzrProber { /// Create a new RemoteBzrProber. /// /// # Returns /// /// Some(RemoteBzrProber) if Bazaar is available, None otherwise.
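///
/// # Examples
///
/// A minimal sketch; it assumes an embedded Python interpreter with the
/// `breezy.bzr` module importable at runtime, so it is marked `no_run`:
///
/// ```no_run
/// use breezyshim::bazaar::RemoteBzrProber;
///
/// // `new` returns None when breezy.bzr cannot be imported.
/// match RemoteBzrProber::new() {
///     Some(_prober) => println!("breezy.bzr is available"),
///     None => println!("breezy.bzr is not installed"),
/// }
/// ```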
pub fn new() -> Option<Self> { Python::attach(|py| { let m = match py.import("breezy.bzr") { Ok(m) => m, Err(e) => { if e.is_instance_of::<PyModuleNotFoundError>(py) { return None; } else { e.print_and_set_sys_last_vars(py); panic!("Failed to import breezy.bzr"); } } }; let prober = m .getattr("RemoteBzrProber") .expect("Failed to get RemoteBzrProber"); Some(Self(prober.unbind())) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for RemoteBzrProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult<Self> { Ok(Self(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for RemoteBzrProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> { Ok(self.0.into_bound(py)) } } impl std::fmt::Debug for RemoteBzrProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("RemoteBzrProber({:?})", self.0)) } } impl crate::controldir::PyProber for RemoteBzrProber { fn to_object(&self, py: Python) -> Py<PyAny> { self.0.clone_ref(py) } } breezyshim-0.7.5/src/bazaar/tree.rs000064400000000000000000000023661046102023000153570ustar 00000000000000//! Inventory trees use crate::error::Error; use crate::tree::Path; use pyo3::prelude::*; /// Trait for trees that have an inventory and can be modified. /// /// Inventory trees are trees that track file identifiers, which is a feature /// specific to Bazaar trees. pub trait MutableInventoryTree: crate::tree::PyMutableTree { /// Add files to the tree with explicit file identifiers. /// /// # Parameters /// /// * `paths` - The paths of the files to add. /// * `file_ids` - The file identifiers to assign to the files. /// /// # Returns /// /// `Ok(())` on success, or an error if the files could not be added. fn add(&self, paths: &[&Path], file_ids: &[crate::bazaar::FileId]) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method1( py, "add", ( paths .iter() .map(|p| p.to_string_lossy().to_string()) .collect::<Vec<_>>(), file_ids.to_vec(), ), ) }) .map_err(|e| e.into()) .map(|_| ()) } } impl MutableInventoryTree for crate::workingtree::GenericWorkingTree {} breezyshim-0.7.5/src/branch.rs000064400000000000000000001057151046102023000144170ustar 00000000000000//! Branches are the primary way to interact with the history of a project in Breezy. //! //! A branch is a named sequence of revisions. Each revision is a snapshot of the project at a //! particular point in time. Revisions are linked together in a chain, forming a history of the //! project. The branch itself is a pointer to the most recent revision in the chain. //! Branches can be pushed to and pulled from other branches, allowing changes to be shared between //! different branches. //! //! Breezy supports several different types of branches, each with different capabilities and //! constraints. use crate::controldir::{ControlDir, GenericControlDir, PyControlDir}; use crate::error::Error; use crate::foreign::VcsType; use crate::lock::Lock; use crate::repository::{GenericRepository, PyRepository, Repository}; use crate::revisionid::RevisionId; use pyo3::intern; use pyo3::prelude::*; use pyo3::types::PyDict; /// Format of a branch in a version control system. /// /// This struct represents the format of a branch, which defines its capabilities /// and constraints.
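///
/// # Examples
///
/// A hedged sketch: a `BranchFormat` is normally obtained from an existing
/// branch via [`Branch::format`] rather than constructed directly, and the
/// helper function below is made up for illustration:
///
/// ```no_run
/// use breezyshim::branch::Branch;
///
/// // Report whether a branch's format allows it to be stacked on another branch.
/// fn is_stackable(branch: &dyn Branch) -> bool {
///     branch.format().supports_stacking()
/// }
/// ```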
#[derive(Debug)] pub struct BranchFormat(Py<PyAny>); impl Clone for BranchFormat { fn clone(&self) -> Self { Python::attach(|py| BranchFormat(self.0.clone_ref(py))) } } impl BranchFormat { /// Check if this branch format supports stacking. /// /// Stacking allows a branch to reference revisions in another branch /// without duplicating their storage. /// /// # Returns /// /// `true` if the branch format supports stacking, `false` otherwise. pub fn supports_stacking(&self) -> bool { Python::attach(|py| { self.0 .call_method0(py, "supports_stacking") .unwrap() .extract(py) .unwrap() }) } } /// Trait representing a branch in a version control system. /// /// A branch is a named sequence of revisions. Each revision is a snapshot of the project /// at a particular point in time. This trait provides methods for interacting with /// branches across various version control systems. pub trait Branch { /// Get a reference to self as Any for downcasting. fn as_any(&self) -> &dyn std::any::Any; /// Get the format of this branch. /// /// # Returns /// /// The format of this branch. fn format(&self) -> BranchFormat; /// Get the type of version control system for this branch. /// /// # Returns /// /// The version control system type. fn vcs_type(&self) -> VcsType; /// Get the revision number of the last revision in this branch. /// /// # Returns /// /// The revision number. fn revno(&self) -> u32; /// Lock the branch for reading. /// /// This method acquires a read lock on the branch, which allows reading from the /// branch but prevents others from writing to it. /// /// # Returns /// /// A lock object that will release the lock when dropped, or an error if the /// lock could not be acquired. fn lock_read(&self) -> Result<Lock, Error>; /// Lock the branch for writing. /// /// This method acquires a write lock on the branch, which allows writing to the /// branch but prevents others from reading from or writing to it. /// /// # Returns /// /// A lock object that will release the lock when dropped, or an error if the /// lock could not be acquired. fn lock_write(&self) -> Result<Lock, Error>; /// Get the tags for this branch. /// /// Tags are names associated with specific revisions in the branch. /// /// # Returns /// /// The tags object for this branch, or an error if the tags could not be retrieved. fn tags(&self) -> Result<crate::tags::Tags, Error>; /// Get the repository associated with this branch. /// /// # Returns /// /// The repository containing this branch. fn repository(&self) -> GenericRepository; /// Get the last revision in this branch. /// /// # Returns /// /// The revision ID of the last revision in this branch. fn last_revision(&self) -> RevisionId; /// Get the name of this branch. /// /// # Returns /// /// The name of this branch, or None if it doesn't have a name. fn name(&self) -> Option<String>; /// Get the basis tree for this branch. /// /// The basis tree is the tree corresponding to the last revision in this branch. /// /// # Returns /// /// The basis tree, or an error if it could not be retrieved. fn basis_tree(&self) -> Result<crate::tree::RevisionTree, Error>; /// Get the user-visible URL for this branch. /// /// # Returns /// /// The URL that can be used to access this branch. fn get_user_url(&self) -> url::Url; /// Get the control directory for this branch. /// /// # Returns /// /// The control directory containing this branch. fn controldir( &self, ) -> Box< dyn ControlDir< Branch = GenericBranch, Repository = crate::repository::GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >; /// Push this branch to a remote branch.
/// /// # Parameters /// /// * `remote_branch` - The remote branch to push to. /// * `overwrite` - Whether to overwrite the remote branch if it has diverged. /// * `stop_revision` - The revision to stop pushing at, or None to push all revisions. /// * `tag_selector` - A function that selects which tags to push, or None to push all tags. /// /// # Returns /// /// `Ok(())` on success, or an error if the push failed. fn push( &self, remote_branch: &dyn PyBranch, overwrite: bool, stop_revision: Option<&RevisionId>, tag_selector: Option bool>>, ) -> Result<(), crate::error::Error>; /// Pull from a source branch into this branch. /// /// # Parameters /// /// * `source_branch` - The branch to pull from. /// * `overwrite` - Whether to overwrite this branch if it has diverged from the source. /// /// # Returns /// /// `Ok(())` on success, or an error if the pull failed. fn pull(&self, source_branch: &dyn PyBranch, overwrite: Option) -> Result<(), Error>; /// Get the parent branch location. /// /// # Returns /// /// The parent branch location as a string, or None if there is no parent branch. fn get_parent(&self) -> Option; /// Set the parent branch location. /// /// # Parameters /// /// * `parent` - The new parent branch location. fn set_parent(&mut self, parent: &str); /// Get the public branch location. /// /// # Returns /// /// The public branch location as a string, or None if there is no public branch. fn get_public_branch(&self) -> Option; /// Get the push location for this branch. /// /// # Returns /// /// The push location as a string, or None if there is no push location. fn get_push_location(&self) -> Option; /// Get the submit branch location. /// /// # Returns /// /// The submit branch location as a string, or None if there is no submit branch. fn get_submit_branch(&self) -> Option; /// Get a transport for accessing this branch's user files. /// /// # Returns /// /// A transport for accessing this branch's user files. fn user_transport(&self) -> crate::transport::Transport; /// Get the configuration for this branch. /// /// # Returns /// /// The branch configuration. fn get_config(&self) -> crate::config::BranchConfig; /// Get the configuration stack for this branch. /// /// # Returns /// /// The configuration stack for this branch, which includes branch-specific, /// repository-specific, and global configuration. fn get_config_stack(&self) -> crate::config::ConfigStack; /// Create a new branch from this branch. /// /// # Parameters /// /// * `to_controldir` - The control directory to create the new branch in. /// * `to_branch_name` - The name of the new branch. /// /// # Returns /// /// `Ok(())` on success, or an error if the branch could not be created. fn sprout(&self, to_controldir: &dyn PyControlDir, to_branch_name: &str) -> Result<(), Error>; /// Create a checkout of this branch. /// /// # Parameters /// /// * `to_location` - The location to create the checkout at. /// /// # Returns /// /// The working tree for the checkout, or an error if the checkout could not be created. fn create_checkout( &self, to_location: &std::path::Path, ) -> Result; /// Generate the revision history for this branch. /// /// # Parameters /// /// * `last_revision` - The last revision to include in the history. /// /// # Returns /// /// `Ok(())` on success, or an error if the history could not be generated. fn generate_revision_history(&self, last_revision: &RevisionId) -> Result<(), Error>; /// Bind this branch to another branch. 
/// /// Binding a branch means that commits to this branch will also be made /// to the master branch. /// /// # Parameters /// /// * `other` - The branch to bind to. /// /// # Returns /// /// `Ok(())` on success, or an error if the branch could not be bound. fn bind(&self, other: &dyn Branch) -> Result<(), Error>; /// Unbind this branch from any master branch. /// /// After unbinding, commits will only be made to this branch. /// /// # Returns /// /// `Ok(())` on success, or an error if the branch could not be unbound. fn unbind(&self) -> Result<(), Error>; /// Get the location of the branch this branch is bound to. /// /// # Returns /// /// The URL of the bound branch as a string, or None if not bound. fn get_bound_location(&self) -> Option; /// Get the location this branch used to be bound to. /// /// # Returns /// /// The URL of the old bound branch as a string, or None if there was no previous binding. fn get_old_bound_location(&self) -> Option; /// Check if this branch is locked. /// /// # Returns /// /// `true` if the branch is locked, `false` otherwise. fn is_locked(&self) -> bool; /// Get the current lock mode of the branch. /// /// # Returns /// /// 'r' for read lock, 'w' for write lock, or None if not locked. fn peek_lock_mode(&self) -> Option; /// Get the revision ID for a given revision number. /// /// # Parameters /// /// * `revno` - The revision number. /// /// # Returns /// /// The revision ID corresponding to the revision number. fn get_rev_id(&self, revno: u32) -> Result; /// Convert a revision ID to its revision number. /// /// # Parameters /// /// * `revision_id` - The revision ID to convert. /// /// # Returns /// /// The revision number, or an error if the revision ID is not in the branch. fn revision_id_to_revno(&self, revision_id: &RevisionId) -> Result; /// Check whether a revision number corresponds to a real revision. /// /// # Parameters /// /// * `revno` - The revision number to check. /// /// # Returns /// /// `true` if the revision number corresponds to a real revision, `false` otherwise. fn check_real_revno(&self, revno: u32) -> bool; /// Get information about the last revision. /// /// # Returns /// /// A tuple containing the revision number and revision ID of the last revision. fn last_revision_info(&self) -> (u32, RevisionId); /// Set the last revision information for this branch. /// /// # Parameters /// /// * `revno` - The revision number. /// * `revision_id` - The revision ID. /// /// # Returns /// /// `Ok(())` on success, or an error if the information could not be set. fn set_last_revision_info(&self, revno: u32, revision_id: &RevisionId) -> Result<(), Error>; /// Get the URL this branch is stacked on. /// /// # Returns /// /// The URL of the stacked-on branch, or an error if not stacked. fn get_stacked_on_url(&self) -> Result; /// Set the URL this branch is stacked on. /// /// # Parameters /// /// * `url` - The URL to stack on. /// /// # Returns /// /// `Ok(())` on success, or an error if stacking could not be set. fn set_stacked_on_url(&self, url: &str) -> Result<(), Error>; /// Copy revisions from another branch into this branch. /// /// # Parameters /// /// * `from_branch` - The branch to fetch revisions from. /// * `last_revision` - The last revision to fetch, or None to fetch all. /// /// # Returns /// /// `Ok(())` on success, or an error if the fetch failed. fn fetch( &self, from_branch: &dyn Branch, last_revision: Option<&RevisionId>, ) -> Result<(), Error>; /// Update this branch to match the master branch. 
/// /// This is used when the branch is bound to synchronize changes. /// /// # Returns /// /// `Ok(())` on success, or an error if the update failed. fn update(&self) -> Result<(), Error>; /// Set the location to push this branch to. /// /// # Parameters /// /// * `location` - The push location URL. /// /// # Returns /// /// `Ok(())` on success, or an error if the location could not be set. fn set_push_location(&self, location: &str) -> Result<(), Error>; /// Set the public branch location. /// /// # Parameters /// /// * `location` - The public branch URL. /// /// # Returns /// /// `Ok(())` on success, or an error if the location could not be set. fn set_public_branch(&self, location: &str) -> Result<(), Error>; /// Check if this branch is configured to only allow appending revisions. /// /// # Returns /// /// `true` if only appending is allowed, `false` otherwise. fn get_append_revisions_only(&self) -> bool; /// Set whether this branch should only allow appending revisions. /// /// # Parameters /// /// * `value` - Whether to only allow appending. /// /// # Returns /// /// `Ok(())` on success, or an error if the setting could not be changed. fn set_append_revisions_only(&self, value: bool) -> Result<(), Error>; } /// Trait for branches that wrap Python branch objects. /// /// This trait is implemented by branch types that wrap Breezy's Python branch objects. pub trait PyBranch: Branch + Send + std::any::Any { /// Get the underlying Python object. fn to_object(&self, py: Python<'_>) -> Py; } impl dyn PyBranch { /// Get a reference to self as a Branch trait object. pub fn as_branch(&self) -> &dyn Branch { self } } impl Branch for T { fn as_any(&self) -> &dyn std::any::Any { self } fn format(&self) -> BranchFormat { Python::attach(|py| BranchFormat(self.to_object(py).getattr(py, "_format").unwrap())) } fn vcs_type(&self) -> VcsType { self.repository().vcs_type() } fn revno(&self) -> u32 { Python::attach(|py| { self.to_object(py) .call_method0(py, "revno") .unwrap() .extract(py) .unwrap() }) } fn lock_read(&self) -> Result { Python::attach(|py| { Ok(Lock::from( self.to_object(py) .call_method0(py, intern!(py, "lock_read"))?, )) }) } fn lock_write(&self) -> Result { Python::attach(|py| { Ok(Lock::from( self.to_object(py) .call_method0(py, intern!(py, "lock_write"))?, )) }) } fn tags(&self) -> Result { Python::attach(|py| { Ok(crate::tags::Tags::from( self.to_object(py).getattr(py, "tags")?, )) }) } fn repository(&self) -> GenericRepository { Python::attach(|py| { GenericRepository::new(self.to_object(py).getattr(py, "repository").unwrap()) }) } fn last_revision(&self) -> RevisionId { Python::attach(|py| { self.to_object(py) .call_method0(py, intern!(py, "last_revision")) .unwrap() .extract(py) .unwrap() }) } fn name(&self) -> Option { Python::attach(|py| { self.to_object(py) .getattr(py, "name") .unwrap() .extract::>(py) .unwrap() }) } fn basis_tree(&self) -> Result { Python::attach(|py| { Ok(crate::tree::RevisionTree( self.to_object(py).call_method0(py, "basis_tree")?, )) }) } fn get_user_url(&self) -> url::Url { Python::attach(|py| { let url = self .to_object(py) .getattr(py, "user_url") .unwrap() .extract::(py) .unwrap(); url.parse::().unwrap() }) } fn controldir( &self, ) -> Box< dyn ControlDir< Branch = GenericBranch, Repository = crate::repository::GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, > { Python::attach(|py| { Box::new(GenericControlDir::new( self.to_object(py).getattr(py, "controldir").unwrap(), )) as Box< dyn ControlDir< Branch = GenericBranch, 
Repository = crate::repository::GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, > }) } fn push( &self, remote_branch: &dyn PyBranch, overwrite: bool, stop_revision: Option<&RevisionId>, tag_selector: Option bool>>, ) -> Result<(), crate::error::Error> { Python::attach(|py| { let kwargs = PyDict::new(py); kwargs.set_item("overwrite", overwrite)?; if let Some(stop_revision) = stop_revision { kwargs.set_item("stop_revision", stop_revision.clone())?; } if let Some(tag_selector) = tag_selector { kwargs.set_item("tag_selector", py_tag_selector(py, tag_selector)?)?; } self.to_object(py).call_method( py, "push", (&remote_branch.to_object(py),), Some(&kwargs), )?; Ok(()) }) } fn pull(&self, source_branch: &dyn PyBranch, overwrite: Option) -> Result<(), Error> { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(overwrite) = overwrite { kwargs.set_item("overwrite", overwrite)?; } self.to_object(py).call_method( py, "pull", (&source_branch.to_object(py),), Some(&kwargs), )?; Ok(()) }) } fn get_parent(&self) -> Option { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_parent") .unwrap() .extract(py) .unwrap() }) } fn set_parent(&mut self, parent: &str) { Python::attach(|py| { self.to_object(py) .call_method1(py, "set_parent", (parent,)) .unwrap(); }) } fn get_public_branch(&self) -> Option { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_public_branch") .unwrap() .extract(py) .unwrap() }) } fn get_push_location(&self) -> Option { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_push_location") .unwrap() .extract(py) .unwrap() }) } fn get_submit_branch(&self) -> Option { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_submit_branch") .unwrap() .extract(py) .unwrap() }) } fn user_transport(&self) -> crate::transport::Transport { Python::attach(|py| { crate::transport::Transport::new( self.to_object(py).getattr(py, "user_transport").unwrap(), ) }) } fn get_config(&self) -> crate::config::BranchConfig { Python::attach(|py| { crate::config::BranchConfig::new( self.to_object(py).call_method0(py, "get_config").unwrap(), ) }) } fn get_config_stack(&self) -> crate::config::ConfigStack { Python::attach(|py| { crate::config::ConfigStack::new( self.to_object(py) .call_method0(py, "get_config_stack") .unwrap(), ) }) } fn sprout(&self, to_controldir: &dyn PyControlDir, to_branch_name: &str) -> Result<(), Error> { Python::attach(|py| { let kwargs = PyDict::new(py); kwargs.set_item("name", to_branch_name)?; self.to_object(py).call_method( py, "sprout", (to_controldir.to_object(py),), Some(&kwargs), )?; Ok(()) }) } fn create_checkout( &self, to_location: &std::path::Path, ) -> Result { Python::attach(|py| { self.to_object(py) .call_method1( py, "create_checkout", (to_location.to_string_lossy().to_string(),), ) .map(crate::workingtree::GenericWorkingTree) .map_err(|e| e.into()) }) } fn generate_revision_history(&self, last_revision: &RevisionId) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method1( py, "generate_revision_history", (last_revision.clone().into_pyobject(py).unwrap(),), )?; Ok(()) }) } fn bind(&self, other: &dyn Branch) -> Result<(), Error> { Python::attach(|py| { // Try to downcast to concrete PyBranch types if let Some(gb) = other.as_any().downcast_ref::() { self.to_object(py) .call_method1(py, "bind", (gb.to_object(py),))?; } else if let Some(mb) = other.as_any().downcast_ref::() { self.to_object(py) .call_method1(py, "bind", (mb.to_object(py),))?; } else { return 
Err(Error::Other(pyo3::exceptions::PyTypeError::new_err( "Branch must be a PyBranch", ))); } Ok(()) }) } fn unbind(&self) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "unbind")?; Ok(()) }) } fn get_bound_location(&self) -> Option { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_bound_location") .unwrap() .extract(py) .unwrap() }) } fn get_old_bound_location(&self) -> Option { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_old_bound_location") .unwrap() .extract(py) .unwrap() }) } fn is_locked(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "is_locked") .unwrap() .extract(py) .unwrap() }) } fn peek_lock_mode(&self) -> Option { Python::attach(|py| { let result = self .to_object(py) .call_method0(py, "peek_lock_mode") .unwrap(); if result.is_none(py) { None } else { let mode: String = result.extract(py).unwrap(); mode.chars().next() } }) } fn get_rev_id(&self, revno: u32) -> Result { Python::attach(|py| { self.to_object(py) .call_method1(py, "get_rev_id", (revno,))? .extract(py) .map_err(Into::into) }) } fn revision_id_to_revno(&self, revision_id: &RevisionId) -> Result { Python::attach(|py| { self.to_object(py) .call_method1(py, "revision_id_to_revno", (revision_id.clone(),))? .extract(py) .map_err(Into::into) }) } fn check_real_revno(&self, revno: u32) -> bool { Python::attach(|py| { self.to_object(py) .call_method1(py, "check_real_revno", (revno,)) .unwrap() .extract(py) .unwrap() }) } fn last_revision_info(&self) -> (u32, RevisionId) { Python::attach(|py| { self.to_object(py) .call_method0(py, "last_revision_info") .unwrap() .extract(py) .unwrap() }) } fn set_last_revision_info(&self, revno: u32, revision_id: &RevisionId) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method1( py, "set_last_revision_info", (revno, revision_id.clone()), )?; Ok(()) }) } fn get_stacked_on_url(&self) -> Result { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_stacked_on_url")? 
.extract(py) .map_err(Into::into) }) } fn set_stacked_on_url(&self, url: &str) -> Result<(), Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "set_stacked_on_url", (url,))?; Ok(()) }) } fn fetch( &self, from_branch: &dyn Branch, last_revision: Option<&RevisionId>, ) -> Result<(), Error> { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(rev) = last_revision { kwargs.set_item("last_revision", rev.clone())?; } // Try to downcast to concrete PyBranch types if let Some(gb) = from_branch.as_any().downcast_ref::() { self.to_object(py) .call_method(py, "fetch", (gb.to_object(py),), Some(&kwargs))?; } else if let Some(mb) = from_branch.as_any().downcast_ref::() { self.to_object(py) .call_method(py, "fetch", (mb.to_object(py),), Some(&kwargs))?; } else { return Err(Error::Other(pyo3::exceptions::PyTypeError::new_err( "Branch must be a PyBranch", ))); } Ok(()) }) } fn update(&self) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "update")?; Ok(()) }) } fn set_push_location(&self, location: &str) -> Result<(), Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "set_push_location", (location,))?; Ok(()) }) } fn set_public_branch(&self, location: &str) -> Result<(), Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "set_public_branch", (location,))?; Ok(()) }) } fn get_append_revisions_only(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_append_revisions_only") .unwrap() .extract(py) .unwrap() }) } fn set_append_revisions_only(&self, value: bool) -> Result<(), Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "set_append_revisions_only", (value,))?; Ok(()) }) } } /// A generic branch that can represent any type of branch. /// /// This struct wraps a Python branch object and provides access to it through /// the Branch trait. pub struct GenericBranch(Py); impl Clone for GenericBranch { fn clone(&self) -> Self { Python::attach(|py| GenericBranch(self.0.clone_ref(py))) } } impl PyBranch for GenericBranch { fn to_object(&self, py: Python<'_>) -> Py { self.0.clone_ref(py) } } impl<'py> IntoPyObject<'py> for GenericBranch { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericBranch { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericBranch(ob.to_owned().unbind())) } } impl<'py> From> for GenericBranch { fn from(ob: Bound) -> Self { GenericBranch(ob.unbind()) } } impl From> for GenericBranch { fn from(gb: Py) -> Self { GenericBranch(gb) } } /// A branch that exists only in memory. /// /// Memory branches are not backed by a persistent storage and are primarily /// used for testing or temporary operations. pub struct MemoryBranch(Py); impl Clone for MemoryBranch { fn clone(&self) -> Self { Python::attach(|py| MemoryBranch(self.0.clone_ref(py))) } } impl PyBranch for MemoryBranch { fn to_object(&self, py: Python<'_>) -> Py { self.0.clone_ref(py) } } impl MemoryBranch { /// Create a new MemoryBranch. /// /// # Parameters /// /// * `repository` - The repository to use for this memory branch. /// * `revno` - Optional revision number to use as the last revision. /// * `revid` - The revision ID to use as the last revision. /// /// # Returns /// /// A new MemoryBranch instance. 
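///
/// # Example
///
/// A sketch based on this crate's tests: build an in-memory branch on top of an
/// existing branch's repository (the `file://` URL is illustrative).
///
/// ```no_run
/// use breezyshim::branch::{open, Branch, MemoryBranch};
/// use breezyshim::RevisionId;
///
/// breezyshim::init();
/// let branch = open(&url::Url::parse("file:///path/to/branch").unwrap()).unwrap();
/// // Reuse the existing repository, starting from the null revision.
/// let mem = MemoryBranch::new(&branch.repository(), None, &RevisionId::null());
/// assert_eq!(mem.last_revision(), RevisionId::null());
/// ```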
pub fn new(repository: &R, revno: Option, revid: &RevisionId) -> Self { Python::attach(|py| { let mb_cls = py .import("breezy.memorybranch") .unwrap() .getattr("MemoryBranch") .unwrap(); let o = mb_cls .call1((repository.to_object(py), (revno, revid.clone()))) .unwrap(); MemoryBranch(o.unbind()) }) } } pub(crate) fn py_tag_selector( py: Python, tag_selector: Box bool>, ) -> PyResult> { #[pyclass(unsendable)] struct PyTagSelector(Box bool>); #[pymethods] impl PyTagSelector { fn __call__(&self, tag: String) -> bool { (self.0)(tag) } } Ok(PyTagSelector(tag_selector) .into_pyobject(py) .unwrap() .unbind() .into()) } /// Open a branch at the specified URL. /// /// # Parameters /// /// * `url` - The URL of the branch to open. /// /// # Returns /// /// The opened branch, or an error if the branch could not be opened. pub fn open(url: &url::Url) -> Result, Error> { Python::attach(|py| { let m = py.import("breezy.branch").unwrap(); let c = m.getattr("Branch").unwrap(); let r = c.call_method1("open", (url.to_string(),))?; Ok(Box::new(GenericBranch::from(r)) as Box) }) } /// Find and open a branch containing the specified URL. /// /// This function searches for a branch containing the specified URL and returns /// the branch and the relative path from the branch to the specified URL. /// /// # Parameters /// /// * `url` - The URL to find a branch for. /// /// # Returns /// /// A tuple containing the opened branch and the relative path from the branch to /// the specified URL, or an error if no branch could be found. pub fn open_containing(url: &url::Url) -> Result<(Box, String), Error> { Python::attach(|py| { let m = py.import("breezy.branch").unwrap(); let c = m.getattr("Branch").unwrap(); let (b, p): (Bound, String) = c .call_method1("open_containing", (url.to_string(),))? .extract()?; Ok((Box::new(GenericBranch(b.unbind())) as Box, p)) }) } /// Open a branch from a transport. /// /// # Parameters /// /// * `transport` - The transport to use for accessing the branch. /// /// # Returns /// /// The opened branch, or an error if the branch could not be opened. pub fn open_from_transport( transport: &crate::transport::Transport, ) -> Result, Error> { Python::attach(|py| { let m = py.import("breezy.branch").unwrap(); let c = m.getattr("Branch").unwrap(); let r = c.call_method1("open_from_transport", (transport.as_pyobject(),))?; Ok(Box::new(GenericBranch(r.unbind())) as Box) }) } #[cfg(test)] mod tests { use super::*; #[test] fn test_create_and_clone() { crate::init(); let td = tempfile::tempdir().unwrap(); let url = url::Url::from_directory_path(td.path()).unwrap(); let branch = crate::controldir::create_branch_convenience( &url, None, &crate::controldir::ControlDirFormat::default(), ) .unwrap(); assert_eq!(branch.revno(), 0); assert_eq!(branch.last_revision(), RevisionId::null()); } #[test] fn test_create_and_clone_memory() { crate::init(); let td = tempfile::tempdir().unwrap(); let url = url::Url::from_directory_path(td.path()).unwrap(); let branch = crate::controldir::create_branch_convenience( &url, None, &crate::controldir::ControlDirFormat::default(), ) .unwrap(); let branch = MemoryBranch::new(&branch.repository(), None, &RevisionId::null()); assert_eq!(branch.last_revision(), RevisionId::null()); } } breezyshim-0.7.5/src/clean_tree.rs000064400000000000000000000032131046102023000152530ustar 00000000000000//! Functions for cleaning a working tree by removing unknown files. //! //! This module provides functionality to clean a working tree by removing //! 
unknown files, ignored files, and various detritus files. use crate::error::Error; use pyo3::prelude::*; use pyo3::types::PyDict; use std::path::Path; /// Clean a working tree by removing unwanted files. /// /// # Parameters /// /// * `directory` - The directory containing the working tree to clean /// * `unknown` - If true, remove unknown files (those not tracked by version control) /// * `ignored` - If true, remove ignored files (those matching ignore patterns) /// * `detritus` - If true, remove detritus files (like backup files, etc.) /// * `dry_run` - If true, only report what would be done without actually removing files /// * `no_prompt` - If true, don't prompt for confirmation before removing files /// /// # Returns /// /// * `Ok(())` on success /// * `Err` containing any error that occurred during the cleaning process pub fn clean_tree( directory: &Path, unknown: bool, ignored: bool, detritus: bool, dry_run: bool, no_prompt: bool, ) -> Result<(), Error> { Python::attach(|py| { let m = py.import("breezy.clean_tree")?; let f = m.getattr("clean_tree")?; let kwargs = PyDict::new(py); kwargs.set_item("directory", directory.to_str().unwrap())?; kwargs.set_item("unknown", unknown)?; kwargs.set_item("ignored", ignored)?; kwargs.set_item("detritus", detritus)?; kwargs.set_item("dry_run", dry_run)?; kwargs.set_item("no_prompt", no_prompt)?; f.call((), Some(&kwargs))?; Ok(()) }) } breezyshim-0.7.5/src/commit.rs000064400000000000000000000121051046102023000144420ustar 00000000000000//! Commit-related functionality. //! //! This module provides types for reporting commit information and handling //! commit operations in version control systems. use pyo3::prelude::*; /// A commit reporter that doesn't report anything. /// /// This is useful when you want to perform a commit operation but don't want /// to output any information about the commit. pub struct NullCommitReporter(Py); impl NullCommitReporter { /// Create a new NullCommitReporter. /// /// # Returns /// /// A new NullCommitReporter instance. pub fn new() -> Self { Python::attach(|py| { let m = py.import("breezy.commit").unwrap(); let ncr = m.getattr("NullCommitReporter").unwrap(); NullCommitReporter(ncr.call0().unwrap().into()) }) } } impl Default for NullCommitReporter { fn default() -> Self { Self::new() } } impl From> for NullCommitReporter { fn from(obj: Py) -> Self { NullCommitReporter(obj) } } impl<'py> IntoPyObject<'py> for NullCommitReporter { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } /// Trait for Python commit reporters. /// /// This trait is implemented by commit reporters that wrap Python objects. pub trait PyCommitReporter: std::any::Any + std::fmt::Debug { /// Get the underlying Python object for this commit reporter. fn to_object(&self, py: Python) -> Py; } /// Trait for commit reporters. /// /// This trait represents objects that report information about commits. pub trait CommitReporter: std::fmt::Debug {} impl CommitReporter for T {} /// A generic commit reporter that wraps any Python commit reporter. 
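///
/// # Example
///
/// A sketch of bridging a concrete reporter into the generic wrapper. The
/// [`NullCommitReporter`] is only used here as a convenient source of a Python
/// reporter object.
///
/// ```no_run
/// use breezyshim::commit::{GenericCommitReporter, NullCommitReporter, PyCommitReporter};
/// use pyo3::Python;
///
/// breezyshim::init();
/// let null = NullCommitReporter::new();
/// // Grab the underlying Python object and re-wrap it generically.
/// let generic = Python::attach(|py| GenericCommitReporter::new(null.to_object(py)));
/// println!("{:?}", generic);
/// ```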
pub struct GenericCommitReporter(Py); impl<'py> IntoPyObject<'py> for GenericCommitReporter { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericCommitReporter { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericCommitReporter(obj.to_owned().unbind())) } } impl PyCommitReporter for GenericCommitReporter { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl GenericCommitReporter { /// Create a new GenericCommitReporter from a Python object. /// /// # Parameters /// /// * `obj` - A Python object that implements the commit reporter interface. /// /// # Returns /// /// A new GenericCommitReporter instance that wraps the provided Python object. pub fn new(obj: Py) -> Self { Self(obj) } } impl std::fmt::Debug for GenericCommitReporter { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("GenericCommitReporter({:?})", self.0)) } } impl PyCommitReporter for NullCommitReporter { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl std::fmt::Debug for NullCommitReporter { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("NullCommitReporter({:?})", self.0)) } } /// A commit reporter that reports commit information to the log. /// /// This reporter outputs information about commits to the logging system. pub struct ReportCommitToLog(Py); impl ReportCommitToLog { /// Create a new ReportCommitToLog instance. /// /// # Returns /// /// A new ReportCommitToLog instance. pub fn new() -> Self { Python::attach(|py| { let m = py.import("breezy.commit").unwrap(); let rctl = m.getattr("ReportCommitToLog").unwrap(); ReportCommitToLog(rctl.call0().unwrap().into()) }) } } impl Default for ReportCommitToLog { fn default() -> Self { Self::new() } } impl From> for ReportCommitToLog { fn from(obj: Py) -> Self { ReportCommitToLog(obj) } } impl<'py> IntoPyObject<'py> for ReportCommitToLog { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl PyCommitReporter for ReportCommitToLog { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl std::fmt::Debug for ReportCommitToLog { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("ReportCommitToLog({:?})", self.0)) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_null_commit_reporter() { NullCommitReporter::new(); } #[test] fn test_report_commit_to_log() { ReportCommitToLog::new(); } } breezyshim-0.7.5/src/config.rs000064400000000000000000000413371046102023000144300ustar 00000000000000//! Configuration handling. //! //! This module provides access to the Breezy configuration system. //! It allows reading and writing configuration values, and provides //! access to credential stores. use crate::Result; use pyo3::prelude::*; use pyo3::BoundObject; /// Parse a username string into name and email components. /// /// # Parameters /// /// * `e` - The username string to parse, typically in the format "Name ". /// /// # Returns /// /// A tuple containing the name and email address. If no email address is present, /// the second element will be an empty string. 
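///
/// # Example
///
/// Mirroring the unit tests at the bottom of this module:
///
/// ```
/// use breezyshim::config::parse_username;
///
/// assert_eq!(
///     parse_username("Jane Doe <jane@example.com>"),
///     ("Jane Doe".to_string(), "jane@example.com".to_string())
/// );
/// assert_eq!(
///     parse_username("Jane Doe"),
///     ("Jane Doe".to_string(), "".to_string())
/// );
/// ```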
pub fn parse_username(e: &str) -> (String, String) { if let Some((_, username, email)) = lazy_regex::regex_captures!(r"(.*?)\s*?", e) { (username.to_string(), email.to_string()) } else { (e.to_string(), "".to_string()) } } /// Extract an email address from a username string. /// /// # Parameters /// /// * `e` - The username string to extract an email address from. /// /// # Returns /// /// The email address, or None if no email address is present. pub fn extract_email_address(e: &str) -> Option { let (_name, email) = parse_username(e); if email.is_empty() { None } else { Some(email) } } /// Trait for values that can be stored in a configuration. /// /// This trait is implemented for common types like strings, integers, and booleans, /// and can be implemented for other types that need to be stored in a configuration. pub trait ConfigValue: for<'py> IntoPyObject<'py> {} impl ConfigValue for String {} impl ConfigValue for &str {} impl ConfigValue for i64 {} impl ConfigValue for bool {} /// Configuration for a branch. /// /// This struct wraps a Python branch configuration object and provides methods for /// accessing and modifying branch-specific configuration options. pub struct BranchConfig(Py); impl Clone for BranchConfig { fn clone(&self) -> Self { Python::attach(|py| -> Self { Self(self.0.clone_ref(py)) }) } } impl<'py> IntoPyObject<'py> for BranchConfig { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> std::result::Result { Ok(self.0.into_bound(py)) } } impl BranchConfig { /// Create a new BranchConfig from a Python object. /// /// # Parameters /// /// * `o` - A Python object representing a branch configuration. /// /// # Returns /// /// A new BranchConfig instance. pub fn new(o: Py) -> Self { Self(o) } /// Set a user option in this branch configuration. /// /// # Parameters /// /// * `key` - The option key to set. /// * `value` - The value to set the option to. /// /// # Returns /// /// `Ok(())` on success, or an error if the option could not be set. pub fn set_user_option(&self, key: &str, value: T) -> Result<()> { Python::attach(|py| -> Result<()> { let py_value = value .into_pyobject(py) .map_err(|_| { crate::error::Error::Other( pyo3::PyErr::new::( "Failed to convert value to Python object", ), ) })? .unbind(); self.0 .call_method1(py, "set_user_option", (key, py_value))?; Ok(()) })?; Ok(()) } } /// A stack of configuration sources. /// /// This struct represents a stack of configuration sources, where more specific /// sources (like branch-specific configuration) override more general sources /// (like global configuration). pub struct ConfigStack(Py); impl ConfigStack { /// Create a new ConfigStack from a Python object. /// /// # Parameters /// /// * `o` - A Python object representing a configuration stack. /// /// # Returns /// /// A new ConfigStack instance. pub fn new(o: Py) -> Self { Self(o) } /// Get a configuration value from this stack. /// /// # Parameters /// /// * `key` - The configuration key to get. /// /// # Returns /// /// The configuration value, or None if the key is not present. pub fn get(&self, key: &str) -> Result>> { Python::attach(|py| -> Result>> { let value = self.0.call_method1(py, "get", (key,))?; if value.is_none(py) { Ok(None) } else { Ok(Some(value)) } }) } /// Set a configuration value in this stack. /// /// # Parameters /// /// * `key` - The configuration key to set. /// * `value` - The value to set the configuration to. 
/// /// # Returns /// /// `Ok(())` on success, or an error if the configuration could not be set. pub fn set(&self, key: &str, value: T) -> Result<()> { Python::attach(|py| -> Result<()> { let py_value = value .into_pyobject(py) .map_err(|_| { crate::error::Error::Other( pyo3::PyErr::new::( "Failed to convert value to Python object", ), ) })? .unbind(); self.0.call_method1(py, "set", (key, py_value))?; Ok(()) })?; Ok(()) } } /// Get the global configuration stack. /// /// # Returns /// /// The global configuration stack, or an error if it could not be retrieved. pub fn global_stack() -> Result { Python::attach(|py| -> Result { let m = py.import("breezy.config")?; let stack = m.call_method0("GlobalStack")?; Ok(ConfigStack::new(stack.unbind())) }) } /// Credentials for accessing a remote service. /// /// This struct contains the credentials for accessing a remote service, /// such as username, password, host, port, etc. pub struct Credentials { /// The scheme of the service, like "https", "ftp", etc. pub scheme: Option, /// The username for authenticating with the service. pub username: Option, /// The password for authenticating with the service. pub password: Option, /// The hostname of the service. pub host: Option, /// The port number of the service. pub port: Option, /// The path on the service. pub path: Option, /// The authentication realm of the service. pub realm: Option, /// Whether to verify SSL certificates when connecting to the service. pub verify_certificates: Option, } impl<'a, 'py> FromPyObject<'a, 'py> for Credentials { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let scheme = ob.get_item("scheme")?.extract()?; let username = ob.get_item("username")?.extract()?; let password = ob.get_item("password")?.extract()?; let host = ob.get_item("host")?.extract()?; let port = ob.get_item("port")?.extract()?; let path = ob.get_item("path")?.extract()?; let realm = ob.get_item("realm")?.extract()?; let verify_certificates = ob.get_item("verify_certificates")?.extract()?; Ok(Credentials { scheme, username, password, host, port, path, realm, verify_certificates, }) } } impl<'py> IntoPyObject<'py> for Credentials { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> std::result::Result { let dict = pyo3::types::PyDict::new(py); dict.set_item("scheme", &self.scheme).unwrap(); dict.set_item("username", &self.username).unwrap(); dict.set_item("password", &self.password).unwrap(); dict.set_item("host", &self.host).unwrap(); dict.set_item("port", self.port).unwrap(); dict.set_item("path", &self.path).unwrap(); dict.set_item("realm", &self.realm).unwrap(); dict.set_item("verify_certificates", self.verify_certificates) .unwrap(); Ok(dict.into_any()) } } // IntoPy is replaced by IntoPyObject in PyO3 0.25 // The IntoPyObject implementation above handles the conversion /// A store for retrieving credentials. /// /// This trait defines the interface for a credential store, which can be used to /// retrieve credentials for accessing remote services. Implementations of this trait /// can store credentials in different ways, like in a keychain, a config file, etc. pub trait CredentialStore: Send + Sync { /// Get credentials for accessing a remote service. /// /// # Parameters /// /// * `scheme` - The scheme of the service, like "https", "ftp", etc. /// * `host` - The hostname of the service. /// * `port` - The port number of the service, or None for the default port. 
/// * `user` - The username to use, or None to use the default username. /// * `path` - The path on the service, or None for the root path. /// * `realm` - The authentication realm, or None for the default realm. /// /// # Returns /// /// The credentials for accessing the service, or an error if the credentials /// could not be retrieved. fn get_credentials( &self, scheme: &str, host: &str, port: Option, user: Option<&str>, path: Option<&str>, realm: Option<&str>, ) -> Result; } struct PyCredentialStore(Py); impl CredentialStore for PyCredentialStore { fn get_credentials( &self, scheme: &str, host: &str, port: Option, user: Option<&str>, path: Option<&str>, realm: Option<&str>, ) -> Result { Python::attach(|py| -> Result { let creds = self.0.call_method1( py, "get_credentials", (scheme, host, port, user, path, realm), )?; Ok(creds.extract(py)?) }) } } #[pyclass] /// A wrapper for a `CredentialStore` that can be exposed to Python. /// /// This struct wraps a `CredentialStore` implementation and exposes it to Python /// through the Pyo3 type system. pub struct CredentialStoreWrapper(Box); #[pymethods] impl CredentialStoreWrapper { #[pyo3(signature = (scheme, host, port=None, user=None, path=None, realm=None))] fn get_credentials( &self, scheme: &str, host: &str, port: Option, user: Option<&str>, path: Option<&str>, realm: Option<&str>, ) -> PyResult { self.0 .get_credentials(scheme, host, port, user, path, realm) .map_err(|e| PyErr::new::(e.to_string())) } } /// A registry of credential stores. /// /// This struct wraps a Python credential store registry, which can be used to /// register and retrieve credential stores. pub struct CredentialStoreRegistry(Py); impl CredentialStoreRegistry { /// Create a new `CredentialStoreRegistry`. /// /// # Returns /// /// A new `CredentialStoreRegistry` instance. pub fn new() -> Self { Python::attach(|py| -> Self { let m = py.import("breezy.config").unwrap(); let registry = m.call_method0("CredentialStoreRegistry").unwrap(); Self(registry.unbind()) }) } /// Get a credential store from this registry. /// /// # Parameters /// /// * `encoding` - The encoding of the credential store, or None for the default encoding. /// /// # Returns /// /// The credential store, or None if no credential store was found for the specified encoding. pub fn get_credential_store( &self, encoding: Option<&str>, ) -> Result>> { Python::attach(|py| -> Result>> { let store = match self.0.call_method1(py, "get_credential_store", (encoding,)) { Ok(store) => store, Err(e) if e.is_instance_of::(py) => { return Ok(None); } Err(e) => { return Err(e.into()); } }; Ok(Some(Box::new(PyCredentialStore(store)))) }) } /// Get fallback credentials for accessing a remote service. /// /// # Parameters /// /// * `scheme` - The scheme of the service, like "https", "ftp", etc. /// * `port` - The port number of the service, or None for the default port. /// * `user` - The username to use, or None to use the default username. /// * `path` - The path on the service, or None for the root path. /// * `realm` - The authentication realm, or None for the default realm. /// /// # Returns /// /// The fallback credentials for accessing the service, or an error if the /// credentials could not be retrieved. pub fn get_fallback_credentials( &self, scheme: &str, port: Option, user: Option<&str>, path: Option<&str>, realm: Option<&str>, ) -> Result { Python::attach(|py| -> Result { let creds = self.0.call_method1( py, "get_fallback_credentials", (scheme, port, user, path, realm), )?; Ok(creds.extract(py)?) 
}) } /// Register a credential store with this registry. /// /// # Parameters /// /// * `key` - The key to register the credential store under. /// * `store` - The credential store to register. /// /// # Returns /// /// `Ok(())` on success, or an error if the store could not be registered. pub fn register(&self, key: &str, store: Box) -> Result<()> { Python::attach(|py| -> Result<()> { self.0 .call_method1(py, "register", (key, CredentialStoreWrapper(store)))?; Ok(()) })?; Ok(()) } /// Register a fallback credential store with this registry. /// /// # Parameters /// /// * `store` - The credential store to register as a fallback. /// /// # Returns /// /// `Ok(())` on success, or an error if the store could not be registered. pub fn register_fallback(&self, store: Box) -> Result<()> { Python::attach(|py| -> Result<()> { let kwargs = pyo3::types::PyDict::new(py); kwargs.set_item("fallback", true)?; self.0.call_method( py, "register_fallback", (CredentialStoreWrapper(store),), Some(&kwargs), )?; Ok(()) })?; Ok(()) } } impl Default for CredentialStoreRegistry { fn default() -> Self { Self::new() } } lazy_static::lazy_static! { /// The global credential store registry. /// /// This is a lazily initialized static reference to a `CredentialStoreRegistry` /// instance, which can be used to access credential stores. pub static ref CREDENTIAL_STORE_REGISTRY: CredentialStoreRegistry = CredentialStoreRegistry::new() ; } #[cfg(test)] mod tests { #[test] fn test_credential_store() { fn takes_config_value(_t: T) {} takes_config_value("foo"); takes_config_value(1); takes_config_value(true); takes_config_value("foo".to_string()); } use super::*; use serial_test::serial; #[test] #[serial] fn test_config_stack() { let env = crate::testing::TestEnv::new(); let stack = global_stack().unwrap(); stack.get("email").unwrap(); std::mem::drop(env); } #[test] fn test_parse_username() { assert_eq!( parse_username("John Doe "), ("John Doe".to_string(), "joe@example.com".to_string()) ); assert_eq!( parse_username("John Doe"), ("John Doe".to_string(), "".to_string()) ); } #[test] fn test_extract_email_address() { assert_eq!( extract_email_address("John Doe "), Some("joe@example.com".to_string()) ); assert_eq!(extract_email_address("John Doe"), None); } } breezyshim-0.7.5/src/controldir.rs000064400000000000000000001577171046102023000153540ustar 00000000000000//! The `ControlDir` class provides a high-level interface to control directories, //! e.g. ".bzr" or ".git" directories. use crate::branch::{py_tag_selector, Branch, GenericBranch, PyBranch}; use crate::error::Error; use crate::repository::{GenericRepository, Repository}; use crate::transport::Transport; use crate::workingtree::GenericWorkingTree; use crate::location::AsLocation; use pyo3::prelude::*; use pyo3::types::{PyDict, PyList}; /// Trait for Python probers that can detect control directories. /// /// This trait is implemented by prober types that wrap Python probers, /// which are used to detect the presence of control directories. pub trait PyProber: std::any::Any + std::fmt::Debug { /// Get the underlying Python object for this prober. fn to_object(&self, py: Python) -> Py; } /// Trait for probers that can detect control directories. /// /// This trait defines the interface for probers, which are used to detect /// the presence of control directories (like .git or .bzr) in a location. pub trait Prober: std::fmt::Debug { /// Check if a control directory exists at the location specified by a transport. 
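///
/// For example, a sketch probing the transport of an already opened branch
/// (illustrative URL; assumes `RemoteBzrProber` is exposed from this crate's
/// `bazaar` module, as its source suggests):
///
/// ```no_run
/// use breezyshim::branch::Branch;
/// use breezyshim::controldir::Prober;
///
/// breezyshim::init();
/// let branch = breezyshim::branch::open(
///     &url::Url::parse("file:///path/to/branch").unwrap(),
/// ).unwrap();
/// let prober = breezyshim::bazaar::RemoteBzrProber::new().expect("breezy.bzr importable");
/// // Probe the branch's own transport for a Bazaar control directory.
/// let found = prober.probe_transport(&branch.user_transport()).unwrap_or(false);
/// println!("bzr control directory found: {found}");
/// ```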
/// /// # Parameters /// /// * `transport` - The transport to probe. /// /// # Returns /// /// `Ok(true)` if a control directory exists, `Ok(false)` if not, or an error /// if the probe could not be completed. fn probe_transport(&self, transport: &Transport) -> Result; /// Check if a control directory exists at the specified URL. /// /// # Parameters /// /// * `url` - The URL to probe. /// /// # Returns /// /// `Ok(true)` if a control directory exists, `Ok(false)` if not, or an error /// if the probe could not be completed. fn probe(&self, url: &url::Url) -> Result; } impl Prober for T { fn probe_transport(&self, transport: &Transport) -> Result { Python::attach(|py| { let result = self.to_object(py).call_method1( py, "probe_transport", (transport.as_pyobject(),), )?; Ok(result.extract(py)?) }) } fn probe(&self, url: &url::Url) -> Result { Python::attach(|py| { let result = self .to_object(py) .call_method1(py, "probe", (url.to_string(),))?; Ok(result.extract(py)?) }) } } /// Trait for Python control directories. /// /// This trait is implemented by control directory types that wrap Python /// control directory objects. pub trait PyControlDir: std::any::Any + std::fmt::Debug { /// Get the underlying Python object for this control directory. fn to_object(&self, py: Python) -> Py; } /// Trait for control directories. /// /// A control directory is a directory that contains version control metadata, /// like .git or .bzr. This trait defines the interface for accessing and /// manipulating control directories. pub trait ControlDir: std::fmt::Debug { /// Get a reference to self as Any for downcasting. fn as_any(&self) -> &dyn std::any::Any; /// The branch type associated with this control directory. type Branch: Branch + ?Sized; /// The repository type associated with this control directory. type Repository: Repository; /// The working tree type associated with this control directory. type WorkingTree: crate::workingtree::WorkingTree; /// Get the user-visible URL for this control directory. /// /// # Returns /// /// The URL that can be used to access this control directory. fn get_user_url(&self) -> url::Url; /// Get the format of this control directory. /// /// # Returns /// /// The format of this control directory. fn get_format(&self) -> ControlDirFormat; /// Get a transport for accessing this control directory's user files. /// /// # Returns /// /// A transport for accessing this control directory's user files. fn user_transport(&self) -> Transport; /// Get a transport for accessing this control directory's control files. /// /// # Returns /// /// A transport for accessing this control directory's control files. fn control_transport(&self) -> Transport; /// Open the repository in this control directory. /// /// # Returns /// /// The repository, or an error if the repository could not be opened. fn open_repository(&self) -> Result; /// Find a repository in this control directory or its parents. /// /// # Returns /// /// The repository, or an error if no repository could be found. fn find_repository(&self) -> Result; /// Get the format to use when cloning this control directory. /// /// # Returns /// /// The format to use when cloning this control directory. fn cloning_metadir(&self) -> ControlDirFormat; /// Create a new branch in this control directory. /// /// # Parameters /// /// * `name` - The name of the branch to create, or None for the default branch. /// /// # Returns /// /// The newly created branch, or an error if the branch could not be created. 
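///
/// # Example
///
/// A sketch (illustrative path; creating a named branch requires a format with
/// colocated-branch support):
///
/// ```no_run
/// use breezyshim::branch::Branch;
/// use breezyshim::controldir::ControlDir;
///
/// breezyshim::init();
/// let existing = breezyshim::branch::open(
///     &url::Url::parse("file:///path/to/repo").unwrap(),
/// ).unwrap();
/// // Reach the control directory through an already opened branch.
/// let controldir = existing.controldir();
/// let feature = controldir.create_branch(Some("feature")).unwrap();
/// println!("created {:?}", feature.name());
/// ```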
fn create_branch(&self, name: Option<&str>) -> Result, Error>; /// Create a new repository in this control directory. /// /// # Parameters /// /// * `shared` - Whether the repository should be shared. /// /// # Returns /// /// The newly created repository, or an error if the repository could not be created. fn create_repository(&self, shared: Option) -> Result; /// Open a branch in this control directory. /// /// # Parameters /// /// * `branch_name` - The name of the branch to open, or None for the default branch. /// /// # Returns /// /// The branch, or an error if the branch could not be opened. fn open_branch(&self, branch_name: Option<&str>) -> Result, Error>; /// Create a working tree in this control directory. /// /// # Returns /// /// The newly created working tree, or an error if the working tree could not be created. fn create_workingtree(&self) -> crate::Result; /// Set a branch reference in this control directory. /// /// # Parameters /// /// * `branch` - The branch to reference. /// * `name` - The name to use for the reference, or None for the default name. /// /// # Returns /// /// `Ok(())` on success, or an error if the reference could not be set. fn set_branch_reference(&self, branch: &dyn PyBranch, name: Option<&str>) -> crate::Result<()>; /// Push a branch to this control directory. /// /// # Parameters /// /// * `source_branch` - The branch to push. /// * `to_branch_name` - The name of the branch to push to, or None for the default name. /// * `stop_revision` - The revision to stop pushing at, or None to push all revisions. /// * `overwrite` - Whether to overwrite the target branch if it has diverged. /// * `tag_selector` - A function that selects which tags to push, or None to push all tags. /// /// # Returns /// /// The target branch after the push, or an error if the push failed. fn push_branch( &self, source_branch: &dyn PyBranch, to_branch_name: Option<&str>, stop_revision: Option<&crate::RevisionId>, overwrite: Option, tag_selector: Option bool>>, ) -> crate::Result>; /// Create a new control directory based on this one (similar to clone). /// /// # Parameters /// /// * `target` - The URL of the new control directory. /// * `source_branch` - The branch to use as a source, or None to use the default branch. /// * `create_tree_if_local` - Whether to create a working tree if the target is local. /// * `stacked` - Whether the new branch should be stacked on this one. /// * `revision_id` - The revision to sprout from, or None to use the last revision. /// /// # Returns /// /// The new control directory, or an error if it could not be created. fn sprout( &self, target: url::Url, source_branch: Option<&dyn PyBranch>, create_tree_if_local: Option, stacked: Option, revision_id: Option<&crate::RevisionId>, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, >; /// Check if this control directory has a working tree. /// /// # Returns /// /// `true` if this control directory has a working tree, `false` otherwise. fn has_workingtree(&self) -> bool; /// Open the working tree in this control directory. /// /// # Returns /// /// The working tree, or an error if the working tree could not be opened. fn open_workingtree(&self) -> crate::Result; /// Get the names of all branches in this control directory. /// /// # Returns /// /// A list of branch names, or an error if the branch names could not be retrieved. 
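///
/// # Example
///
/// A sketch listing the branches colocated with an already opened branch
/// (illustrative URL):
///
/// ```no_run
/// use breezyshim::branch::Branch;
/// use breezyshim::controldir::ControlDir;
///
/// breezyshim::init();
/// let branch = breezyshim::branch::open(
///     &url::Url::parse("file:///path/to/branch").unwrap(),
/// ).unwrap();
/// let controldir = branch.controldir();
/// for name in controldir.branch_names().unwrap() {
///     println!("branch: {name}");
/// }
/// ```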
fn branch_names(&self) -> crate::Result>; /// Check if a branch with the given name exists in this control directory. /// /// # Parameters /// /// * `name` - The name of the branch to check, or None for the default branch. /// /// # Returns /// /// `true` if the branch exists, `false` otherwise. fn has_branch(&self, name: Option<&str>) -> bool; /// Create both a branch and repository in this control directory. /// /// # Parameters /// /// * `name` - The name of the branch to create, or None for the default branch. /// * `shared` - Whether the repository should be shared. /// /// # Returns /// /// The created branch, or an error if the branch could not be created. fn create_branch_and_repo( &self, name: Option<&str>, shared: Option, ) -> Result, Error>; /// Get all branches in this control directory. /// /// # Returns /// /// A hashmap of branch names to branches, or an error if the branches could not be retrieved. fn get_branches(&self) -> crate::Result>>; /// List all branches in this control directory. /// /// # Returns /// /// A list of branch names, or an error if the branches could not be listed. fn list_branches(&self) -> crate::Result>; /// Find branches in the repository. /// /// # Parameters /// /// * `using` - Whether to use the repository's revisions. /// /// # Returns /// /// A vector of branches found, or an error if the branches could not be found. fn find_branches(&self, using: Option) -> crate::Result>>; /// Get the reference location for a branch. /// /// # Parameters /// /// * `name` - The name of the branch, or None for the default branch. /// /// # Returns /// /// The branch reference location, or an error if the reference could not be found. fn get_branch_reference(&self, name: Option<&str>) -> crate::Result; /// Check if this control directory can be converted to the given format. /// /// # Parameters /// /// * `format` - The format to check conversion to. /// /// # Returns /// /// `true` if conversion is possible, `false` otherwise. fn can_convert_format(&self, format: &ControlDirFormat) -> bool; /// Check if the target format is a valid conversion target. /// /// # Parameters /// /// * `target_format` - The format to check as a conversion target. /// /// # Returns /// /// An error if the target format is not valid for conversion. fn check_conversion_target(&self, target_format: &ControlDirFormat) -> crate::Result<()>; /// Check if this control directory needs format conversion. /// /// # Parameters /// /// * `format` - The format to check against. /// /// # Returns /// /// `true` if format conversion is needed, `false` otherwise. fn needs_format_conversion(&self, format: Option<&ControlDirFormat>) -> bool; /// Destroy the branch in this control directory. /// /// # Parameters /// /// * `name` - The name of the branch to destroy, or None for the default branch. /// /// # Returns /// /// An error if the branch could not be destroyed. fn destroy_branch(&self, name: Option<&str>) -> crate::Result<()>; /// Destroy the repository in this control directory. /// /// # Returns /// /// An error if the repository could not be destroyed. fn destroy_repository(&self) -> crate::Result<()>; /// Destroy the working tree in this control directory. /// /// # Returns /// /// An error if the working tree could not be destroyed. fn destroy_workingtree(&self) -> crate::Result<()>; /// Destroy the working tree metadata in this control directory. /// /// # Returns /// /// An error if the working tree metadata could not be destroyed. 
fn destroy_workingtree_metadata(&self) -> crate::Result<()>; /// Get the configuration for this control directory. /// /// # Returns /// /// A configuration stack for this control directory. fn get_config(&self) -> crate::Result; } /// A generic wrapper for a Python control directory object. /// /// This struct wraps a Python control directory object and provides access to it /// through the ControlDir trait. pub struct GenericControlDir(Py); impl<'py> IntoPyObject<'py> for GenericControlDir { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericControlDir { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericControlDir(obj.to_owned().unbind())) } } impl PyControlDir for GenericControlDir { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl GenericControlDir { /// Create a new GenericControlDir from a Python control directory object. /// /// # Parameters /// /// * `obj` - A Python object representing a control directory. /// /// # Returns /// /// A new GenericControlDir instance. pub fn new(obj: Py) -> Self { Self(obj) } } impl ControlDir for T { fn as_any(&self) -> &dyn std::any::Any { self } type Branch = GenericBranch; type Repository = crate::repository::GenericRepository; type WorkingTree = crate::workingtree::GenericWorkingTree; fn get_user_url(&self) -> url::Url { Python::attach(|py| { let result = self.to_object(py).getattr(py, "user_url").unwrap(); url::Url::parse(&result.extract::(py).unwrap()).unwrap() }) } fn get_format(&self) -> ControlDirFormat { Python::attach(|py| { let result = self.to_object(py).getattr(py, "_format")?; Ok::<_, PyErr>(ControlDirFormat(result)) }) .unwrap() } fn user_transport(&self) -> Transport { Python::attach(|py| { let result = self.to_object(py).getattr(py, "user_transport").unwrap(); crate::transport::Transport::new(result) }) } fn control_transport(&self) -> Transport { Python::attach(|py| { let result = self.to_object(py).getattr(py, "control_transport").unwrap(); crate::transport::Transport::new(result) }) } fn open_repository(&self) -> Result { Python::attach(|py| { let result = self.to_object(py).call_method0(py, "open_repository")?; Ok(GenericRepository::new(result)) }) } fn find_repository(&self) -> Result { Python::attach(|py| { let result = self.to_object(py).call_method0(py, "find_repository")?; Ok(GenericRepository::new(result)) }) } fn cloning_metadir(&self) -> ControlDirFormat { Python::attach(|py| { let result = self.to_object(py).call_method0(py, "cloning_metadir")?; Ok::<_, PyErr>(ControlDirFormat(result)) }) .unwrap() } fn create_branch(&self, name: Option<&str>) -> Result, Error> { Python::attach(|py| { let branch: Py = self.to_object(py) .call_method(py, "create_branch", (name,), None)?; Ok(Box::new(GenericBranch::from(branch)) as Box) }) } fn create_repository(&self, shared: Option) -> Result { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(shared) = shared { kwargs.set_item("shared", shared)?; } let repository = self.to_object(py) .call_method(py, "create_repository", (), Some(&kwargs))?; Ok(GenericRepository::new(repository)) }) } fn open_branch(&self, branch_name: Option<&str>) -> Result, Error> { Python::attach(|py| { let branch: Py = self.to_object(py) .call_method(py, "open_branch", (branch_name,), None)?; Ok(Box::new(GenericBranch::from(branch)) as Box) }) } fn 
create_workingtree(&self) -> crate::Result { Python::attach(|py| { let wt = self.to_object(py).call_method0(py, "create_workingtree")?; Ok(GenericWorkingTree(wt)) }) } fn set_branch_reference(&self, branch: &dyn PyBranch, name: Option<&str>) -> crate::Result<()> { Python::attach(|py| { self.to_object(py).call_method1( py, "set_branch_reference", (&branch.to_object(py), name), )?; Ok(()) }) } fn push_branch( &self, source_branch: &dyn PyBranch, to_branch_name: Option<&str>, stop_revision: Option<&crate::RevisionId>, overwrite: Option, tag_selector: Option bool>>, ) -> crate::Result> { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(to_branch_name) = to_branch_name { kwargs.set_item("name", to_branch_name)?; } if let Some(tag_selector) = tag_selector { kwargs.set_item("tag_selector", py_tag_selector(py, tag_selector)?)?; } if let Some(overwrite) = overwrite { kwargs.set_item("overwrite", overwrite)?; } if let Some(stop_revision) = stop_revision { kwargs.set_item("stop_revision", stop_revision.clone())?; } let result = self.to_object(py).call_method( py, "push_branch", (&source_branch.to_object(py),), Some(&kwargs), )?; Ok( Box::new(GenericBranch::from(result.getattr(py, "target_branch")?)) as Box, ) }) } fn sprout( &self, target: url::Url, source_branch: Option<&dyn PyBranch>, create_tree_if_local: Option, stacked: Option, revision_id: Option<&crate::RevisionId>, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, > { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(create_tree_if_local) = create_tree_if_local { kwargs .set_item("create_tree_if_local", create_tree_if_local) .unwrap(); } if let Some(stacked) = stacked { kwargs.set_item("stacked", stacked).unwrap(); } if let Some(source_branch) = source_branch { kwargs .set_item("source_branch", source_branch.to_object(py)) .unwrap(); } if let Some(revision_id) = revision_id { kwargs.set_item("revision_id", revision_id.clone()).unwrap(); } let cd = self.to_object(py).call_method( py, "sprout", (target.to_string(),), Some(&kwargs), )?; Ok(Box::new(GenericControlDir(cd)) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >) }) } fn has_workingtree(&self) -> bool { Python::attach(|py| { let result = self .to_object(py) .call_method0(py, "has_workingtree") .unwrap(); result.extract(py).unwrap() }) } fn open_workingtree(&self) -> crate::Result { Python::attach(|py| { let wt = self.to_object(py).call_method0(py, "open_workingtree")?; Ok(GenericWorkingTree(wt)) }) } fn branch_names(&self) -> crate::Result> { Python::attach(|py| { let names = self .to_object(py) .call_method0(py, "branch_names")? 
.extract::>(py)?; Ok(names) }) } fn has_branch(&self, name: Option<&str>) -> bool { Python::attach(|py| { let result = self .to_object(py) .call_method1(py, "has_branch", (name,)) .unwrap(); result.extract(py).unwrap() }) } fn create_branch_and_repo( &self, name: Option<&str>, shared: Option, ) -> Result, Error> { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(shared) = shared { kwargs.set_item("shared", shared)?; } let branch: Py = self.to_object(py).call_method( py, "create_branch_and_repo", (name,), Some(&kwargs), )?; Ok(Box::new(GenericBranch::from(branch)) as Box) }) } fn get_branches(&self) -> crate::Result>> { Python::attach(|py| { let branches_dict = self.to_object(py).call_method0(py, "get_branches")?; let mut branches = std::collections::HashMap::new(); let dict: &Bound = branches_dict .cast_bound(py) .map_err(|_| PyErr::new::("Expected a dict"))?; for (key, value) in dict.iter() { let name: String = key.extract()?; let branch = GenericBranch::from(value.unbind()); branches.insert(name, Box::new(branch) as Box); } Ok(branches) }) } fn list_branches(&self) -> crate::Result> { Python::attach(|py| { let names = self .to_object(py) .call_method0(py, "list_branches")? .extract::>(py)?; Ok(names) }) } fn find_branches(&self, using: Option) -> crate::Result>> { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(using) = using { kwargs.set_item("using", using)?; } let branches_list = self.to_object(py) .call_method(py, "find_branches", (), Some(&kwargs))?; let mut branches = Vec::new(); let list: &Bound = branches_list .cast_bound(py) .map_err(|_| PyErr::new::("Expected a list"))?; for item in list.iter() { let branch = GenericBranch::from(item.unbind()); branches.push(Box::new(branch) as Box); } Ok(branches) }) } fn get_branch_reference(&self, name: Option<&str>) -> crate::Result { Python::attach(|py| { let reference = self .to_object(py) .call_method1(py, "get_branch_reference", (name,))? 
.extract::(py)?; Ok(reference) }) } fn can_convert_format(&self, format: &ControlDirFormat) -> bool { Python::attach(|py| { let result = self .to_object(py) .call_method1(py, "can_convert_format", (format.0.clone_ref(py),)) .unwrap(); result.extract(py).unwrap() }) } fn check_conversion_target(&self, target_format: &ControlDirFormat) -> crate::Result<()> { Python::attach(|py| { self.to_object(py).call_method1( py, "check_conversion_target", (target_format.0.clone_ref(py),), )?; Ok(()) }) } fn needs_format_conversion(&self, format: Option<&ControlDirFormat>) -> bool { Python::attach(|py| { let result = if let Some(format) = format { self.to_object(py) .call_method1(py, "needs_format_conversion", (format.0.clone_ref(py),)) .unwrap() } else { self.to_object(py) .call_method0(py, "needs_format_conversion") .unwrap() }; result.extract(py).unwrap() }) } fn destroy_branch(&self, name: Option<&str>) -> crate::Result<()> { Python::attach(|py| { self.to_object(py) .call_method1(py, "destroy_branch", (name,))?; Ok(()) }) } fn destroy_repository(&self) -> crate::Result<()> { Python::attach(|py| { self.to_object(py).call_method0(py, "destroy_repository")?; Ok(()) }) } fn destroy_workingtree(&self) -> crate::Result<()> { Python::attach(|py| { self.to_object(py).call_method0(py, "destroy_workingtree")?; Ok(()) }) } fn destroy_workingtree_metadata(&self) -> crate::Result<()> { Python::attach(|py| { self.to_object(py) .call_method0(py, "destroy_workingtree_metadata")?; Ok(()) }) } fn get_config(&self) -> crate::Result { Python::attach(|py| { let config = self.to_object(py).call_method0(py, "get_config")?; Ok(crate::config::ConfigStack::new(config)) }) } } impl std::fmt::Debug for GenericControlDir { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("ControlDir({:?})", self.0)) } } /// The format of a control directory. /// /// This struct represents the format of a control directory, which defines how /// the control directory is stored on disk and what capabilities it has. pub struct ControlDirFormat(Py); impl<'py> IntoPyObject<'py> for ControlDirFormat { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl Clone for ControlDirFormat { fn clone(&self) -> Self { Python::attach(|py| ControlDirFormat(self.0.clone_ref(py))) } } impl From> for ControlDirFormat { fn from(obj: Py) -> Self { ControlDirFormat(obj) } } impl Default for ControlDirFormat { fn default() -> Self { Python::attach(|py| { let breezy = PyModule::import(py, "breezy.controldir").unwrap(); let cd_format = breezy.getattr("ControlDirFormat").unwrap(); let obj = cd_format.call_method0("get_default_format").unwrap(); assert!(!obj.is_none()); ControlDirFormat(obj.into()) }) } } impl ControlDirFormat { /// Get the format string for this control directory format. /// /// # Returns /// /// The format string as a byte vector. pub fn get_format_string(&self) -> Vec { Python::attach(|py| { self.0 .call_method0(py, "get_format_string") .unwrap() .extract(py) .unwrap() }) } /// Get a human-readable description of this control directory format. /// /// # Returns /// /// A string describing this control directory format. pub fn get_format_description(&self) -> String { Python::attach(|py| { self.0 .call_method0(py, "get_format_description") .unwrap() .extract(py) .unwrap() }) } /// Check if a filename is a control filename in this format. 
/// /// # Parameters /// /// * `filename` - The filename to check. /// /// # Returns /// /// `true` if the filename is a control filename, `false` otherwise. pub fn is_control_filename(&self, filename: &str) -> bool { Python::attach(|py| { self.0 .call_method1(py, "is_control_filename", (filename,)) .unwrap() .extract(py) .unwrap() }) } /// Initialize a control directory of this format on a transport. /// /// # Parameters /// /// * `transport` - The transport to initialize the control directory on. /// /// # Returns /// /// The initialized control directory, or an error if initialization failed. pub fn initialize_on_transport( &self, transport: &Transport, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, > { Python::attach(|py| { let cd = self.0 .call_method1(py, "initialize_on_transport", (transport.as_pyobject(),))?; Ok(Box::new(GenericControlDir(cd)) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >) }) } /// Initialize a control directory of this format at a location. /// /// # Parameters /// /// * `location` - The location to initialize the control directory at. /// /// # Returns /// /// The initialized control directory, or an error if initialization failed. pub fn initialize( &self, location: impl AsLocation, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, > { Python::attach(|py| { let cd = self .0 .call_method1(py, "initialize", (location.as_location(),))?; Ok(Box::new(GenericControlDir(cd)) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >) }) } } /// Open a tree or branch at a location. /// /// # Parameters /// /// * `location` - The location to open. /// * `name` - The name of the branch to open, or None for the default branch. /// * `possible_transports` - Optional list of transports to try. /// /// # Returns /// /// A tuple with an optional working tree (if one exists) and a branch, or an /// error if neither could be opened. pub fn open_tree_or_branch( location: impl AsLocation, name: Option<&str>, possible_transports: Option<&mut Vec>, ) -> Result<(Option, Box), Error> { Python::attach(|py| { let m = py.import("breezy.controldir")?; let cd = m.getattr("ControlDir")?; let kwargs = PyDict::new(py); if let Some(possible_transports) = possible_transports { kwargs.set_item( "possible_transports", possible_transports .iter() .map(|t| t.as_pyobject().clone_ref(py)) .collect::>>(), )?; } let ret = cd.call_method( "open_tree_or_branch", (location.as_location(), name), Some(&kwargs), )?; let (tree, branch) = ret.extract::<(Option>, Py)>()?; let branch = Box::new(GenericBranch::from(branch)) as Box; let tree = tree.map(GenericWorkingTree); Ok((tree, branch)) }) } /// Open a control directory at a location. /// /// # Parameters /// /// * `url` - The location to open. /// * `possible_transports` - Optional list of transports to try. /// /// # Returns /// /// The control directory, or an error if one could not be opened. 
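///
/// # Example
///
/// A minimal sketch mirroring the unit tests below; the path is illustrative
/// and must already contain a control directory:
///
/// ```no_run
/// use breezyshim::controldir::ControlDir;
/// breezyshim::init();
/// let url = url::Url::from_directory_path("/path/to/repo").unwrap();
/// // Open the control directory and report which format it uses.
/// let controldir = breezyshim::controldir::open(&url, None).unwrap();
/// println!("{}", controldir.get_format().get_format_description());
/// ```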
pub fn open( url: impl AsLocation, possible_transports: Option<&mut Vec>, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, > { Python::attach(|py| { let m = py.import("breezy.controldir")?; let cd = m.getattr("ControlDir")?; let kwargs = PyDict::new(py); if let Some(possible_transports) = possible_transports { kwargs.set_item( "possible_transports", possible_transports .iter() .map(|t| t.as_pyobject().clone_ref(py)) .collect::>>(), )?; } let controldir = cd.call_method("open", (url.as_location(),), Some(&kwargs))?; Ok(Box::new(GenericControlDir(controldir.unbind())) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >) }) } /// Create a new control directory at a location. /// /// # Parameters /// /// * `url` - The location to create the control directory at. /// * `format` - The format to use for the new control directory. /// * `possible_transports` - Optional list of transports to try. /// /// # Returns /// /// The newly created control directory, or an error if it could not be created. pub fn create( url: impl AsLocation, format: impl AsFormat, possible_transports: Option<&mut Vec>, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, > { Python::attach(|py| { let m = py.import("breezy.controldir")?; let cd = m.getattr("ControlDir")?; let kwargs = PyDict::new(py); if let Some(format) = format.as_format() { kwargs.set_item("format", format.clone())?; } if let Some(possible_transports) = possible_transports { kwargs.set_item( "possible_transports", possible_transports .iter() .map(|t| t.as_pyobject().clone_ref(py)) .collect::>>(), )?; } let controldir = cd.call_method("create", (url.as_location(),), Some(&kwargs))?; Ok(Box::new(GenericControlDir(controldir.unbind())) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >) }) } /// Create a new control directory on a transport. /// /// # Parameters /// /// * `transport` - The transport to create the control directory on. /// * `format` - The format to use for the new control directory. /// /// # Returns /// /// The newly created control directory, or an error if it could not be created. pub fn create_on_transport( transport: &Transport, format: impl AsFormat, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, > { Python::attach(|py| { let format = format.as_format().unwrap().0; Ok(Box::new(GenericControlDir(format.call_method( py, "initialize_on_transport", (transport.as_pyobject(),), None, )?)) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >) }) } /// Find a control directory containing a location specified by a transport. /// /// # Parameters /// /// * `transport` - The transport to search from. /// * `probers` - Optional list of probers to use to detect control directories. /// /// # Returns /// /// A tuple containing the control directory and the relative path from the control /// directory to the location specified by the transport, or an error if no control /// directory could be found. 
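///
/// # Example
///
/// A sketch of locating the control directory that contains a sub-path on a
/// transport (the URL is illustrative):
///
/// ```no_run
/// use breezyshim::controldir::ControlDir;
/// breezyshim::init();
/// let url: url::Url = "file:///path/to/repo/subdir".parse().unwrap();
/// let transport = breezyshim::transport::get_transport(&url, None).unwrap();
/// // Walk up from the sub-path until a control directory is found, and
/// // report the remaining relative path.
/// let (controldir, relpath) =
///     breezyshim::controldir::open_containing_from_transport(&transport, None).unwrap();
/// println!("{} -> {}", relpath, controldir.get_format().get_format_description());
/// ```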
pub fn open_containing_from_transport( transport: &Transport, probers: Option<&[&dyn PyProber]>, ) -> Result< ( Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, String, ), Error, > { Python::attach(|py| { let m = py.import("breezy.controldir")?; let cd = m.getattr("ControlDir")?; let kwargs = PyDict::new(py); if let Some(probers) = probers { kwargs.set_item( "probers", probers.iter().map(|p| p.to_object(py)).collect::>(), )?; } let (controldir, subpath): (Py, String) = cd .call_method( "open_containing_from_transport", (transport.as_pyobject(),), Some(&kwargs), )? .extract()?; Ok(( Box::new(GenericControlDir(controldir)) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, subpath, )) }) } /// Open a control directory from a transport. /// /// # Parameters /// /// * `transport` - The transport to open from. /// * `probers` - Optional list of probers to use to detect control directories. /// /// # Returns /// /// The opened control directory, or an error if no control directory could be found. pub fn open_from_transport( transport: &Transport, probers: Option<&[&dyn PyProber]>, ) -> Result< Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >, Error, > { Python::attach(|py| { let m = py.import("breezy.controldir")?; let cd = m.getattr("ControlDir")?; let kwargs = PyDict::new(py); if let Some(probers) = probers { kwargs.set_item( "probers", probers.iter().map(|p| p.to_object(py)).collect::>(), )?; } let controldir = cd.call_method( "open_from_transport", (transport.as_pyobject(),), Some(&kwargs), )?; Ok(Box::new(GenericControlDir(controldir.unbind())) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >) }) } /// Trait for types that can be converted to a control directory format. /// /// This trait is implemented by types that can be converted to a control directory /// format, like &str and &ControlDirFormat. pub trait AsFormat { /// Convert to a control directory format. /// /// # Returns /// /// The control directory format, or None if the conversion failed. fn as_format(&self) -> Option; } impl AsFormat for &str { fn as_format(&self) -> Option { Python::attach(|py| { let m = py.import("breezy.controldir").ok()?; let cd = m.getattr("format_registry").ok()?; let format = cd .call_method1("make_controldir", (self.to_string(),)) .ok()?; Some(ControlDirFormat(format.unbind())) }) } } impl AsFormat for &ControlDirFormat { fn as_format(&self) -> Option { Some(Python::attach(|py| ControlDirFormat(self.0.clone_ref(py)))) } } /// Create a branch conveniently (includes creating a repository if needed). /// /// # Parameters /// /// * `base` - The URL to create the branch at. /// * `force_new_tree` - Whether to force the creation of a new working tree if /// one already exists. /// * `format` - The format to use for the new branch. /// /// # Returns /// /// The newly created branch, or an error if the branch could not be created. 
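///
/// # Example
///
/// A sketch mirroring the unit tests below; the target directory is
/// illustrative and must already exist on disk:
///
/// ```no_run
/// use breezyshim::branch::Branch;
/// breezyshim::init();
/// let base = url::Url::from_directory_path("/path/to/new/branch").unwrap();
/// // Create a branch (plus repository and working tree as needed) in one call.
/// let branch = breezyshim::controldir::create_branch_convenience(
///     &base,
///     None,
///     &breezyshim::controldir::ControlDirFormat::default(),
/// )
/// .unwrap();
/// assert_eq!(branch.get_user_url(), base);
/// ```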
pub fn create_branch_convenience( base: &url::Url, force_new_tree: Option, format: impl AsFormat, ) -> Result, Error> { Python::attach(|py| { let m = py.import("breezy.controldir")?; let cd = m.getattr("ControlDir")?; let format = format.as_format(); let kwargs = PyDict::new(py); if let Some(force_new_tree) = force_new_tree { kwargs.set_item("force_new_tree", force_new_tree)?; } if let Some(format) = format { kwargs.set_item("format", format.clone())?; } let branch = cd.call_method( "create_branch_convenience", (base.to_string(),), Some(&kwargs), )?; Ok(Box::new(GenericBranch::from(branch.unbind())) as Box) }) } /// Create a standalone working tree. /// /// # Arguments /// * `base` - The base directory for the working tree. /// * `format` - The format of the working tree. pub fn create_standalone_workingtree( base: &std::path::Path, format: impl AsFormat, ) -> Result { let base = base.to_str().unwrap(); Python::attach(|py| { let m = py.import("breezy.controldir")?; let cd = m.getattr("ControlDir")?; let format = format.as_format(); let wt = cd.call_method( "create_standalone_workingtree", (base, format.unwrap_or_default()), None, )?; Ok(GenericWorkingTree(wt.unbind())) }) } /// A generic prober for detecting control directories. /// /// This struct wraps a Python prober object and provides access to it through /// the Prober trait. pub struct GenericProber(Py); impl<'py> IntoPyObject<'py> for GenericProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericProber(obj.to_owned().unbind())) } } impl PyProber for GenericProber { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl GenericProber { /// Create a new GenericProber from a Python prober object. /// /// # Parameters /// /// * `obj` - A Python object representing a prober. /// /// # Returns /// /// A new GenericProber instance. pub fn new(obj: Py) -> Self { Self(obj) } } /// Implementation of Debug for GenericProber. impl std::fmt::Debug for GenericProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("Prober({:?})", self.0)) } } /// Get all available probers. /// /// # Returns /// /// A list of all available probers. pub fn all_probers() -> Vec> { Python::attach(|py| -> PyResult>> { let m = py.import("breezy.controldir")?; let cdf = m.getattr("ControlDirFormat")?; let probers = cdf .call_method0("all_probers")? .extract::>>()?; Ok(probers .into_iter() .map(|p| Box::new(GenericProber::new(p)) as Box) .collect::>()) }) .unwrap() } /// A registry of control directory formats. /// /// This struct wraps a Python registry of control directory formats, /// which can be used to create control directory formats from names. pub struct ControlDirFormatRegistry(Py); impl ControlDirFormatRegistry { /// Create a new ControlDirFormatRegistry. /// /// # Returns /// /// A new ControlDirFormatRegistry instance. pub fn new() -> Self { Python::attach(|py| { let m = py.import("breezy.controldir").unwrap(); let obj = m.getattr("format_registry").unwrap(); ControlDirFormatRegistry(obj.into()) }) } /// Create a control directory format from a format name. /// /// # Parameters /// /// * `format` - The name of the format to create. 
/// /// # Returns /// /// The control directory format, or None if the format name is not recognized. pub fn make_controldir(&self, format: &str) -> Option { Python::attach( |py| match self.0.call_method1(py, "make_controldir", (format,)) { Ok(format) => Some(ControlDirFormat(format)), Err(e) if e.is_instance_of::(py) => None, Err(e) => panic!("{}", e), }, ) } } /// Implementation of Default for ControlDirFormatRegistry. impl Default for ControlDirFormatRegistry { /// Creates a default ControlDirFormatRegistry. /// /// # Returns /// /// A new ControlDirFormatRegistry instance. fn default() -> Self { ControlDirFormatRegistry::new() } } lazy_static::lazy_static! { /// The global control directory format registry. /// /// This is a lazily initialized static reference to a `ControlDirFormatRegistry` /// instance, which can be used to access control directory formats. pub static ref FORMAT_REGISTRY: ControlDirFormatRegistry = ControlDirFormatRegistry::new(); } #[cfg(test)] mod tests { use super::*; use crate::workingtree::WorkingTree; #[test] fn test_controldir_to_pycontroldir_conversion() { // Test the pattern from the issue: // 1. Get a working tree // 2. Get its controldir as Box // 3. Downcast it to use as &dyn PyControlDir let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); // Get controldir as Box let controldir = wt.controldir(); // Now try to downcast it to GenericControlDir using as_any() if let Some(generic_controldir) = controldir.as_any().downcast_ref::() { // Success! We can now use it as &dyn PyControlDir let py_controldir: &dyn PyControlDir = generic_controldir; // Verify we can call PyControlDir methods Python::attach(|py| { let _obj = py_controldir.to_object(py); }); } else { panic!("Failed to downcast ControlDir to GenericControlDir"); } } #[test] fn test_control_dir_format_registry() { crate::init(); let registry = ControlDirFormatRegistry::new(); let format = registry.make_controldir("2a").unwrap(); let _ = format.get_format_string(); } #[test] fn test_format_registry() { crate::init(); let format = FORMAT_REGISTRY.make_controldir("2a").unwrap(); let _ = format.get_format_string(); } #[test] fn test_all_probers() { crate::init(); let probers = all_probers(); assert!(!probers.is_empty()); } #[test] fn test_open_tree_or_branch() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); create_branch_convenience( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, &ControlDirFormat::default(), ) .unwrap(); let (wt, branch) = open_tree_or_branch( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, None, ) .unwrap(); assert_eq!( wt.unwrap().basedir().canonicalize().unwrap(), tmp_dir.path().canonicalize().unwrap() ); assert_eq!( branch.get_user_url(), url::Url::from_directory_path(tmp_dir.path()).unwrap() ); } #[test] fn test_control_dir_format_default() { crate::init(); let d = ControlDirFormat::default(); d.get_format_string(); } #[test] fn test_open() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let e = open( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, ) .unwrap_err(); assert!(matches!(e, Error::NotBranchError(..)),); let cd = create( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), "2a", None, ) .unwrap(); let od = open( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, ) .unwrap(); assert_eq!( cd.get_format().get_format_string(), od.get_format().get_format_string() ); } #[test] fn test_create() { crate::init(); let tmp_dir = 
tempfile::tempdir().unwrap(); let cd = create( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), "2a", None, ) .unwrap(); let od = open( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, ) .unwrap(); assert_eq!( cd.get_format().get_format_string(), od.get_format().get_format_string() ); } #[test] fn test_create_on_transport() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let transport = crate::transport::get_transport( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, ) .unwrap(); let _cd = create_on_transport(&transport, "2a").unwrap(); } #[test] fn test_open_containing_from_transport() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let transport = crate::transport::get_transport( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, ) .unwrap(); let e = open_containing_from_transport(&transport, None).unwrap_err(); assert!(matches!(e, Error::NotBranchError(..)),); } #[test] fn test_open_from_transport() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let transport = crate::transport::get_transport( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, ) .unwrap(); let e = open_from_transport(&transport, None).unwrap_err(); assert!(matches!(e, Error::NotBranchError(..)),); } #[test] fn test_create_standalone_workingtree() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); assert_eq!( wt.basedir().canonicalize().unwrap(), tmp_dir.path().canonicalize().unwrap() ); } #[test] fn test_create_branch_convenience() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let branch = create_branch_convenience( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), None, &ControlDirFormat::default(), ) .unwrap(); assert_eq!( branch.get_user_url(), url::Url::from_directory_path(tmp_dir.path()).unwrap() ); } #[test] fn test_create_repository() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let controldir = create( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), &ControlDirFormat::default(), None, ) .unwrap(); let _repo = controldir.create_repository(None).unwrap(); } #[test] fn test_create_branch() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let controldir = create( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), &ControlDirFormat::default(), None, ) .unwrap(); assert!(matches!( controldir.create_branch(None), Err(Error::NoRepositoryPresent) )); let _repo = controldir.create_repository(None).unwrap(); let _branch = controldir.create_branch(Some("foo")).unwrap(); } #[test] fn test_create_workingtree() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let controldir = create( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), &ControlDirFormat::default(), None, ) .unwrap(); controldir.create_repository(None).unwrap(); controldir.create_branch(None).unwrap(); let _wt = controldir.create_workingtree().unwrap(); } #[test] fn test_branch_names() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let controldir = create( &url::Url::from_directory_path(tmp_dir.path()).unwrap(), &ControlDirFormat::default(), None, ) .unwrap(); controldir.create_repository(None).unwrap(); controldir.create_branch(None).unwrap(); assert_eq!(controldir.branch_names().unwrap(), vec!["".to_string()]); } } breezyshim-0.7.5/src/cvs.rs000064400000000000000000000036351046102023000137550ustar 00000000000000//! Support for detecting CVS repositories. //! //! 
This module provides a prober for detecting CVS repositories, but //! does not provide any support for interacting with them. use pyo3::exceptions::PyModuleNotFoundError; use pyo3::prelude::*; /// A prober for CVS repositories. pub struct CVSProber(Py); impl CVSProber { /// Create a new CVS prober instance. pub fn new() -> Option { Python::attach(|py| { let m = match py.import("breezy.plugins.cvs") { Ok(m) => m, Err(e) => { if e.is_instance_of::(py) { return None; } else { e.print_and_set_sys_last_vars(py); panic!("Failed to import breezy.plugins.cvs"); } } }; let cvsprober = m.getattr("CVSProber").expect("Failed to get CVSProber"); Some(Self(cvsprober.unbind())) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for CVSProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Self(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for CVSProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl std::fmt::Debug for CVSProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("CVSProber({:?})", self.0)) } } impl crate::controldir::PyProber for CVSProber { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_remote_cvs_prober() { let _ = CVSProber::new(); } } breezyshim-0.7.5/src/darcs.rs000064400000000000000000000036331046102023000142540ustar 00000000000000//! Darcs prober. //! //! This module provides a prober for Darcs repositories. It can detect //! darcs repositories but does not provide any additional functionality. use pyo3::exceptions::PyModuleNotFoundError; use pyo3::prelude::*; /// A prober for Darcs repositories. pub struct DarcsProber(Py); impl DarcsProber { /// Create a new Darcs prober instance. pub fn new() -> Option { Python::attach(|py| { let m = match py.import("breezy.plugins.darcs") { Ok(m) => m, Err(e) => { if e.is_instance_of::(py) { return None; } else { e.print_and_set_sys_last_vars(py); panic!("Failed to import breezy.plugins.darcs"); } } }; let prober = m.getattr("DarcsProber").expect("Failed to get DarcsProber"); Some(Self(prober.unbind())) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for DarcsProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Self(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for DarcsProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl std::fmt::Debug for DarcsProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("DarcsProber({:?})", self.0)) } } impl crate::controldir::PyProber for DarcsProber { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_new() { let _ = DarcsProber::new(); } } breezyshim-0.7.5/src/debian/apt.rs000064400000000000000000000331311046102023000151620ustar 00000000000000//! APT repository access. use crate::error::Error; use debian_control::apt::{Package, Source}; use debversion::Version; use pyo3::exceptions::{PyModuleNotFoundError, PyStopIteration}; use pyo3::intern; use pyo3::prelude::*; pyo3::import_exception!(breezy.plugins.debian.apt_repo, NoAptSources); lazy_static::lazy_static! 
{ static ref apt_mutex: std::sync::Mutex<()> = std::sync::Mutex::new(()); } struct SourceIterator(Py); impl Iterator for SourceIterator { type Item = Source; fn next(&mut self) -> Option { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let next = self.0.call_method0(py, "__next__"); match next { Ok(next) => Some(next.extract(py).unwrap()), Err(e) if e.is_instance_of::(py) => None, Err(e) if e.is_instance_of::(py) => None, Err(e) => panic!("error iterating: {:?}", e), } }) } } struct PackageIterator(Py); impl Iterator for PackageIterator { type Item = Package; fn next(&mut self) -> Option { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let next = self.0.call_method0(py, "__next__"); match next { Ok(next) => Some(next.extract(py).unwrap()), Err(e) if e.is_instance_of::(py) => None, Err(e) => panic!("error iterating: {:?}", e), } }) } } /// Interface for interacting with APT repositories. /// /// This trait defines methods for retrieving packages and other information /// from APT repositories, both local and remote. pub trait Apt { /// Get the underlying Py fn as_pyobject(&self) -> &Py; // Retrieve the orig tarball from the repository. // // # Arguments // * `source_name` - The name of the source package to retrieve. // * `target_directory` - The directory to store the orig tarball in. // * `orig_version` - The version of the orig tarball to retrieve. // // # Returns // * `Ok(())` - If the orig tarball was successfully retrieved. /// Retrieve the orig tarball from the repository. /// /// # Arguments /// * `source_name` - The name of the source package to retrieve /// * `target_directory` - The directory to store the orig tarball in /// * `orig_version` - The version of the orig tarball to retrieve /// /// # Returns /// * `Ok(())` - If the orig tarball was successfully retrieved fn retrieve_orig( &self, source_name: &str, target_directory: &std::path::Path, orig_version: Option<&Version>, ) -> Result<(), Error> { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let apt = self.as_pyobject(); apt.call_method1( py, "retrieve_orig", ( source_name, target_directory.to_string_lossy().to_string(), orig_version.map(|v| v.to_string()), ), )?; Ok(()) }) } /// Retrieve the source package from the repository. /// /// # Arguments /// * `source_name` - The name of the source package to retrieve. /// * `target_directory` - The directory to store the source package in. /// * `source_version` - The version of the source package to retrieve. /// /// # Returns /// * `Ok(())` - If the source package was successfully retrieved. fn retrieve_source( &self, source_name: &str, target_directory: &std::path::Path, source_version: Option<&Version>, ) -> Result<(), Error> { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let apt = self.as_pyobject(); apt.call_method1( py, "retrieve_source", ( source_name, target_directory.to_string_lossy().to_string(), source_version.map(|v| v.to_string()), ), )?; Ok(()) }) } /// Retrieve the binary package from the repository. fn iter_sources(&self) -> Box> { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let apt = self.as_pyobject(); let iter = apt.call_method0(py, "iter_sources").unwrap(); Box::new(SourceIterator(iter)) }) } /// Retrieve the binary package from the repository. 
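    ///
    /// # Example
    ///
    /// A sketch of walking the binary packages known to the local APT
    /// configuration (assumes the `breezy.plugins.debian` plugin is installed
    /// and that this module is exposed as `breezyshim::debian::apt`):
    ///
    /// ```no_run
    /// use breezyshim::debian::apt::{Apt, LocalApt};
    /// breezyshim::init();
    /// let apt = LocalApt::new(None).unwrap();
    /// // Print the first few binary packages and their versions.
    /// for package in apt.iter_binaries().take(3) {
    ///     println!("{:?} {:?}", package.name(), package.version());
    /// }
    /// ```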
fn iter_binaries(&self) -> Box> { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let apt = self.as_pyobject(); let iter = apt.call_method0(py, "iter_binaries").unwrap(); Box::new(PackageIterator(iter)) }) } /// Retrieve source package by name. fn iter_source_by_name(&self, name: &str) -> Box> { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let apt = self.as_pyobject(); let iter = apt .call_method1(py, "iter_source_by_name", (name,)) .unwrap(); Box::new(SourceIterator(iter)) }) } /// Retrieve binary package by name. fn iter_binary_by_name(&self, name: &str) -> Box> { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let apt = self.as_pyobject(); let iter = apt .call_method1(py, "iter_binary_by_name", (name,)) .unwrap(); Box::new(PackageIterator(iter)) }) } } /// Interface to a local APT repository. /// /// This struct provides access to the APT repositories configured on the local system. pub struct LocalApt(Py); impl Apt for LocalApt { fn as_pyobject(&self) -> &Py { &self.0 } } impl<'py> IntoPyObject<'py> for LocalApt { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } impl LocalApt { /// Create a new LocalApt instance. /// /// # Arguments /// * `rootdir` - Optional root directory for the APT configuration /// /// # Returns /// A new LocalApt instance or an error pub fn new(rootdir: Option<&std::path::Path>) -> Result { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let m = match PyModule::import(py, "breezy.plugins.debian.apt_repo") { Ok(m) => m, Err(e) if e.is_instance_of::(py) => { return Err(Error::DependencyNotPresent( "breezy.plugins.debian".to_string(), "Install the brz-debian plugin".to_string(), )); } Err(e) => return Err(e.into()), }; let apt = m.getattr("LocalApt")?; let apt = apt.call1((rootdir.map(|p| p.to_string_lossy().to_string()),))?; apt.call_method0(intern!(py, "__enter__"))?; Ok(Self(apt.into())) }) } } impl Default for LocalApt { fn default() -> Self { LocalApt::new(None).expect("Failed to create LocalApt instance") } } impl Drop for LocalApt { fn drop(&mut self) { Python::attach(|py| { self.0 .call_method1( py, intern!(py, "__exit__"), (py.None(), py.None(), py.None()), ) .unwrap(); }); } } /// Interface to a remote APT repository. /// /// This struct provides access to APT repositories on remote servers. pub struct RemoteApt(Py); impl<'py> IntoPyObject<'py> for RemoteApt { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } impl RemoteApt { /// Create a new RemoteApt instance. 
/// /// # Arguments /// * `mirror_uri` - URI of the APT mirror /// * `distribution` - Optional distribution name (e.g., "unstable") /// * `components` - Optional list of components (e.g., "main", "contrib") /// * `key_path` - Optional path to the GPG key file /// /// # Returns /// A new RemoteApt instance or an error pub fn new( mirror_uri: &url::Url, distribution: Option<&str>, components: Option>, key_path: Option<&std::path::Path>, ) -> Result { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let m = match PyModule::import(py, "breezy.plugins.debian.apt_repo") { Ok(m) => m, Err(e) if e.is_instance_of::(py) => { return Err(Error::DependencyNotPresent( "breezy.plugins.debian".to_string(), "Install the brz-debian plugin".to_string(), )); } Err(e) => return Err(e.into()), }; let apt = m.getattr("RemoteApt")?; let apt = apt.call1(( mirror_uri.as_str(), distribution, components, key_path.map(|p| p.to_string_lossy().to_string()), ))?; apt.call_method0(intern!(py, "__enter__"))?; Ok(Self(apt.into())) }) } /// Create a new RemoteApt instance from an APT sources.list entry string. /// /// # Arguments /// * `text` - Text from a sources.list entry /// * `key_path` - Optional path to the GPG key file /// /// # Returns /// A new RemoteApt instance or an error pub fn from_string(text: &str, key_path: Option<&std::path::Path>) -> Result { let _mutex = apt_mutex.lock().unwrap(); Python::attach(|py| { let m = match PyModule::import(py, "breezy.plugins.debian.apt_repo") { Ok(m) => m, Err(e) if e.is_instance_of::(py) => { return Err(Error::DependencyNotPresent( "breezy.plugins.debian".to_string(), "Install the brz-debian plugin".to_string(), )); } Err(e) => return Err(e.into()), }; let apt = m.getattr("RemoteApt")?; let apt = apt.call_method1( "from_string", (text, key_path.map(|p| p.to_string_lossy().to_string())), )?; apt.call_method0(intern!(py, "__enter__"))?; Ok(Self(apt.into())) }) } } impl Apt for RemoteApt { fn as_pyobject(&self) -> &Py { &self.0 } } impl Drop for RemoteApt { fn drop(&mut self) { Python::attach(|py| { self.0 .call_method1( py, intern!(py, "__exit__"), (py.None(), py.None(), py.None()), ) .unwrap(); }); } } #[cfg(test)] mod tests { use super::*; #[test] fn test_local_apt_retrieve_orig() { let apt = match LocalApt::new(None) { Ok(apt) => apt, Err(Error::DependencyNotPresent(dep, _)) if dep == "breezy.plugins.debian" => { // Skip test if brz-debian plugin is not installed return; } Err(e) => panic!("Unexpected error creating LocalApt: {:?}", e), }; let td = tempfile::tempdir().unwrap(); match apt.retrieve_orig("apt", td.path(), None) { Ok(_) => { // Verify the orig file is there let entries = td.path().read_dir().unwrap().collect::>(); assert_eq!(entries.len(), 1); let entry = entries[0].as_ref().unwrap(); assert!(entry.file_name().to_str().unwrap().starts_with("apt_"),); assert!(entry .file_name() .to_str() .unwrap() .ends_with(".orig.tar.gz"),); } Err(Error::NotImplemented) => { // This is expected, LocalApt does not implement this method } Err(e) => panic!("Unexpected error: {:?}", e), } } #[test] #[ignore] // Sometimes hangs fn test_local_apt() { let apt = match LocalApt::new(None) { Ok(apt) => apt, Err(Error::DependencyNotPresent(dep, _)) if dep == "breezy.plugins.debian" => { // Skip test if brz-debian plugin is not installed return; } Err(e) => panic!("Unexpected error creating LocalApt: {:?}", e), }; let package = apt.iter_binaries().next().unwrap(); assert!(package.name().is_some()); assert!(package.version().is_some()); let mut sources = apt.iter_sources(); if 
let Some(source) = sources.next() { assert!(source.package().is_some()); let source = apt.iter_source_by_name("dpkg").next().unwrap(); assert_eq!(source.package().unwrap(), "dpkg"); let package = apt.iter_binary_by_name("dpkg").next().unwrap(); assert_eq!(package.name().unwrap(), "dpkg"); } } } breezyshim-0.7.5/src/debian/debcommit.rs000064400000000000000000000243111046102023000163410ustar 00000000000000use crate::branch::Branch; use crate::commit::PyCommitReporter; use crate::debian::error::Error; use crate::debian::{suite_to_distribution, Vendor}; use crate::error::Error as BrzError; use crate::tree::{Kind, Path, PyTree, Tree}; use crate::workingtree::PyWorkingTree; use crate::workingtree::WorkingTree; use crate::RevisionId; use debian_changelog::ChangeLog; /// Create a release commit and tag for a Debian package. /// /// This function creates a commit for a Debian package release, and tags /// it with an appropriate tag based on the package name and version. /// /// # Arguments /// * `tree` - The working tree containing the package /// * `committer` - Optional committer identity /// * `subpath` - Optional subpath to the package within the tree /// * `message` - Optional commit message (default: "releasing package X version Y") /// * `vendor` - Optional vendor (default: inferred from changelog) /// /// # Returns /// The name of the created tag, or an error pub fn debcommit_release( tree: &dyn PyWorkingTree, committer: Option<&str>, subpath: Option<&std::path::Path>, message: Option<&str>, vendor: Option, ) -> Result { let subpath = subpath.unwrap_or_else(|| std::path::Path::new("")); let cl_path = subpath.join("debian/changelog"); let (message, vendor) = if let (Some(message), Some(vendor)) = (message, vendor) { (message.to_string(), vendor) } else { let f = tree.get_file(&cl_path)?; let cl = ChangeLog::read(f)?; let entry = cl.iter().next().unwrap(); let message = if let Some(message) = message { message.to_string() } else { format!( "releasing package {} version {}", entry.package().unwrap(), entry.version().unwrap() ) }; let vendor = vendor.unwrap_or_else(|| { suite_to_distribution( entry .distributions() .as_ref() .and_then(|d| d.first()) .unwrap(), ) .unwrap() }); (message, vendor) }; let tag_name = if let Ok(tag_name) = crate::debian::tree_debian_tag_name(tree, &tree.branch(), Some(subpath), Some(vendor)) { tag_name } else { return Err(Error::UnreleasedChanges); }; let mut builder = tree.build_commit().message(&message); if let Some(committer) = committer { builder = builder.committer(committer); } let revid = builder.commit()?; tree.branch().tags().unwrap().set_tag(&tag_name, &revid)?; Ok(tag_name) } /// Find changes in a changelog file between two trees. /// /// # Arguments /// * `tree` - The current tree /// * `basis_tree` - The basis tree to compare against /// * `cl_path` - Path to the changelog file /// /// # Returns /// An optional list of changes, or an error pub fn changelog_changes( tree: &dyn PyTree, basis_tree: &dyn PyTree, cl_path: &Path, ) -> Result>, BrzError> { let mut changes = vec![]; for change in tree.iter_changes(basis_tree, Some(&[cl_path]), None, None)? 
{ let change = change?; let paths = change.path; let changed_content = change.changed_content; let versioned = change.versioned; let kind = change.kind; // Content not changed if !changed_content { return Ok(None); } // Not versioned in new tree if !versioned.1.unwrap_or(false) { return Ok(None); } // Not a file in one tree if kind.0 != Some(Kind::File) || kind.1 != Some(Kind::File) { return Ok(None); } let old_text = basis_tree.get_file_lines(&paths.0.unwrap())?; let new_text = tree.get_file_lines(&paths.1.unwrap())?; changes.extend(new_changelog_entries(&old_text, &new_text)); } Ok(Some(changes)) } /// Strip a changelog message like debcommit does. /// /// Takes a list of changes from a changelog entry and applies a transformation /// so the message is well formatted for a commit message. /// /// # Arguments /// * `changes` - a list of lines from the changelog entry /// /// # Returns /// another list of lines with blank lines stripped from the start /// and the spaces the start of the lines split if there is only one /// logical entry. pub fn strip_changelog_message(changes: &[&str]) -> Vec { if changes.is_empty() { return vec![]; } let mut changes = changes.to_vec(); while changes.last() == Some(&"") { changes.pop(); } while changes.first() == Some(&"") { changes.remove(0); } let changes = changes .into_iter() .map(|l| lazy_regex::regex_replace!(r" |\t", l, |_| "")) .collect::>(); let leader_re = lazy_regex::regex!(r"^[ \t]*[*+-] "); let leader_changes = changes .iter() .filter(|line| leader_re.is_match(line)) .collect::>(); if leader_changes.len() == 1 { changes .iter() .map(|line| leader_re.replace(line, "").trim_start().to_string()) .collect() } else { changes.into_iter().map(|l| l.to_string()).collect() } } /// Create a commit message based on changes in a changelog file. /// /// # Arguments /// * `tree` - The current tree /// * `basis_tree` - The basis tree to compare against /// * `path` - Path to the changelog file /// /// # Returns /// A commit message string based on the changelog changes, or an error pub fn changelog_commit_message( tree: &dyn PyTree, basis_tree: &dyn PyTree, path: &Path, ) -> Result { let changes = changelog_changes(tree, basis_tree, path)?; let changes = changes.unwrap_or_default(); Ok(strip_changelog_message( changes .iter() .map(|s| s.as_str()) .collect::>() .as_slice(), ) .concat()) } /// Create a git commit with message based on the new entries in changelog. 
/// /// # Arguments /// * `tree` - Tree to commit in /// * `committer` - Optional committer identity /// * `subpath` - subpath to commit in /// * `paths` - specifics paths to commit, if any /// * `reporter` - CommitReporter to use /// /// # Returns /// Created revision id pub fn debcommit( tree: &dyn PyWorkingTree, committer: Option<&str>, subpath: &Path, paths: Option<&[&Path]>, reporter: Option<&dyn PyCommitReporter>, message: Option<&str>, ) -> Result { let message = message.map_or_else( || { changelog_commit_message( tree, &tree.basis_tree().unwrap(), &subpath.join("debian/changelog"), ) .unwrap() }, |m| m.to_string(), ); let specific_files = if let Some(paths) = paths { Some(paths.iter().map(|p| subpath.join(p)).collect()) } else if !subpath.to_str().unwrap().is_empty() { Some(vec![subpath.to_path_buf()]) } else { None }; let mut builder = tree.build_commit().message(&message); if let Some(reporter) = reporter { builder = builder.reporter(reporter); } if let Some(committer) = committer { builder = builder.committer(committer); } if let Some(specific_files) = specific_files { builder = builder.specific_files( specific_files .iter() .map(|p| p.as_path()) .collect::>() .as_slice(), ); } builder.commit() } /// Extract new entries from a Debian changelog. /// /// This function compares the old and new versions of a changelog file /// and extracts the newly added entries. /// /// # Arguments /// * `old_text` - The old version of the changelog as byte lines /// * `new_text` - The new version of the changelog as byte lines /// /// # Returns /// A vector of strings containing the new changelog entries pub fn new_changelog_entries(old_text: &[Vec], new_text: &[Vec]) -> Vec { let mut sm = difflib::sequencematcher::SequenceMatcher::new(old_text, new_text); let mut changes = vec![]; for group in sm.get_grouped_opcodes(0) { let (j1, j2) = (group[0].second_start, group.last().unwrap().second_end); for line in new_text[j1..j2].iter() { if line.starts_with(b" ") { // Debian Policy Manual states that debian/changelog must be UTF-8 changes.push(String::from_utf8_lossy(line).to_string()); } } } changes } #[cfg(test)] mod tests { use super::*; mod strip_changelog_message { use super::*; #[test] fn test_empty() { assert_eq!(strip_changelog_message(&[]), Vec::::new()); } #[test] fn test_empty_changes() { assert_eq!(strip_changelog_message(&[""]), Vec::::new()); } #[test] fn test_removes_leading_whitespace() { assert_eq!( strip_changelog_message(&["foo", " bar", "\tbaz", " bang"]), vec!["foo", "bar", "baz", " bang"], ); } #[test] fn test_removes_star_if_one() { assert_eq!(strip_changelog_message(&[" * foo"]), ["foo"]); assert_eq!(strip_changelog_message(&["\t* foo"]), ["foo"]); assert_eq!(strip_changelog_message(&[" + foo"]), ["foo"]); assert_eq!(strip_changelog_message(&[" - foo"]), ["foo"]); assert_eq!(strip_changelog_message(&[" * foo"]), ["foo"]); assert_eq!( strip_changelog_message(&[" * foo", " bar"]), ["foo", "bar"] ); } #[test] fn test_leaves_start_if_multiple() { assert_eq!( strip_changelog_message(&[" * foo", " * bar"]), ["* foo", "* bar"] ); assert_eq!( strip_changelog_message(&[" * foo", " + bar"]), ["* foo", "+ bar"] ); assert_eq!( strip_changelog_message(&[" * foo", " bar", " * baz"]), ["* foo", "bar", "* baz"], ); } } } breezyshim-0.7.5/src/debian/directory.rs000064400000000000000000000010111046102023000163720ustar 00000000000000use pyo3::prelude::*; /// Convert a Git URL from a Debian VCS field to a Breezy URL. 
/// /// # Arguments /// * `url` - The Git URL to convert /// /// # Returns /// The converted URL pub fn vcs_git_url_to_bzr_url(url: &str) -> url::Url { Python::attach(|py| { let m = py.import("breezy.plugins.debian.directory").unwrap(); m.call_method1("vcs_git_url_to_bzr_url", (url,)) .unwrap() .extract::() .unwrap() .parse() .unwrap() }) } breezyshim-0.7.5/src/debian/error.rs000064400000000000000000000153511046102023000155330ustar 00000000000000use crate::error::Error as BrzError; use debversion::Version; use pyo3::import_exception; use pyo3::prelude::*; pyo3::import_exception!(breezy.plugins.debian.builder, BuildFailedError); import_exception!(breezy.plugins.debian.import_dsc, UpstreamAlreadyImported); import_exception!(breezy.plugins.debian.upstream.branch, DistCommandfailed); import_exception!(breezy.plugins.debian.upstream, PackageVersionNotPresent); import_exception!(breezy.plugins.debian.upstream, MissingUpstreamTarball); import_exception!(breezy.plugins.debian.changelog, UnreleasedChanges); import_exception!(breezy.plugins.debian.import_dsc, VersionAlreadyImported); /// Errors that can occur in Debian-specific operations. #[derive(Debug)] pub enum Error { /// An error from the underlying Breezy library. BrzError(BrzError), /// A package build failed. BuildFailed, /// An upstream version has already been imported. UpstreamAlreadyImported(String), /// A specific version of a package has already been imported. VersionAlreadyImported { /// The name of the package. package: String, /// The version that was already imported. version: Version, /// The tag name that was used for the import. tag_name: String, }, /// A distribution command failed. DistCommandFailed(String), /// A package version wasn't found in the repository. PackageVersionNotPresent { /// The name of the package. package: String, /// The version that wasn't found. version: String, }, /// An upstream tarball is missing. MissingUpstreamTarball { /// The name of the package. package: String, /// The version of the missing tarball. version: String, }, /// There are unreleased changes in the package. UnreleasedChanges, /// An error occurred when parsing the changelog. 
ChangeLogError(debian_changelog::Error), } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Error::BrzError(err) => write!(f, "{}", err), Error::BuildFailed => write!(f, "Build failed"), Error::UpstreamAlreadyImported(version) => { write!(f, "Upstream version {} already imported", version) } Error::DistCommandFailed(err) => write!(f, "Dist command failed: {}", err), Error::PackageVersionNotPresent { package, version } => { write!(f, "Package {} version {} not present", package, version) } Error::MissingUpstreamTarball { package, version } => { write!( f, "Missing upstream tarball for {} version {}", package, version ) } Error::UnreleasedChanges => write!(f, "Unreleased changes"), Error::ChangeLogError(err) => write!(f, "{}", err), Error::VersionAlreadyImported { package, version, tag_name, } => { write!( f, "Version {} of package {} already imported with tag {}", version, package, tag_name ) } } } } impl std::error::Error for Error {} impl From for Error { fn from(err: BrzError) -> Error { Error::BrzError(err) } } impl From for Error { fn from(err: debian_changelog::Error) -> Error { Error::ChangeLogError(err) } } impl From for Error { fn from(err: PyErr) -> Error { Python::attach(|py| { let brz_error: BrzError = err.into(); if let BrzError::Other(ref err) = brz_error { if err.is_instance_of::(py) { let v = err.value(py); Error::UpstreamAlreadyImported(v.getattr("version").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { let v = err.value(py); Error::VersionAlreadyImported { package: v.getattr("package").unwrap().extract().unwrap(), version: v.getattr("version").unwrap().extract().unwrap(), tag_name: v.getattr("tag_name").unwrap().extract().unwrap(), } } else if err.is_instance_of::(py) { let v = err.value(py); Error::DistCommandFailed(v.getattr("error").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { let v = err.value(py); Error::PackageVersionNotPresent { package: v.getattr("package").unwrap().extract().unwrap(), version: v.getattr("version").unwrap().extract().unwrap(), } } else if err.is_instance_of::(py) { let v = err.value(py); Error::MissingUpstreamTarball { package: v.getattr("package").unwrap().extract().unwrap(), version: v.getattr("version").unwrap().extract().unwrap(), } } else if err.is_instance_of::(py) { Error::BuildFailed } else if err.is_instance_of::(py) { Error::UnreleasedChanges } else { Error::BrzError(brz_error) } } else { Error::BrzError(brz_error) } }) } } impl From for PyErr { fn from(err: Error) -> PyErr { match err { Error::BrzError(err) => err.into(), Error::BuildFailed => BuildFailedError::new_err(("Build failed",)), Error::UpstreamAlreadyImported(version) => UpstreamAlreadyImported::new_err((version,)), Error::DistCommandFailed(err) => DistCommandfailed::new_err((err,)), Error::PackageVersionNotPresent { package, version } => { PackageVersionNotPresent::new_err((package, version)) } Error::MissingUpstreamTarball { package, version } => { MissingUpstreamTarball::new_err((package, version)) } Error::UnreleasedChanges => UnreleasedChanges::new_err(()), Error::ChangeLogError(_err) => todo!(), Error::VersionAlreadyImported { package, version, tag_name, } => VersionAlreadyImported::new_err((package, version, tag_name)), } } } breezyshim-0.7.5/src/debian/import_dsc.rs000064400000000000000000000167331046102023000165520ustar 00000000000000use crate::branch::GenericBranch; use crate::debian::TarballKind; use crate::workingtree::GenericWorkingTree; use crate::{ 
branch::{Branch, PyBranch}, tree::PyTree, RevisionId, }; use pyo3::prelude::*; use std::{collections::HashMap, path::Path, path::PathBuf}; /// A set of distribution branches for Debian package imports. /// /// This struct represents a collection of distribution branches that can be /// used when importing Debian source packages. pub struct DistributionBranchSet(Py); impl<'py> IntoPyObject<'py> for DistributionBranchSet { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } impl DistributionBranchSet { /// Create a new DistributionBranchSet instance. pub fn new() -> Self { Python::attach(|py| { let m = py.import("breezy.plugins.debian.import_dsc").unwrap(); let ctr = m.getattr("DistributionBranchSet").unwrap(); DistributionBranchSet(ctr.call0().unwrap().into()) }) } /// Add a distribution branch to this set. /// /// # Arguments /// * `branch` - The branch to add to the set pub fn add_branch(&self, branch: &DistributionBranch) { Python::attach(|py| { self.0.call_method1(py, "add_branch", (&branch.0,)).unwrap(); }) } } /// A branch representing a Debian distribution. /// /// This struct represents a branch used for importing Debian source packages /// into version control. pub struct DistributionBranch(Py); impl<'py> IntoPyObject<'py> for DistributionBranch { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } impl DistributionBranch { /// Create a new DistributionBranch instance. /// /// # Arguments /// * `branch` - Main branch for the distribution /// * `pristine_upstream_branch` - Branch containing pristine upstream sources /// * `tree` - Optional tree for the distribution branch /// * `pristine_upstream_tree` - Optional tree for the pristine upstream branch /// /// # Returns /// A new DistributionBranch instance pub fn new( branch: &dyn PyBranch, pristine_upstream_branch: &dyn PyBranch, tree: Option<&dyn PyTree>, pristine_upstream_tree: Option<&dyn PyTree>, ) -> Self { Python::attach(|py| { let m = py.import("breezy.plugins.debian.import_dsc").unwrap(); let ctr = m.getattr("DistributionBranch").unwrap(); DistributionBranch( ctr.call1(( branch.to_object(py), pristine_upstream_branch.to_object(py), tree.map(|t| t.to_object(py)), pristine_upstream_tree.map(|t| t.to_object(py)), )) .unwrap() .into(), ) }) } /// Get the revision ID corresponding to a specific version. /// /// # Arguments /// * `version` - The Debian package version /// /// # Returns /// The revision ID corresponding to the version, or an error pub fn revid_of_version( &self, version: &debversion::Version, ) -> Result { Ok(Python::attach(|py| -> PyResult { self.0 .call_method1(py, "revid_of_version", (version.to_string(),))? .extract::(py) })?) } /// Import a Debian source package (.dsc file) into the distribution branch. /// /// # Arguments /// * `dsc_path` - Path to the .dsc file to import /// * `apply_patches` - Whether to apply patches during import /// /// # Returns /// The version string of the imported package, or an error pub fn import_package( &self, dsc_path: &Path, apply_patches: bool, ) -> Result { Ok(Python::attach(|py| -> PyResult { self.0 .call_method1( py, "import_package", (dsc_path.to_string_lossy().to_string(), apply_patches), )? .extract::(py) })?) } /// Get the working tree associated with this distribution branch. 
/// /// # Returns /// The working tree, if available pub fn tree(&self) -> Option { Python::attach(|py| -> PyResult> { let tree = self .0 .getattr(py, "tree")? .extract::>>(py)?; if tree.is_none() { return Ok(None); } Ok(Some(GenericWorkingTree::from(tree.unwrap()))) }) .unwrap() } /// Get the branch associated with this distribution branch. /// /// # Returns /// The branch object pub fn branch(&self) -> Box { Python::attach(|py| -> PyResult> { Ok(Box::new(GenericBranch::from(self.0.getattr(py, "branch")?))) }) .unwrap() } /// Get the pristine-tar source associated with this distribution branch. /// /// # Returns /// The pristine-tar source for accessing upstream tarballs pub fn pristine_upstream_source(&self) -> crate::debian::upstream::PristineTarSource { Python::attach( |py| -> PyResult { Ok(crate::debian::upstream::PristineTarSource::from( self.0.getattr(py, "pristine_upstream_source")?, )) }, ) .unwrap() } /// Create an empty upstream tree in the specified directory. /// /// # Arguments /// * `basedir` - Directory in which to create the empty tree /// /// # Returns /// Ok(()) on success, or an error pub fn create_empty_upstream_tree( &self, basedir: &Path, ) -> Result<(), crate::debian::error::Error> { Python::attach(|py| -> PyResult<()> { self.0 .call_method1(py, "create_empty_upstream_tree", (basedir,))?; Ok(()) })?; Ok(()) } /// Extract upstream trees from their revisions into a directory. /// /// # Arguments /// * `upstream_tips` - Mapping from tarball kinds to revision IDs and paths /// * `basedir` - Directory in which to extract the upstream tree /// /// # Returns /// Ok(()) on success, or an error pub fn extract_upstream_tree( &self, upstream_tips: &HashMap, basedir: &Path, ) -> Result<(), crate::debian::error::Error> { Ok(Python::attach(|py| -> PyResult<()> { self.0.call_method1( py, "extract_upstream_tree", ( { let dict = pyo3::types::PyDict::new(py); for (k, (r, p)) in upstream_tips { dict.set_item(k.clone(), (r.clone(), p.clone()))?; } dict }, basedir, ), )?; Ok(()) })?) } } breezyshim-0.7.5/src/debian/merge_upstream.rs000064400000000000000000000106771046102023000174270ustar 00000000000000use crate::branch::PyBranch; use crate::debian::error::Error; use crate::debian::upstream::PyUpstreamSource; use crate::debian::TarballKind; use crate::tree::PyTree; use crate::workingtree::PyWorkingTree; use crate::RevisionId; use pyo3::prelude::*; use pyo3::types::PyDict; use std::collections::HashMap; use std::path::{Path, PathBuf}; /// Import new tarballs. 
/// /// # Arguments /// * `tree` - Working tree to operate in /// * `subpath` - Subpath to operate in /// * `tarball_filenames` - List of tarball filenames as tuples with (path, component) /// * `package` - Package name /// * `version` - New upstream version to merge /// * `current_version` - Current upstream version in tree /// * `upstream_branch` - Optional upstream branch to merge from /// * `upstream_revisions` - Dictionary mapping versions to upstream revisions /// * `merge_type` - Merge type /// * `committer` - Committer string to use /// * `files_excluded` - Files to exclude /// /// # Returns /// List with (component, tag, revid, pristine_tar_imported, subpath) tuples pub fn do_import( tree: &dyn PyWorkingTree, subpath: &Path, tarball_filenames: &[&Path], package: &str, version: &str, current_version: Option<&str>, upstream_branch: &dyn PyBranch, upstream_revisions: HashMap, merge_type: Option<&str>, force: bool, force_pristine_tar: bool, committer: Option<&str>, files_excluded: Option<&[&Path]>, ) -> Result, PathBuf)>, Error> { Python::attach(|py| { let m = PyModule::import(py, "breezy.plugins.debian.merge_upstream").unwrap(); let do_import = m.getattr("do_import").unwrap(); let kwargs = PyDict::new(py); kwargs.set_item("tree", tree.to_object(py))?; kwargs.set_item("subpath", subpath.to_string_lossy().to_string())?; kwargs.set_item("tarball_filenames", tarball_filenames.to_vec())?; kwargs.set_item("package", package)?; kwargs.set_item("version", version)?; kwargs.set_item("current_version", current_version)?; kwargs.set_item("upstream_branch", upstream_branch.to_object(py))?; kwargs.set_item("upstream_revisions", upstream_revisions)?; kwargs.set_item("merge_type", merge_type)?; kwargs.set_item("force", force)?; kwargs.set_item("force_pristine_tar", force_pristine_tar)?; kwargs.set_item("committer", committer)?; kwargs.set_item("files_excluded", files_excluded)?; Ok(do_import.call((), Some(&kwargs))?.extract()?) }) } /// Find tarballs for a specific package and version. /// /// # Arguments /// * `orig_dir` - Directory containing orig tarballs /// * `tree` - The working tree /// * `package` - Package name /// * `version` - Version string /// * `locations` - List of additional locations to search for tarballs /// /// # Returns /// A list of paths to found tarballs, or an error pub fn get_tarballs( orig_dir: &Path, tree: &dyn PyTree, package: &str, version: &str, locations: &[&Path], ) -> Result, Error> { Python::attach(|py| { let m = PyModule::import(py, "breezy.plugins.debian.merge_upstream").unwrap(); let get_tarballs = m.getattr("get_tarballs").unwrap(); Ok(get_tarballs .call1(( orig_dir, tree.to_object(py), package, version, locations.to_vec(), ))? .extract()?) }) } /// Get revision IDs for already imported upstream versions. /// /// # Arguments /// * `upstream_source` - The upstream source to check /// * `package` - Package name /// * `new_upstream_version` - The new upstream version being imported /// /// # Returns /// A list of tuples with component information: (kind, version, revid, pristine_tar_imported, path) pub fn get_existing_imported_upstream_revids( upstream_source: &T, package: &str, new_upstream_version: &str, ) -> Result, PathBuf)>, Error> { Python::attach(|py| { let m = PyModule::import(py, "breezy.plugins.debian.merge_upstream").unwrap(); let get_existing_imported_upstream_revids = m.getattr("get_existing_imported_upstream_revids").unwrap(); Ok(get_existing_imported_upstream_revids .call1((upstream_source.as_pyobject(), package, new_upstream_version))? 
.extract()?) }) } breezyshim-0.7.5/src/debian/mod.rs000064400000000000000000000243231046102023000151600ustar 00000000000000//! Debian specific functionality. //! //! This module provides functionality for working with Debian packages. //! //! It mostly wraps the `breezy.plugins.debian` module from the Breezy VCS. pub mod apt; /// Module for working with Debian commit messages and tagging. pub mod debcommit; /// Module for working with Debian directory structures. pub mod directory; /// Module defining errors specific to Debian functionality. pub mod error; /// Module for importing Debian source packages (dsc files). pub mod import_dsc; /// Module for merging upstream changes into Debian packages. pub mod merge_upstream; pub mod release; /// Module for working with upstream sources in Debian packages. pub mod upstream; /// Module for checking if a Debian package in version control is up to date with the archive. pub mod vcs_up_to_date; /// Default directory for building Debian packages. pub const DEFAULT_BUILD_DIR: &str = "../build-area"; /// Default directory for orig tarballs. pub const DEFAULT_ORIG_DIR: &str = ".."; /// Default directory for build results. pub const DEFAULT_RESULT_DIR: &str = ".."; use crate::branch::PyBranch; use crate::debian::error::Error as DebianError; use crate::error::Error; use crate::tree::PyTree; use crate::workingtree::PyWorkingTree; use std::collections::HashMap; use std::path::PathBuf; use pyo3::exceptions::PyValueError; use pyo3::prelude::*; use pyo3::types::PyDict; /// Represents different Debian-based distributions/vendors. /// /// This enum is used to differentiate between various Debian-based /// distributions when working with packages. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Vendor { /// The Debian distribution. Debian, /// The Ubuntu distribution. Ubuntu, /// The Kali Linux distribution. Kali, } impl std::fmt::Display for Vendor { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Vendor::Debian => write!(f, "debian"), Vendor::Ubuntu => write!(f, "ubuntu"), Vendor::Kali => write!(f, "kali"), } } } impl std::str::FromStr for Vendor { type Err = String; fn from_str(s: &str) -> Result { match s { "debian" => Ok(Vendor::Debian), "ubuntu" => Ok(Vendor::Ubuntu), "kali" => Ok(Vendor::Kali), _ => Err(format!("Invalid vendor: {}", s)), } } } impl<'a, 'py> FromPyObject<'a, 'py> for Vendor { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let vendor = ob.extract::()?; match vendor.as_str() { "debian" => Ok(Vendor::Debian), "ubuntu" => Ok(Vendor::Ubuntu), "kali" => Ok(Vendor::Kali), _ => Err(PyValueError::new_err((format!( "Invalid vendor: {}", vendor ),))), } } } /// Kinds of upstream version handling. /// /// This enum represents the different ways an upstream version can be handled, /// particularly when determining version numbers for packages. #[derive(Debug, Clone, PartialEq, Eq, std::hash::Hash, Default)] pub enum VersionKind { /// Automatically determine the kind of version. #[default] Auto, /// Use snapshot versioning (typically includes a revision identifier). Snapshot, /// Use release versioning (clean version without revision identifiers). 
Release, } impl std::str::FromStr for VersionKind { type Err = String; fn from_str(s: &str) -> Result { match s { "auto" => Ok(VersionKind::Auto), "snapshot" => Ok(VersionKind::Snapshot), "release" => Ok(VersionKind::Release), _ => Err(format!("Invalid version kind: {}", s)), } } } impl<'py> IntoPyObject<'py> for VersionKind { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let s = match self { VersionKind::Auto => "auto", VersionKind::Snapshot => "snapshot", VersionKind::Release => "release", }; Ok(s.into_pyobject(py)?.into_any()) } } impl std::fmt::Display for VersionKind { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { VersionKind::Auto => write!(f, "auto"), VersionKind::Snapshot => write!(f, "snapshot"), VersionKind::Release => write!(f, "release"), } } } impl<'a, 'py> FromPyObject<'a, 'py> for VersionKind { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let kind = ob.extract::()?; match kind.as_str() { "auto" => Ok(VersionKind::Auto), "snapshot" => Ok(VersionKind::Snapshot), "release" => Ok(VersionKind::Release), _ => Err(PyValueError::new_err((format!( "Invalid version kind: {}", kind ),))), } } } /// Kind of tarball in a Debian source package. /// /// Debian source packages can include multiple tarballs: the main orig tarball /// and additional component tarballs. This enum represents those types. #[derive(Debug, Clone, PartialEq, Eq, std::hash::Hash)] pub enum TarballKind { /// The main original upstream tarball. Orig, /// An additional component tarball with the specified component name. Additional(String), } impl serde::ser::Serialize for TarballKind { fn serialize(&self, serializer: S) -> Result { match self { TarballKind::Orig => serializer.serialize_none(), TarballKind::Additional(kind) => serializer.serialize_some(kind), } } } impl<'a> serde::de::Deserialize<'a> for TarballKind { fn deserialize>(deserializer: D) -> Result { let kind = Option::::deserialize(deserializer)?; Ok(kind.into()) } } impl From> for TarballKind { fn from(kind: Option) -> Self { match kind { Some(kind) => TarballKind::Additional(kind), None => TarballKind::Orig, } } } impl From for Option { fn from(kind: TarballKind) -> Self { match kind { TarballKind::Orig => None, TarballKind::Additional(kind) => Some(kind), } } } impl<'a, 'py> FromPyObject<'a, 'py> for TarballKind { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let kind = ob.extract::>()?; Ok(kind.into()) } } impl<'py> IntoPyObject<'py> for TarballKind { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let o: Option = self.into(); Ok(o.into_pyobject(py)?.into_any()) } } /// Helper function to build a Debian package. 
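/* Illustrative sketch, not part of the original source: how the value types in
   this module round-trip. VersionKind and Vendor parse from their lowercase
   names, and TarballKind maps to and from the Option<String> component
   representation used on the Python side. The component name "doc" is just an
   example.

fn value_types_example() {
    use std::str::FromStr;

    let kind = VersionKind::from_str("snapshot").unwrap();
    assert_eq!(kind.to_string(), "snapshot");

    let vendor: Vendor = "debian".parse().unwrap();
    assert_eq!(vendor, Vendor::Debian);

    // The main orig tarball is represented as None, extra components by name.
    assert_eq!(TarballKind::from(None::<String>), TarballKind::Orig);
    let component: Option<String> = TarballKind::Additional("doc".to_string()).into();
    assert_eq!(component.as_deref(), Some("doc"));
}
*/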
/// /// # Arguments /// * `local_tree` - The working tree containing the Debian package /// * `subpath` - Path to the debian directory within the tree /// * `branch` - Branch containing the package /// * `target_dir` - Directory to store build results /// * `builder` - Name of the build tool to use /// * `guess_upstream_branch_url` - Whether to guess the upstream branch URL /// * `apt_repo` - Optional APT repository to use /// /// # Returns /// A map of result file types to their paths, or an error pub fn build_helper( local_tree: &dyn PyWorkingTree, subpath: &std::path::Path, branch: &dyn PyBranch, target_dir: &std::path::Path, builder: &str, guess_upstream_branch_url: bool, apt_repo: Option<&dyn apt::Apt>, ) -> Result, DebianError> { pyo3::prepare_freethreaded_python(); Python::attach(|py| -> PyResult> { let locals = PyDict::new(py); locals.set_item("local_tree", local_tree.to_object(py))?; locals.set_item("subpath", subpath.to_string_lossy().to_string())?; locals.set_item("branch", branch.to_object(py))?; locals.set_item("target_dir", target_dir.to_string_lossy().to_string())?; locals.set_item("builder", builder)?; locals.set_item("guess_upstream_branch_url", guess_upstream_branch_url)?; if let Some(apt_repo) = apt_repo { locals.set_item("apt", apt_repo.as_pyobject())?; } py.import("breezy.plugins.debian.cmds")? .call_method1("_build_helper", (locals,))? .extract() }) .map_err(DebianError::from) } /// Return the name of the debian tag for the given tree and branch. /// /// # Arguments /// * `tree` - The tree to get the debian tag name for. /// * `branch` - The branch to get the debian tag name for. /// * `subpath` - The subpath to get the debian tag name for. /// * `vendor` - The vendor to get the debian tag name for. /// /// # Returns /// The name of the debian tag. pub fn tree_debian_tag_name( tree: &dyn PyTree, branch: &dyn PyBranch, subpath: Option<&std::path::Path>, vendor: Option, ) -> Result { Python::attach(|py| { let result = py.import("breezy.plugins.debian")?.call_method1( "tree_debian_tag_name", ( tree.to_object(py), branch.to_object(py), subpath, vendor.map(|v| v.to_string()), ), )?; Ok(result.extract()?) }) } // TODO(jelmer): deduplicate this with the suite_to_distribution function // in debian-analyzer /// Infer the distribution from a suite. /// /// When passed the name of a suite (anything in the distributions field of /// a changelog) it will infer the distribution from that (i.e. Debian or /// Ubuntu). /// /// # Arguments /// * `suite`: the string containing the suite /// /// # Returns /// Vendor or None if the distribution cannot be inferred. pub fn suite_to_distribution(suite: &str) -> Option { Python::attach(|py| -> PyResult> { let result = py .import("breezy.plugins.debian.util")? .call_method1("suite_to_distribution", (suite,))?; result.extract() }) .unwrap() } breezyshim-0.7.5/src/debian/release.rs000064400000000000000000000035271046102023000160240ustar 00000000000000//! Debian package releasing use crate::error::Error; use crate::tree::PyMutableTree; use pyo3::prelude::*; /// Errors that can occur when releasing a Debian package. #[derive(Debug)] pub enum ReleaseError { /// The file was generated and shouldn't be modified directly. GeneratedFile, /// An error from the underlying Breezy library. 
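/* Illustrative sketch, not part of the original source: suite_to_distribution
   maps a changelog suite name to a Vendor and returns None for names it does
   not recognise. The expected results below assume the usual behaviour of the
   underlying breezy.plugins.debian.util helper.

fn suite_example() {
    use crate::debian::{suite_to_distribution, Vendor};

    assert_eq!(suite_to_distribution("unstable"), Some(Vendor::Debian));
    assert_eq!(suite_to_distribution("no-such-suite"), None);
}
*/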
BrzError(Error), } impl From for ReleaseError { fn from(err: Error) -> Self { ReleaseError::BrzError(err) } } impl std::fmt::Display for ReleaseError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { ReleaseError::GeneratedFile => write!(f, "Generated file"), ReleaseError::BrzError(err) => write!(f, "{}", err), } } } impl std::error::Error for ReleaseError {} /// Release a Debian package by updating the changelog. /// /// This function updates the changelog to mark the package as released, /// setting the appropriate fields like the release date. /// /// # Arguments /// * `local_tree` - The tree containing the package to release /// * `subpath` - Path to the debian directory within the tree /// /// # Returns /// The version string of the released package, or an error pub fn release( local_tree: &dyn PyMutableTree, subpath: &std::path::Path, ) -> Result { pyo3::import_exception!(debmutate.reformatting, GeneratedFile); Python::attach(|py| { let m = py.import("breezy.plugins.debian.release").unwrap(); let release = m.getattr("release").unwrap(); match release.call1((local_tree.to_object(py), subpath)) { Ok(result) => Ok(result.extract().unwrap()), Err(err) if err.is_instance_of::(py) => Err(ReleaseError::GeneratedFile), Err(err) => Err(ReleaseError::BrzError(err.into())), } }) } breezyshim-0.7.5/src/debian/upstream.rs000064400000000000000000000424221046102023000162410ustar 00000000000000use crate::branch::PyBranch; use crate::controldir::PyControlDir; use crate::debian::error::Error; use crate::debian::TarballKind; use crate::debian::VersionKind; use crate::tree::PyTree; use crate::RevisionId; use debversion::Version; use pyo3::prelude::*; use pyo3::types::{PyCFunction, PyDict, PyTuple}; use std::collections::HashMap; use std::ffi::OsString; use std::path::{Path, PathBuf}; /// Source for pristine tarballs. /// /// This struct represents a source for pristine tarballs stored /// in a pristine-tar branch. pub struct PristineTarSource(Py); impl From> for PristineTarSource { fn from(obj: Py) -> Self { PristineTarSource(obj) } } impl<'py> IntoPyObject<'py> for PristineTarSource { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } /// A source for upstream versions (uscan, debian/rules, etc). pub struct UpstreamBranchSource(Py); impl From> for UpstreamBranchSource { fn from(obj: Py) -> Self { UpstreamBranchSource(obj) } } impl<'py> IntoPyObject<'py> for UpstreamBranchSource { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } /// Information about a tarball file. /// /// This struct contains metadata about a tarball file, including its /// filename, component kind, and MD5 hash. pub struct Tarball { /// The filename of the tarball. pub filename: String, /// The kind of component this tarball represents. pub component: TarballKind, /// The MD5 hash of the tarball. pub md5: String, } /// A collection of tarballs. 
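/* Illustrative sketch, not part of the original source: calling release() and
   distinguishing the "generated changelog" case from other failures. The tree
   is assumed to implement PyMutableTree, the empty subpath means the package
   lives at the root of the tree, and the printed messages are arbitrary.

fn release_example(tree: &dyn crate::tree::PyMutableTree) {
    use crate::debian::release::{release, ReleaseError};

    match release(tree, std::path::Path::new("")) {
        Ok(version) => println!("released {}", version),
        Err(ReleaseError::GeneratedFile) => {
            eprintln!("debian/changelog is generated; refusing to edit it directly")
        }
        Err(err) => eprintln!("release failed: {}", err),
    }
}
*/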
pub type Tarballs = Vec; impl<'a, 'py> FromPyObject<'a, 'py> for Tarball { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Tarball { filename: ob.get_item(0)?.extract()?, component: ob.get_item(1)?.extract()?, md5: ob.get_item(2)?.extract()?, }) } } impl<'py> IntoPyObject<'py> for Tarball { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let tuple = (self.filename, self.component, self.md5); Ok(tuple.into_pyobject(py)?.into_any()) } } /// Trait for Python-based upstream sources. /// /// This trait is implemented by wrappers around Python upstream source objects. pub trait PyUpstreamSource: std::any::Any + std::fmt::Debug { /// Get the underlying Py fn as_pyobject(&self) -> &Py; } /// Trait for upstream sources. /// /// This trait defines the interface for working with upstream sources, /// which provide access to upstream versions of packages. pub trait UpstreamSource: std::fmt::Debug { /// Check what the latest upstream version is. /// /// # Arguments /// * `package` - Name of the package /// * `version` - The current upstream version of the package. /// /// # Returns /// A tuple of the latest upstream version and the mangled version. fn get_latest_version( &self, package: Option<&str>, current_version: Option<&str>, ) -> Result, Error>; /// Retrieve recent version strings. /// /// # Arguments /// * `package`: Name of the package /// * `version`: Last upstream version since which to retrieve versions fn get_recent_versions( &self, package: Option<&str>, since_version: Option<&str>, ) -> Box>; /// Lookup the revision ids for a particular version. /// /// # Arguments /// * `package` - Package name /// * `version` - Version string /// /// # Returns /// A dictionary mapping component names to revision ids fn version_as_revisions( &self, package: Option<&str>, version: &str, tarballs: Option, ) -> Result, Error>; /// Check whether this upstream source contains a particular package. /// /// # Arguments /// * `package` - Package name /// * `version` - Version string /// * `tarballs` - Tarballs list fn has_version( &self, package: Option<&str>, version: &str, tarballs: Option, ) -> Result; /// Fetch the source tarball for a particular version. /// /// # Arguments /// * `package` - Name of the package /// * `version` - Version string of the version to fetch /// * `target_dir` - Directory in which to store the tarball /// * `components` - List of component names to fetch; may be None, /// /// # Returns /// Paths of the fetched tarballs fn fetch_tarballs( &self, package: Option<&str>, version: &str, target_dir: &Path, components: Option<&[TarballKind]>, ) -> Result, Error>; } impl UpstreamSource for T { fn get_latest_version( &self, package: Option<&str>, current_version: Option<&str>, ) -> Result, Error> { Python::attach(|py| { Ok(self .as_pyobject() .call_method1(py, "get_latest_version", (package, current_version))? .extract(py)?) 
}) } fn get_recent_versions( &self, package: Option<&str>, since_version: Option<&str>, ) -> Box> { let mut ret = vec![]; Python::attach(|py| -> PyResult<()> { let recent_versions = self.as_pyobject().call_method1( py, "get_recent_versions", (package, since_version), )?; while let Ok(Some((version, mangled_version))) = recent_versions.call_method0(py, "__next__")?.extract(py) { ret.push((version, mangled_version)); } Ok(()) }) .unwrap(); Box::new(ret.into_iter()) } fn version_as_revisions( &self, package: Option<&str>, version: &str, tarballs: Option, ) -> Result, Error> { Python::attach(|py| { Ok(self .as_pyobject() .call_method1(py, "version_as_revisions", (package, version, tarballs))? .extract(py)?) }) } fn has_version( &self, package: Option<&str>, version: &str, tarballs: Option, ) -> Result { Python::attach(|py| { Ok(self .as_pyobject() .call_method1(py, "has_version", (package, version, tarballs))? .extract(py)?) }) } fn fetch_tarballs( &self, package: Option<&str>, version: &str, target_dir: &Path, components: Option<&[TarballKind]>, ) -> Result, Error> { Python::attach(|py| { Ok(self .as_pyobject() .call_method1( py, "fetch_tarballs", (package, version, target_dir, components.map(|x| x.to_vec())), )? .extract(py)?) }) } } /// A generic wrapper around any Python upstream source object. /// /// This struct provides a way to interact with any upstream source /// from Python code, regardless of its specific implementation. pub struct GenericUpstreamSource(Py); impl<'py> IntoPyObject<'py> for GenericUpstreamSource { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_pyobject(py)?.into_any()) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericUpstreamSource { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericUpstreamSource(obj.to_owned().unbind())) } } impl PyUpstreamSource for GenericUpstreamSource { fn as_pyobject(&self) -> &Py { &self.0 } } impl GenericUpstreamSource { /// Create a new generic upstream source from a Python object. /// /// # Arguments /// * `obj` - The Python object representing an upstream source. pub fn new(obj: Py) -> Self { Self(obj) } } impl std::fmt::Debug for GenericUpstreamSource { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("GenericUpstreamSource({:?})", self.0)) } } impl PyUpstreamSource for UpstreamBranchSource { fn as_pyobject(&self) -> &Py { &self.0 } } impl std::fmt::Debug for UpstreamBranchSource { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("UpstreamBranchSource").finish() } } impl UpstreamBranchSource { /// Get the upstream branch associated with this source. /// /// # Returns /// A branch object representing the upstream branch. pub fn upstream_branch(&self) -> Box { let o = Python::attach(|py| self.as_pyobject().getattr(py, "upstream_branch").unwrap()); Box::new(crate::branch::GenericBranch::from(o)) } /// Get a revision tree for a specific upstream version. 
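    /* Illustrative sketch, not part of the original source: because UpstreamSource
       is object-safe, callers can query any source through a trait object. The
       package name "foo", the version "1.2.3" and the target directory are
       placeholder assumptions, as are the bool / Vec<PathBuf> shapes of the
       results, inferred from the documentation above.

    fn upstream_source_example(
        source: &dyn UpstreamSource,
    ) -> Result<(), crate::debian::error::Error> {
        if source.has_version(Some("foo"), "1.2.3", None)? {
            let components = [TarballKind::Orig];
            let tarballs = source.fetch_tarballs(
                Some("foo"),
                "1.2.3",
                std::path::Path::new("/tmp/tarballs"),
                Some(&components[..]),
            )?;
            println!("fetched {} tarball(s)", tarballs.len());
        }
        Ok(())
    }
    */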
/// /// # Arguments /// * `source_name` - Optional name of the source package /// * `mangled_upstream_version` - The mangled version string of the upstream version /// /// # Returns /// A revision tree object or an error pub fn revision_tree( &self, source_name: Option<&str>, mangled_upstream_version: &str, ) -> Result { Python::attach(|py| { Ok(crate::tree::RevisionTree(self.as_pyobject().call_method1( py, "revision_tree", (source_name, mangled_upstream_version), )?)) }) } /// Get the revision ID for a specific upstream version. /// /// # Arguments /// * `package` - Optional name of the source package /// * `version` - Version string of the upstream version /// * `tarballs` - Optional list of tarballs /// /// # Returns /// A tuple containing the revision ID and path, or an error pub fn version_as_revision( &self, package: Option<&str>, version: &str, tarballs: Option, ) -> Result<(RevisionId, PathBuf), Error> { Python::attach(|py| { Ok(self .as_pyobject() .call_method1(py, "version_as_revision", (package, version, tarballs))? .extract(py)?) }) } /// Create an upstream branch source from a branch. /// /// # Arguments /// * `upstream_branch` - The upstream branch to use /// * `version_kind` - Optional kind of version to use /// * `local_dir` - The local control directory /// * `create_dist` - Optional function to create a distribution /// /// # Returns /// A new upstream branch source or an error pub fn from_branch( upstream_branch: &dyn PyBranch, version_kind: Option, local_dir: &dyn PyControlDir, create_dist: Option< impl Fn(&dyn PyTree, &str, &str, &Path, &Path) -> Result + Send + Sync + 'static, >, ) -> Result { Python::attach(|py| { let m = py.import("breezy.plugins.debian.upstream.branch").unwrap(); let cls = m.getattr("UpstreamBranchSource").unwrap(); let upstream_branch = upstream_branch.to_object(py); let kwargs = PyDict::new(py); kwargs.set_item("version_kind", version_kind.unwrap_or_default())?; kwargs.set_item("local_dir", local_dir.to_object(py))?; if let Some(create_dist) = create_dist { let create_dist = move |args: &Bound<'_, PyTuple>, _kwargs: Option<&Bound<'_, PyDict>>| -> PyResult<_> { let args = args.extract::<(Py, String, String, PathBuf, PathBuf)>()?; create_dist( &crate::tree::RevisionTree(args.0), &args.1, &args.2, &args.3, &args.4, ) .map(|x| x.to_string_lossy().into_owned()) .map_err(|e| e.into()) }; let create_dist = PyCFunction::new_closure(py, None, None, create_dist)?; kwargs.set_item("create_dist", create_dist)?; } Ok(UpstreamBranchSource( cls.call_method("from_branch", (upstream_branch,), Some(&kwargs))? .into(), )) }) } } impl PyUpstreamSource for PristineTarSource { fn as_pyobject(&self) -> &Py { &self.0 } } impl std::fmt::Debug for PristineTarSource { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("PristineTarSource({:?})", self.0)) } } impl PristineTarSource { /// Check whether this upstream source contains a particular package. /// /// # Arguments /// * `package` - Package name /// * `version` - Version string /// * `tarballs` - Tarballs list pub fn has_version( &self, package: Option<&str>, version: &str, tarballs: Option, try_hard: bool, ) -> Result { Python::attach(|py| { Ok(self .as_pyobject() .call_method1(py, "has_version", (package, version, tarballs, try_hard))? .extract(py)?) }) } } /// Update the revision in a upstream version string. 
/// /// # Arguments /// * `branch` - Branch in which the revision can be found /// * `version_string` - Original version string /// * `revid` - Revision id of the revision /// * `sep` - Separator to use when adding snapshot pub fn upstream_version_add_revision( upstream_branch: &dyn PyBranch, version_string: &str, revid: &RevisionId, sep: Option<&str>, ) -> Result { let sep = sep.unwrap_or("+"); Python::attach(|py| { let m = py.import("breezy.plugins.debian.upstream.branch").unwrap(); let upstream_version_add_revision = m.getattr("upstream_version_add_revision").unwrap(); Ok(upstream_version_add_revision .call_method1( "upstream_version_add_revision", ( upstream_branch.to_object(py), version_string, revid.clone(), sep, ), )? .extract()?) }) } /// Get a pristine-tar source for a packaging branch. /// /// # Arguments /// * `packaging_tree` - The packaging tree /// * `packaging_branch` - The packaging branch /// /// # Returns /// A pristine-tar source or an error pub fn get_pristine_tar_source( packaging_tree: &dyn PyTree, packaging_branch: &dyn PyBranch, ) -> Result { Python::attach(|py| { let m = py.import("breezy.plugins.debian.upstream").unwrap(); let cls = m.getattr("get_pristine_tar_source").unwrap(); Ok(PristineTarSource( cls.call1((packaging_tree.to_object(py), packaging_branch.to_object(py)))? .into(), )) }) } /// Run a distribution command to create a source tarball. /// /// # Arguments /// * `revtree` - The revision tree to run the command in /// * `package` - Optional name of the package /// * `version` - Version of the package /// * `target_dir` - Directory to store the result in /// * `dist_command` - Command to run to create the distribution /// * `include_controldir` - Whether to include the control directory /// * `subpath` - Subpath within the tree /// /// # Returns /// Whether the command succeeded or an error pub fn run_dist_command( revtree: &dyn PyTree, package: Option<&str>, version: &Version, target_dir: &Path, dist_command: &str, include_controldir: bool, subpath: &Path, ) -> Result { Python::attach(|py| { let m = py.import("breezy.plugins.debian.upstream").unwrap(); let run_dist_command = m.getattr("run_dist_command").unwrap(); let kwargs = PyDict::new(py); kwargs.set_item("revtree", revtree.to_object(py))?; kwargs.set_item("package", package)?; kwargs.set_item("version", version.to_string())?; kwargs.set_item("target_dir", target_dir.to_string_lossy().to_string())?; kwargs.set_item("dist_command", dist_command)?; kwargs.set_item("include_controldir", include_controldir)?; kwargs.set_item("subpath", subpath.to_string_lossy().to_string())?; Ok(run_dist_command.call((), Some(&kwargs))?.extract()?) }) } breezyshim-0.7.5/src/debian/vcs_up_to_date.rs000064400000000000000000000070341046102023000173770ustar 00000000000000use crate::debian::apt::Apt; use crate::tree::PyTree; use debversion::Version; use pyo3::prelude::*; /// Status of a Debian package in version control compared to the archive. #[derive(PartialEq, Eq)] pub enum UpToDateStatus { /// The package in version control is up to date with the archive. UpToDate, /// The package is missing a changelog file. MissingChangelog, /// The package does not exist in the archive. PackageMissingInArchive { /// The name of the package that is missing. package: String, }, /// The version in the tree does not exist in the archive. TreeVersionNotInArchive { /// The version found in the tree. tree_version: Version, /// The versions available in the archive. 
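/* Illustrative sketch, not part of the original source: obtaining the
   pristine-tar source for a packaging branch and asking whether an upstream
   version has already been imported. The tree and branch are supplied by the
   caller; the package name and version are placeholders, and the boolean result
   shape of has_version() is assumed from the documentation above.

fn pristine_tar_example(
    tree: &dyn crate::tree::PyTree,
    branch: &dyn crate::branch::PyBranch,
) -> Result<bool, crate::debian::error::Error> {
    let source = crate::debian::upstream::get_pristine_tar_source(tree, branch)?;
    // try_hard is forwarded to the underlying Python implementation.
    source.has_version(Some("foo"), "1.2.3", None, false)
}
*/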
archive_versions: Vec, }, /// There's a newer version in the archive than in the tree. NewArchiveVersion { /// The newest version in the archive. archive_version: Version, /// The version in the tree. tree_version: Version, }, } /// Check if a Debian package in version control is up to date with the archive. /// /// # Arguments /// * `tree` - The tree containing the Debian package /// * `subpath` - The path to the Debian directory in the tree /// * `apt` - The APT interface to use for checking archive versions /// /// # Returns /// The status of the package compared to the archive pub fn check_up_to_date( tree: &dyn PyTree, subpath: &std::path::Path, apt: &impl Apt, ) -> PyResult { use pyo3::import_exception; import_exception!(breezy.plugins.debian.vcs_up_to_date, MissingChangelogError); import_exception!( breezy.plugins.debian.vcs_up_to_date, PackageMissingInArchive ); import_exception!( breezy.plugins.debian.vcs_up_to_date, TreeVersionNotInArchive ); import_exception!(breezy.plugins.debian.vcs_up_to_date, NewArchiveVersion); Python::attach(|py| { let m = py.import("breezy.plugins.debian.vcs_up_to_date")?; let check_up_to_date = m.getattr("check_up_to_date")?; match check_up_to_date.call1(( tree.to_object(py), subpath.to_string_lossy().to_string(), apt.as_pyobject(), )) { Err(e) if e.is_instance_of::(py) => { Ok(UpToDateStatus::MissingChangelog) } Err(e) if e.is_instance_of::(py) => { Ok(UpToDateStatus::PackageMissingInArchive { package: e.into_value(py).getattr(py, "package")?.extract(py)?, }) } Err(e) if e.is_instance_of::(py) => { let value = e.into_value(py); Ok(UpToDateStatus::TreeVersionNotInArchive { tree_version: value.getattr(py, "tree_version")?.extract(py)?, archive_versions: value .getattr(py, "archive_versions")? .extract::>(py)?, }) } Err(e) if e.is_instance_of::(py) => { let value = e.into_value(py); Ok(UpToDateStatus::NewArchiveVersion { archive_version: value.getattr(py, "archive_version")?.extract(py)?, tree_version: value.getattr(py, "tree_version")?.extract(py)?, }) } Ok(_o) => Ok(UpToDateStatus::UpToDate), Err(e) => Err(e), } }) } breezyshim-0.7.5/src/delta.rs000064400000000000000000000077751046102023000142640ustar 00000000000000//! Detection of changes between trees. use crate::osutils::is_inside_any; use crate::tree::TreeChange; use pyo3::prelude::*; /// Describes changes from one tree to another. /// /// Contains seven lists with TreeChange objects. /// /// added /// removed /// renamed /// copied /// kind_changed /// modified /// unchanged /// unversioned /// /// Each id is listed only once. /// /// Files that are both modified and renamed or copied are listed only in /// renamed or copied, with the text_modified flag true. The text_modified /// applies either to the content of the file or the target of the /// symbolic link, depending of the kind of file. /// /// Files are only considered renamed if their name has changed or /// their parent directory has changed. Renaming a directory /// does not count as renaming all its contents. /// /// The lists are normally sorted when the delta is created. #[derive(Debug, Clone, PartialEq, Eq)] pub struct TreeDelta { /// Files that were added between the trees. pub added: Vec, /// Files that were removed between the trees. pub removed: Vec, /// Files that were renamed between the trees. pub renamed: Vec, /// Files that were copied between the trees. pub copied: Vec, /// Files that changed kind between the trees. pub kind_changed: Vec, /// Files that were modified between the trees. 
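/* Illustrative sketch, not part of the original source: interpreting the
   UpToDateStatus returned by check_up_to_date(). The tree, apt interface and
   subpath are assumptions supplied by the caller; the printed messages are
   arbitrary.

fn up_to_date_example(
    tree: &dyn crate::tree::PyTree,
    apt: &impl crate::debian::apt::Apt,
) -> pyo3::PyResult<()> {
    use crate::debian::vcs_up_to_date::{check_up_to_date, UpToDateStatus};

    match check_up_to_date(tree, std::path::Path::new(""), apt)? {
        UpToDateStatus::UpToDate => println!("tree and archive agree"),
        UpToDateStatus::MissingChangelog => println!("no debian/changelog in the tree"),
        UpToDateStatus::NewArchiveVersion { archive_version, tree_version } => {
            println!("archive has {} but the tree has {}", archive_version, tree_version)
        }
        _ => println!("tree and archive differ in some other way"),
    }
    Ok(())
}
*/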
pub modified: Vec, /// Files that were unchanged between the trees. pub unchanged: Vec, /// Files that are unversioned in the trees. pub unversioned: Vec, /// Files that are missing in the trees. pub missing: Vec, } impl TreeDelta { /// Check if there are any changes in this delta. pub fn has_changed(&self) -> bool { !self.added.is_empty() || !self.removed.is_empty() || !self.renamed.is_empty() || !self.copied.is_empty() || !self.kind_changed.is_empty() || !self.modified.is_empty() } } impl<'a, 'py> FromPyObject<'a, 'py> for TreeDelta { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let added = ob.getattr("added")?.extract()?; let removed = ob.getattr("removed")?.extract()?; let renamed = ob.getattr("renamed")?.extract()?; let copied = ob.getattr("copied")?.extract()?; let kind_changed = ob.getattr("kind_changed")?.extract()?; let modified = ob.getattr("modified")?.extract()?; let unchanged = ob.getattr("unchanged")?.extract()?; let unversioned = ob.getattr("unversioned")?.extract()?; let missing = ob.getattr("missing")?.extract()?; Ok(TreeDelta { added, removed, renamed, copied, kind_changed, modified, unchanged, unversioned, missing, }) } } /// Filter out excluded paths from a list of tree changes. /// /// This function filters out tree changes that are in excluded paths. /// /// # Arguments /// * `iter_changes` - Iterator of tree changes /// * `exclude` - List of paths to exclude /// /// # Returns /// Iterator of tree changes that aren't in excluded paths pub fn filter_excluded<'a>( iter_changes: impl Iterator + 'a, exclude: &'a [&'a std::path::Path], ) -> impl Iterator + 'a { iter_changes.filter(|change| { let new_excluded = if let Some(p) = change.path.1.as_ref() { is_inside_any(exclude, p.as_path()) } else { false }; let old_excluded = if let Some(p) = change.path.0.as_ref() { is_inside_any(exclude, p.as_path()) } else { false }; if old_excluded && new_excluded { false } else if old_excluded || new_excluded { // TODO(jelmer): Perhaps raise an error here instead? false } else { true } }) } breezyshim-0.7.5/src/diff.rs000064400000000000000000000056471046102023000140770ustar 00000000000000//! Generation of unified diffs between trees. use pyo3::prelude::*; use pyo3::types::PyDict; use std::io::Write; /// Generate a unified diff between two trees and write it to the provided writer. 
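/* Illustrative sketch, not part of the original source: dropping changes under
   an excluded directory before inspecting them. The `changes` vector stands in
   for whatever iterator of TreeChange values the caller already has (for
   example from comparing two trees); "debian" is an arbitrary excluded path.

fn filter_example(changes: Vec<crate::tree::TreeChange>) -> usize {
    let exclude = [std::path::Path::new("debian")];
    crate::delta::filter_excluded(changes.into_iter(), &exclude).count()
}
*/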
/// /// # Arguments /// * `tree1` - First tree to compare /// * `tree2` - Second tree to compare /// * `w` - Writer to write the diff to /// * `old_label` - Optional label for the old tree /// * `new_label` - Optional label for the new tree /// /// # Returns /// Result with empty success value or error pub fn show_diff_trees( tree1: &dyn crate::tree::PyTree, tree2: &dyn crate::tree::PyTree, mut w: impl Write, old_label: Option<&str>, new_label: Option<&str>, ) -> Result<(), crate::error::Error> { Python::attach(|py| -> PyResult<()> { let m = py.import("breezy.diff")?; let f = m.getattr("show_diff_trees")?; let o = py.import("io")?.call_method0("BytesIO")?; let kwargs = PyDict::new(py); if let Some(old_label) = old_label { kwargs.set_item("old_label", old_label)?; } if let Some(new_label) = new_label { kwargs.set_item("new_label", new_label)?; } f.call( (tree1.to_object(py), tree2.to_object(py), &o), Some(&kwargs), )?; let s = o.call_method0("getvalue")?.extract::>()?; w.write_all(&s)?; Ok(()) })?; Ok(()) } #[cfg(test)] mod tests { use super::*; use crate::controldir::create_standalone_workingtree; use crate::workingtree::WorkingTree; use std::io::Cursor; #[test] fn test_show_diff_trees_empty() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); let tree1 = wt.basis_tree().unwrap(); let tree2 = wt.basis_tree().unwrap(); let mut output = Vec::new(); let result = show_diff_trees(&tree1, &tree2, &mut output, None, None); assert!(result.is_ok()); } #[test] fn test_show_diff_trees_with_labels() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); let tree1 = wt.basis_tree().unwrap(); let tree2 = wt.basis_tree().unwrap(); let mut output = Vec::new(); let result = show_diff_trees(&tree1, &tree2, &mut output, Some("old"), Some("new")); assert!(result.is_ok()); } #[test] fn test_show_diff_trees_cursor() { crate::init(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); let tree1 = wt.basis_tree().unwrap(); let tree2 = wt.basis_tree().unwrap(); let mut cursor = Cursor::new(Vec::new()); let result = show_diff_trees(&tree1, &tree2, &mut cursor, None, None); assert!(result.is_ok()); } } breezyshim-0.7.5/src/dirty_tracker.rs000064400000000000000000000147011046102023000160240ustar 00000000000000//! Fast and efficient detection of files that have been modified in a directory tree. use crate::workingtree::{GenericWorkingTree, WorkingTree}; use dirty_tracker::DirtyTracker; pub use dirty_tracker::State; /// Tracks changes in a working tree. /// /// This struct provides functionality to efficiently detect which files have been /// modified in a working tree. pub struct DirtyTreeTracker { tracker: DirtyTracker, tree: GenericWorkingTree, base: std::path::PathBuf, } impl DirtyTreeTracker { /// Create a new DirtyTreeTracker for the given WorkingTree. pub fn new(tree: GenericWorkingTree) -> Self { let base = tree.basedir(); let tracker = DirtyTracker::new(&base).unwrap(); Self { tracker, tree, base, } } /// Create a new DirtyTreeTracker for a specific subpath within a WorkingTree. /// /// This allows tracking changes only within a specific subdirectory of the working tree. 
pub fn new_in_subpath(tree: GenericWorkingTree, subpath: &std::path::Path) -> Self { let base = tree.basedir(); let tracker = DirtyTracker::new(&base.join(subpath)).unwrap(); Self { tracker, tree, base, } } /// Get the current state. pub fn state(&mut self) -> State { let relpaths = self.relpaths(); if relpaths.is_none() { return State::Unknown; } if relpaths.unwrap().into_iter().next().is_some() { State::Dirty } else { State::Clean } } /// Get the relative paths of the dirty files. pub fn relpaths(&mut self) -> Option> { self.tracker.paths().map(|ps| { ps.iter() .map(|p| p.strip_prefix(&self.base).unwrap()) .filter(|p| !self.tree.is_control_filename(p)) .map(|p| p.to_path_buf()) .collect() }) } /// Get the absolute paths of the dirty files. pub fn paths(&mut self) -> Option> { self.relpaths() .map(|ps| ps.iter().map(|p| self.tree.abspath(p).unwrap()).collect()) } /// Mark the tree as clean. pub fn mark_clean(&mut self) { self.tracker.mark_clean() } } #[cfg(test)] mod tests { use super::*; use crate::controldir::create_standalone_workingtree; use crate::controldir::ControlDirFormat; #[test] fn test_unchanged_tree() { let td = tempfile::tempdir().unwrap(); let tree = create_standalone_workingtree(td.path(), &ControlDirFormat::default()).unwrap(); let mut tracker = DirtyTreeTracker::new(tree); assert_eq!(tracker.state(), State::Clean); assert_eq!(tracker.relpaths(), Some(std::collections::HashSet::new())); assert_eq!(tracker.paths(), Some(std::collections::HashSet::new())); } #[test] fn test_unversioned_file() { let td = tempfile::tempdir().unwrap(); let tree = create_standalone_workingtree(td.path(), &ControlDirFormat::default()).unwrap(); let mut tracker = DirtyTreeTracker::new(tree); std::fs::write(td.path().join("foo"), "bar").unwrap(); assert_eq!( tracker.relpaths(), Some(maplit::hashset! { std::path::PathBuf::from("foo") }) ); assert_eq!( tracker.paths(), Some(maplit::hashset! { td.path().join("foo") }) ); assert_eq!(tracker.state(), State::Dirty); } #[test] fn test_control_file_change() { let td = tempfile::tempdir().unwrap(); let tree = create_standalone_workingtree(td.path(), &ControlDirFormat::default()).unwrap(); let mut tracker = DirtyTreeTracker::new(Clone::clone(&tree)); tree.build_commit() .message("Dummy") .committer("Joe Example ") .allow_pointless(true) .commit() .unwrap(); assert_eq!(tracker.relpaths(), Some(std::collections::HashSet::new())); assert_eq!(tracker.state(), State::Clean); assert_eq!(tracker.paths(), Some(std::collections::HashSet::new())); } } breezyshim-0.7.5/src/error.rs000064400000000000000000001604771046102023000143230ustar 00000000000000//! 
Error handling for the Breezy Python bindings use crate::transform::RawConflict; use pyo3::import_exception; use pyo3::intern; use pyo3::prelude::*; use pyo3::PyErr; use url::Url; import_exception!(breezy.errors, UnknownFormatError); import_exception!(breezy.errors, NotBranchError); import_exception!(breezy.errors, ReadOnlyError); import_exception!(breezy.controldir, NoColocatedBranchSupport); import_exception!(breezy.errors, DependencyNotPresent); import_exception!(breezy.errors, PermissionDenied); import_exception!(breezy.transport, UnsupportedProtocol); import_exception!(breezy.transport, UnusableRedirect); import_exception!(breezy.urlutils, InvalidURL); import_exception!(breezy.errors, TransportError); import_exception!(breezy.errors, UnsupportedFormatError); import_exception!(breezy.errors, UnsupportedVcs); import_exception!(breezy.git.remote, RemoteGitError); import_exception!(breezy.git.remote, ProtectedBranchHookDeclined); import_exception!(http.client, IncompleteRead); import_exception!(breezy.bzr, LineEndingError); import_exception!(breezy.errors, InvalidHttpResponse); import_exception!(breezy.errors, AlreadyControlDirError); import_exception!(breezy.errors, AlreadyBranchError); import_exception!(breezy.errors, DivergedBranches); import_exception!(breezy.workspace, WorkspaceDirty); import_exception!(breezy.transport, NoSuchFile); import_exception!(breezy.commit, PointlessCommit); import_exception!(breezy.errors, NoWhoami); import_exception!(breezy.errors, NoSuchTag); import_exception!(breezy.errors, TagAlreadyExists); import_exception!(breezy.forge, ForgeLoginRequired); import_exception!(breezy.forge, UnsupportedForge); import_exception!(breezy.forge, MergeProposalExists); import_exception!(breezy.errors, UnsupportedOperation); import_exception!(breezy.errors, NoRepositoryPresent); import_exception!(breezy.errors, LockFailed); import_exception!(breezy.errors, LockContention); import_exception!(breezy.transport, FileExists); import_exception!(breezy.errors, NoSuchRevisionInTree); import_exception!(breezy.tree, MissingNestedTree); import_exception!(breezy.transform, ImmortalLimbo); import_exception!(breezy.transform, MalformedTransform); import_exception!(breezy.transform, TransformRenameFailed); import_exception!(breezy.errors, UnexpectedHttpStatus); import_exception!(breezy.errors, BadHttpRequest); import_exception!(breezy.errors, TransportNotPossible); import_exception!(breezy.errors, IncompatibleFormat); import_exception!(breezy.errors, NoSuchRevision); import_exception!(breezy.errors, RevisionNotPresent); import_exception!(breezy.forge, NoSuchProject); import_exception!(breezy.errors, ObjectNotLocked); import_exception!(breezy.plugins.gitlab.forge, ForkingDisabled); import_exception!(breezy.plugins.gitlab.forge, GitLabConflict); import_exception!(breezy.plugins.gitlab.forge, ProjectCreationTimeout); import_exception!(breezy.forge, SourceNotDerivedFromTarget); import_exception!(breezy.controldir, BranchReferenceLoop); import_exception!(breezy.errors, RedirectRequested); import_exception!(breezy.errors, ConflictsInTree); import_exception!(breezy.errors, NoRoundtrippingSupport); import_exception!(breezy.inter, NoCompatibleInter); lazy_static::lazy_static! { /// Static reference to the BreezyConnectionError class, if it exists. /// /// This is only present in Breezy versions before 4.0. 
pub static ref BreezyConnectionError: Option> = { Python::attach(|py| { let m = py.import("breezy.errors").unwrap(); m.getattr("ConnectionError").ok().map(|x| x.unbind()) }) }; } /// Error type for the Breezy Rust wrapper. /// /// This enum represents the various errors that can occur when using the Breezy /// API through this Rust wrapper. Each variant corresponds to a specific error /// condition, with many mapping directly to Python exceptions from the Breezy /// library. #[derive(Debug)] pub enum Error { /// A Python error that doesn't map to a specific variant. Other(PyErr), /// An unknown format was encountered. UnknownFormat(String), /// The specified path is not a branch. NotBranchError(String, Option), /// The repository format doesn't support colocated branches. NoColocatedBranchSupport, /// A required dependency is not present. DependencyNotPresent(String, String), /// Permission was denied when accessing a path. PermissionDenied(std::path::PathBuf, Option), /// The specified protocol is not supported. UnsupportedProtocol(String, Option), /// A redirect could not be followed. UnusableRedirect(String, String, String), /// A connection error occurred. ConnectionError(String), /// The specified URL is invalid. InvalidURL(String, Option), /// An error occurred during transport. TransportError(String), /// The specified format is not supported. UnsupportedFormat(String), /// The specified version control system is not supported. UnsupportedVcs(String), /// An error occurred when interacting with a remote Git repository. RemoteGitError(String), /// A read operation did not complete (partial read). IncompleteRead(Vec, Option), /// An error occurred with line endings in a file. LineEndingError(String), /// An invalid HTTP response was received. InvalidHttpResponse( String, String, Option, std::collections::HashMap, ), /// A control directory already exists at the specified path. AlreadyControlDir(std::path::PathBuf), /// A branch already exists at the specified path. AlreadyBranch(std::path::PathBuf), /// The branches have diverged. DivergedBranches, /// The workspace has uncommitted changes. WorkspaceDirty(std::path::PathBuf), /// The specified file does not exist. NoSuchFile(std::path::PathBuf), /// The commit would not change anything. PointlessCommit, /// No user identity has been configured. NoWhoami, /// The specified tag does not exist. NoSuchTag(String), /// The specified tag already exists. TagAlreadyExists(String), /// A socket error occurred. Socket(std::io::Error), /// Login to the forge is required. ForgeLoginRequired, /// The specified forge is not supported. UnsupportedForge(url::Url), /// A project already exists on the forge. ForgeProjectExists(String), /// A merge proposal already exists. MergeProposalExists(url::Url, Option), /// The operation is not supported. UnsupportedOperation(String, String), /// A protected branch hook declined the push. ProtectedBranchHookDeclined(String), /// No repository is present at the specified location. NoRepositoryPresent, /// Failed to acquire a lock. LockFailed(String), /// A file already exists at the specified path. FileExists(std::path::PathBuf, Option), /// Lock contention occurred. LockContention(String, String), /// The requested operation is not implemented. NotImplemented, /// The specified revision is not in the tree. NoSuchRevisionInTree(crate::RevisionId), /// A nested tree is missing at the specified path. MissingNestedTree(std::path::PathBuf), /// Failed to delete transform temporary directory. 
ImmortalLimbo(std::path::PathBuf), /// The transform is malformed. MalformedTransform(Vec), /// Failed to rename a file during a transform operation. TransformRenameFailed( std::path::PathBuf, std::path::PathBuf, String, std::io::Error, ), /// An unexpected HTTP status code was received. UnexpectedHttpStatus { /// The URL that was requested. url: url::Url, /// The HTTP status code that was received. code: u16, /// Additional information about the error. extra: Option, /// The HTTP headers that were received. headers: std::collections::HashMap, }, /// A timeout occurred. Timeout, /// A bad HTTP request was made. BadHttpRequest(Url, String), /// The transport is not possible. TransportNotPossible(String), /// The format is not compatible. IncompatibleFormat(String, String), /// The specified revision does not exist. NoSuchRevision(crate::RevisionId), /// The specified revision is not present in the repository. RevisionNotPresent(crate::RevisionId), /// The specified project does not exist. NoSuchProject(String), /// Forking is disabled for the specified project. ForkingDisabled(String), /// The project creation timed out. ProjectCreationTimeout(String, chrono::Duration), /// A conflict occurred in GitLab. GitLabConflict(String), /// The source branch is not derived from the target branch. SourceNotDerivedFromTarget, /// A loop was detected in branch references. BranchReferenceLoop, /// A redirect was requested. RedirectRequested { /// The source URL. source: url::Url, /// The target URL. target: url::Url, /// Whether the redirect is permanent. is_permanent: bool, }, /// There are conflicts in the tree. ConflictsInTree, /// The operation does not support roundtripping. NoRoundtrippingSupport, /// No compatible inter-repository implementation was found. NoCompatibleInter, /// The resource is read-only. ReadOnly, /// An object that needs to be locked is not locked. 
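/* Illustrative sketch, not part of the original source: callers typically match
   a handful of these variants specially and fall back to the Display
   implementation below for everything else. The messages are arbitrary.

fn report(err: &Error) {
    match err {
        Error::NotBranchError(path, _) => eprintln!("{} is not a branch", path),
        Error::DivergedBranches => eprintln!("local and remote branches have diverged"),
        Error::ConnectionError(msg) => eprintln!("network problem: {}", msg),
        other => eprintln!("error: {}", other),
    }
}
*/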
ObjectNotLocked(String), } impl From for Error { fn from(e: url::ParseError) -> Self { Error::InvalidURL(e.to_string(), None) } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Self::Other(e) => write!(f, "Error::Other({})", e), Self::UnknownFormat(s) => write!(f, "Unknown format: {}", s), Self::NotBranchError(path, detail) => { if let Some(detail) = detail { write!(f, "Not a branch: {}: {}", path, detail) } else { write!(f, "Not a branch: {}", path) } } Self::NoColocatedBranchSupport => write!(f, "No colocated branch support"), Self::DependencyNotPresent(d, r) => write!(f, "Dependency {} not present: {}", d, r), Self::PermissionDenied(p, r) => { if let Some(r) = r { write!(f, "Permission denied: {}: {}", p.display(), r) } else { write!(f, "Permission denied: {}", p.display()) } } Self::UnsupportedProtocol(p, r) => { if let Some(r) = r { write!(f, "Unsupported protocol: {}: {}", p, r) } else { write!(f, "Unsupported protocol: {}", p) } } Self::UnusableRedirect(p, r, u) => { write!(f, "Unusable redirect: {}: {} -> {}", p, r, u) } Self::ConnectionError(e) => write!(f, "Connection error: {}", e), Self::InvalidURL(p, r) => { if let Some(r) = r { write!(f, "Invalid URL: {}: {}", p, r) } else { write!(f, "Invalid URL: {}", p) } } Self::TransportError(e) => write!(f, "Transport error: {}", e), Self::UnsupportedFormat(s) => write!(f, "Unsupported format: {}", s), Self::UnsupportedVcs(s) => write!(f, "Unsupported VCS: {}", s), Self::RemoteGitError(e) => write!(f, "Remote Git error: {}", e), Self::IncompleteRead(partial, expected) => { write!(f, "Incomplete read: {:?} {:?}", partial, expected) } Self::LineEndingError(e) => write!(f, "Line ending error: {}", e), Self::InvalidHttpResponse(s, c, b, _hs) => { if let Some(b) = b { write!(f, "Invalid HTTP response: {} {}: {}", s, c, b) } else { write!(f, "Invalid HTTP response: {} {}", s, c) } } Self::AlreadyControlDir(p) => write!(f, "Already exists: {}", p.display()), Self::AlreadyBranch(p) => write!(f, "Already a branch: {}", p.display()), Self::DivergedBranches => write!(f, "Diverged branches"), Self::WorkspaceDirty(p) => write!(f, "Workspace dirty at {}", p.display()), Self::NoSuchFile(p) => write!(f, "No such file: {}", p.to_string_lossy()), Self::PointlessCommit => write!(f, "Pointless commit"), Self::NoWhoami => write!(f, "No whoami"), Self::NoSuchTag(tag) => write!(f, "No such tag: {}", tag), Self::TagAlreadyExists(tag) => write!(f, "Tag already exists: {}", tag), Self::Socket(e) => write!(f, "socket error: {}", e), Self::ForgeLoginRequired => write!(f, "Forge login required"), Self::UnsupportedForge(url) => write!(f, "Unsupported forge: {}", url), Self::ForgeProjectExists(p) => write!(f, "Forge project exists: {}", p), Self::MergeProposalExists(p, r) => { if let Some(r) = r { write!(f, "Merge proposal exists: {} -> {}", p, r) } else { write!(f, "Merge proposal exists: {}", p) } } Self::UnsupportedOperation(a, b) => write!(f, "Unsupported operation: {} on {}", a, b), Self::ProtectedBranchHookDeclined(e) => { write!(f, "Protected branch hook declined: {}", e) } Self::NoRepositoryPresent => write!(f, "No repository present"), Self::LockFailed(w) => write!(f, "Lock failed: {}", w), Self::FileExists(p, r) => { if let Some(r) = r { write!(f, "File exists: {}: {}", p.display(), r) } else { write!(f, "File exists: {}", p.display()) } } Self::LockContention(a, b) => write!(f, "Lock contention: {} {}", a, b), Self::NotImplemented => write!(f, "Not implemented"), 
Self::NoSuchRevisionInTree(rev) => write!(f, "No such revision in tree: {}", rev), Self::MissingNestedTree(p) => write!(f, "Missing nested tree: {}", p.display()), Self::ImmortalLimbo(p) => write!( f, "Failed to delete transform temporary directory: {}", p.display() ), Self::MalformedTransform(e) => write!(f, "Malformed transform: {:?}", e), Self::TransformRenameFailed(a, b, c, d) => write!( f, "Transform rename failed: {} -> {}: {}: {}", a.display(), b.display(), c, d ), Self::UnexpectedHttpStatus { url, code, extra, headers: _, } => { if let Some(extra) = extra { write!(f, "Unexpected HTTP status: {} {}: {}", url, code, extra) } else { write!(f, "Unexpected HTTP status: {} {}", url, code) } } Self::Timeout => write!(f, "Timeout"), Self::BadHttpRequest(url, msg) => write!(f, "Bad HTTP request: {} {}", url, msg), Self::TransportNotPossible(e) => write!(f, "Transport not possible: {}", e), Self::IncompatibleFormat(a, b) => { write!(f, "Incompatible format: {} is not compatible with {}", a, b) } Self::NoSuchRevision(rev) => write!(f, "No such revision: {}", rev), Self::RevisionNotPresent(rev) => write!(f, "Revision not present: {}", rev), Self::NoSuchProject(p) => write!(f, "No such project: {}", p), Self::ForkingDisabled(p) => write!(f, "Forking disabled: {}", p), Self::ProjectCreationTimeout(p, t) => { write!(f, "Project creation timeout: {} after {} seconds", p, t) } Self::GitLabConflict(p) => write!(f, "GitLab conflict: {}", p), Self::ConflictsInTree => write!(f, "Conflicts in tree"), Self::SourceNotDerivedFromTarget => write!(f, "Source not derived from target"), Self::BranchReferenceLoop => write!(f, "Branch reference loop"), Self::NoRoundtrippingSupport => write!(f, "No roundtripping support"), Self::NoCompatibleInter => write!(f, "No compatible inter"), Self::ReadOnly => write!(f, "Read-only"), Self::ObjectNotLocked(msg) => write!(f, "Object not locked: {}", msg), Self::RedirectRequested { source, target, is_permanent, } => { write!( f, "Redirect requested: {} -> {} (permanent: {})", source, target, is_permanent ) } } } } impl std::error::Error for Error {} impl From for Error { fn from(err: PyErr) -> Self { pyo3::import_exception!(socket, error); pyo3::Python::attach(|py| { let value = err.value(py); if err.is_instance_of::(py) { Error::UnknownFormat(value.getattr("format").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::NotBranchError( value.getattr("path").unwrap().extract().unwrap(), value.getattr("detail").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::NoColocatedBranchSupport } else if err.is_instance_of::(py) { Error::DependencyNotPresent( value.getattr("library").unwrap().extract().unwrap(), value.getattr("error").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::PermissionDenied( value.getattr("path").unwrap().extract().unwrap(), value.getattr("extra").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::UnsupportedProtocol( value.getattr("url").unwrap().extract().unwrap(), value.getattr("extra").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::UnusableRedirect( value.getattr("source").unwrap().extract().unwrap(), value.getattr("target").unwrap().extract().unwrap(), value.getattr("reason").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::InvalidURL( value.getattr("path").unwrap().extract().unwrap(), value.getattr("extra").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { 
Error::ConnectionError(err.to_string()) } else if err.is_instance_of::(py) { Error::UnsupportedFormat(value.getattr("format").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::UnsupportedVcs(value.getattr("vcs").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { if let Ok(e) = value.getattr("msg").unwrap().extract() { Error::RemoteGitError(e) } else { // Just get it from the args tuple Error::RemoteGitError(value.getattr("args").unwrap().extract().unwrap()) } } else if err.is_instance_of::(py) { Error::IncompleteRead( value.getattr("partial").unwrap().extract().unwrap(), value.getattr("expected").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::LineEndingError(value.getattr("file").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::AlreadyControlDir(value.getattr("path").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::AlreadyBranch(value.getattr("path").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::DivergedBranches } else if err.is_instance_of::(py) { let value = err.into_value(py); let tree = value.getattr(py, "tree").unwrap(); let path = value.getattr(py, "path").unwrap(); let path = tree .call_method1(py, "abspath", (path,)) .unwrap() .extract::(py) .unwrap(); Error::WorkspaceDirty(std::path::PathBuf::from(path)) } else if err.is_instance_of::(py) { Error::NoSuchFile(std::path::PathBuf::from( value.getattr("path").unwrap().extract::().unwrap(), )) } else if err.is_instance_of::(py) { Error::PointlessCommit } else if err.is_instance_of::(py) { Error::NoWhoami } else if err.is_instance_of::(py) { Error::NoSuchTag(value.getattr("tag_name").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::TagAlreadyExists(value.getattr("tag_name").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::Socket(std::io::Error::from_raw_os_error( value.getattr("errno").unwrap().extract().unwrap(), )) } else if err.is_instance_of::(py) { Error::ForgeLoginRequired } else if err.is_instance_of::(py) { let branch = value.getattr("branch").unwrap(); if let Ok(url) = branch.getattr("user_url") { Error::UnsupportedForge(url.extract::().unwrap().parse().unwrap()) } else { Error::UnsupportedForge(branch.extract::().unwrap().parse().unwrap()) } } else if err.is_instance_of::(py) { let source_url: String = value.getattr("url").unwrap().extract().unwrap(); let existing_proposal = value.getattr("existing_proposal").unwrap(); let target_url: Option = if existing_proposal.is_none() { None } else { Some(existing_proposal.getattr("url").unwrap().extract().unwrap()) }; Error::MergeProposalExists( source_url.parse().unwrap(), target_url.map(|u| u.parse().unwrap()), ) } else if err.is_instance_of::(py) { Error::UnsupportedOperation( value.getattr("mname").unwrap().extract().unwrap(), value.getattr("tname").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::ProtectedBranchHookDeclined(value.getattr("msg").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::NoRepositoryPresent } else if err.is_instance_of::(py) { let why = value.getattr("why").unwrap(); if why.is_none() { Error::LockFailed("".to_string()) } else { let why = why.call_method0("__str__").unwrap(); Error::LockFailed(why.extract().unwrap()) } } else if err.is_instance_of::(py) { Error::FileExists( std::path::PathBuf::from( value.getattr("path").unwrap().extract::().unwrap(), ), value.getattr("extra").unwrap().extract().unwrap(), ) } else if 
err.is_instance_of::(py) { Error::LockContention( value .getattr("lock") .unwrap() .call_method0("__str__") .unwrap() .extract() .unwrap(), value.getattr("msg").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::NotImplemented } else if err.is_instance_of::(py) { Error::NoSuchRevisionInTree( value.getattr("revision_id").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::MissingNestedTree(std::path::PathBuf::from( value.getattr("path").unwrap().extract::().unwrap(), )) } else if err.is_instance_of::(py) { Error::ImmortalLimbo(std::path::PathBuf::from( value .getattr("limbo_dir") .unwrap() .extract::() .unwrap(), )) } else if err.is_instance_of::(py) { Error::MalformedTransform(value.getattr("conflicts").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::TransformRenameFailed( std::path::PathBuf::from( value .getattr("from_path") .unwrap() .extract::() .unwrap(), ), std::path::PathBuf::from( value .getattr("to_path") .unwrap() .extract::() .unwrap(), ), value.getattr("why").unwrap().extract().unwrap(), std::io::Error::from_raw_os_error( value.getattr("errno").unwrap().extract::().unwrap(), ), ) } else if err.is_instance_of::(py) { Error::UnexpectedHttpStatus { url: value .getattr("path") .unwrap() .extract::() .unwrap() .parse() .unwrap(), code: value.getattr("code").unwrap().extract().unwrap(), extra: value.getattr("extra").unwrap().extract().unwrap(), headers: value.getattr("headers").unwrap().extract().unwrap(), } } else if err.is_instance_of::(py) { Error::Timeout } else if err.is_instance_of::(py) { Error::BadHttpRequest( value .getattr("path") .unwrap() .extract::() .unwrap() .parse() .unwrap(), value.getattr("reason").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::TransportNotPossible(value.getattr("msg").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { let format = value.getattr("format").unwrap(); let controldir = value.getattr("controldir").unwrap(); Error::IncompatibleFormat( if let Ok(format) = format.extract::() { format } else { format .call_method0(intern!(py, "get_format_string")) .unwrap() .extract() .unwrap() }, if let Ok(controldir) = controldir.extract::() { controldir } else { controldir .call_method0(intern!(py, "get_format_string")) .unwrap() .extract() .unwrap() }, ) } else if err.is_instance_of::(py) { Error::NoSuchRevision(value.getattr("revision").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::RevisionNotPresent(value.getattr("revision_id").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::NoSuchProject(value.getattr("project").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::ForkingDisabled(value.getattr("project").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::ProjectCreationTimeout( value.getattr("project").unwrap().extract().unwrap(), value.getattr("timeout").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::GitLabConflict(value.getattr("reason").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::ConflictsInTree } else if err.is_instance_of::(py) { Error::SourceNotDerivedFromTarget } else if BreezyConnectionError .as_ref() .and_then(|cls| { Python::attach(|py| { Some( err.is_instance_of::(py) && err.value(py).is_instance(cls.bind(py)).unwrap(), ) }) }) .unwrap_or(false) { Error::ConnectionError(err.to_string()) } else if err.is_instance_of::(py) { Error::ReadOnly } else if err.is_instance_of::(py) { 
Error::RedirectRequested { source: value .getattr("source") .unwrap() .extract::() .unwrap() .parse() .unwrap(), target: value .getattr("target") .unwrap() .extract::() .unwrap() .parse() .unwrap(), is_permanent: value.getattr("is_permanent").unwrap().extract().unwrap(), } } else if err.is_instance_of::(py) { Error::NoRoundtrippingSupport } else if err.is_instance_of::(py) { Error::NoCompatibleInter // Intentionally sorted below the more specific errors } else if err.is_instance_of::(py) { Error::InvalidHttpResponse( value.getattr("path").unwrap().extract().unwrap(), value.getattr("msg").unwrap().extract().unwrap(), value.getattr("orig_error").unwrap().extract().unwrap(), value.getattr("headers").unwrap().extract().unwrap(), ) } else if err.is_instance_of::(py) { Error::TransportError(value.getattr("msg").unwrap().extract().unwrap()) } else if err.is_instance_of::(py) { Error::BranchReferenceLoop } else if err.is_instance_of::(py) { Error::ObjectNotLocked(err.to_string()) } else { if std::env::var("BRZ_ERROR").is_ok() { // Print backtrace err.print(py); } Self::Other(err) } }) } } impl From for PyErr { fn from(e: Error) -> Self { match e { Error::Other(e) => e, Error::UnknownFormat(s) => UnknownFormatError::new_err((s,)), Error::NotBranchError(path, details) => NotBranchError::new_err((path, details)), Error::NoColocatedBranchSupport => { Python::attach(|py| NoColocatedBranchSupport::new_err((py.None(),))) } Error::DependencyNotPresent(library, error) => { DependencyNotPresent::new_err((library, error)) } Error::PermissionDenied(path, reason) => PermissionDenied::new_err((path, reason)), Error::UnsupportedProtocol(url, error) => UnsupportedProtocol::new_err((url, error)), Error::UnusableRedirect(source, target, reason) => { UnusableRedirect::new_err((source, target, reason)) } Error::ConnectionError(e) => pyo3::exceptions::PyConnectionError::new_err((e,)), Error::InvalidURL(path, reason) => InvalidURL::new_err((path, reason)), Error::TransportError(e) => TransportError::new_err((e,)), Error::UnsupportedFormat(s) => UnsupportedFormatError::new_err((s,)), Error::UnsupportedVcs(s) => UnsupportedVcs::new_err((s,)), Error::RemoteGitError(e) => RemoteGitError::new_err((e,)), Error::IncompleteRead(partial, expected) => Python::attach(|py| { let bytes = pyo3::types::PyBytes::new(py, partial.as_slice()); IncompleteRead::new_err((bytes.unbind(), expected)) }), Error::LineEndingError(e) => LineEndingError::new_err((e,)), Error::InvalidHttpResponse(status, msg, orig_error, headers) => { InvalidHttpResponse::new_err((status, msg, orig_error, headers)) } Error::AlreadyControlDir(path) => { AlreadyControlDirError::new_err((path.to_string_lossy().to_string(),)) } Error::AlreadyBranch(path) => { AlreadyBranchError::new_err((path.to_string_lossy().to_string(),)) } Error::DivergedBranches => { Python::attach(|py| DivergedBranches::new_err((py.None(), py.None()))) } Error::WorkspaceDirty(p) => WorkspaceDirty::new_err((p.to_string_lossy().to_string(),)), Error::NoSuchFile(p) => NoSuchFile::new_err(p.to_string_lossy().to_string()), Error::PointlessCommit => PointlessCommit::new_err(()), Error::NoWhoami => NoWhoami::new_err(()), Error::NoSuchTag(tag) => NoSuchTag::new_err((tag,)), Error::TagAlreadyExists(tag) => TagAlreadyExists::new_err((tag,)), Error::Socket(e) => { pyo3::import_exception!(socket, error); error::new_err((e.raw_os_error().unwrap(),)) } Error::ForgeLoginRequired => { Python::attach(|py| ForgeLoginRequired::new_err((py.None(),))) } Error::UnsupportedForge(url) => 
UnsupportedForge::new_err((url.to_string(),)), Error::ForgeProjectExists(name) => AlreadyControlDirError::new_err((name.to_string(),)), Error::MergeProposalExists(source, _target) => { Python::attach(|py| MergeProposalExists::new_err((source.to_string(), py.None()))) } Error::UnsupportedOperation(mname, tname) => { UnsupportedOperation::new_err((mname, tname)) } Error::ProtectedBranchHookDeclined(msg) => ProtectedBranchHookDeclined::new_err((msg,)), Error::NoRepositoryPresent => { Python::attach(|py| NoRepositoryPresent::new_err((py.None(),))) } Error::LockFailed(why) => Python::attach(|py| LockFailed::new_err((py.None(), why))), Error::FileExists(p, extra) => { FileExists::new_err((p.to_string_lossy().to_string(), extra)) } Error::LockContention(_lock, msg) => { Python::attach(|py| LockContention::new_err((py.None(), msg))) } Error::NotImplemented => pyo3::exceptions::PyNotImplementedError::new_err(()), Error::NoSuchRevisionInTree(rev) => { Python::attach(|py| NoSuchRevisionInTree::new_err((py.None(), rev.to_string()))) } Error::MissingNestedTree(p) => { MissingNestedTree::new_err((p.to_string_lossy().to_string(),)) } Error::ImmortalLimbo(p) => ImmortalLimbo::new_err((p.to_string_lossy().to_string(),)), Error::MalformedTransform(conflicts) => { MalformedTransform::new_err((Python::attach(|py| { conflicts.into_pyobject(py).unwrap().unbind() }),)) } Error::TransformRenameFailed(from_path, to_path, why, error) => { TransformRenameFailed::new_err(( from_path.to_string_lossy().to_string(), to_path.to_string_lossy().to_string(), why, PyErr::from(error), )) } Error::UnexpectedHttpStatus { url, code, extra, headers, } => UnexpectedHttpStatus::new_err((url.to_string(), code, extra, headers)), Error::Timeout => pyo3::exceptions::PyTimeoutError::new_err(()), Error::BadHttpRequest(url, reason) => { BadHttpRequest::new_err((url.to_string(), reason)) } Error::TransportNotPossible(e) => TransportNotPossible::new_err((e,)), Error::IncompatibleFormat(a, b) => IncompatibleFormat::new_err((a, b)), Error::NoSuchRevision(rev) => { Python::attach(|py| NoSuchRevision::new_err((py.None(), rev.to_string()))) } Error::RevisionNotPresent(rev) => RevisionNotPresent::new_err((rev.to_string(),)), Error::NoSuchProject(p) => NoSuchProject::new_err((p,)), Error::ForkingDisabled(p) => ForkingDisabled::new_err((p,)), Error::ProjectCreationTimeout(p, t) => ProjectCreationTimeout::new_err((p, t)), Error::GitLabConflict(p) => GitLabConflict::new_err((p,)), Error::ConflictsInTree => ConflictsInTree::new_err(()), Error::SourceNotDerivedFromTarget => SourceNotDerivedFromTarget::new_err(()), Error::BranchReferenceLoop => BranchReferenceLoop::new_err(()), Error::ReadOnly => Python::attach(|py| ReadOnlyError::new_err((py.None(),))), Error::ObjectNotLocked(msg) => ObjectNotLocked::new_err((msg,)), Error::RedirectRequested { source, target, is_permanent, } => RedirectRequested::new_err((source.to_string(), target.to_string(), is_permanent)), Error::NoRoundtrippingSupport => { Python::attach(|py| NoRoundtrippingSupport::new_err((py.None(), py.None()))) } Error::NoCompatibleInter => { Python::attach(|py| NoCompatibleInter::new_err((py.None(), py.None()))) } } } } #[test] fn test_error_unknownformat() { let e = Error::UnknownFormat("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of UnknownFormatError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_notbrancherror() { let e = Error::NotBranchError("foo".to_string(), Some("bar".to_string())); let p: PyErr = e.into(); // Verify 
that p is an instance of NotBranchError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_nocolocatedbranchsupport() { let e = Error::NoColocatedBranchSupport; let p: PyErr = e.into(); // Verify that p is an instance of NoColocatedBranchSupport Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_dependencynotpresent() { let e = Error::DependencyNotPresent("foo".to_string(), "bar".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of DependencyNotPresent Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_permissiondenied() { let e = Error::PermissionDenied(std::path::PathBuf::from("foo"), Some("bar".to_string())); let p: PyErr = e.into(); // Verify that p is an instance of PermissionDenied Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_unsupportedprotocol() { let e = Error::UnsupportedProtocol("foo".to_string(), Some("bar".to_string())); let p: PyErr = e.into(); // Verify that p is an instance of UnsupportedProtocol Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_unusableredirect() { let e = Error::UnusableRedirect("foo".to_string(), "bar".to_string(), "baz".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of UnusableRedirect Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_connectionerror() { let e = Error::ConnectionError("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of PyConnectionError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_invalidurl() { let e = Error::InvalidURL("foo".to_string(), Some("bar".to_string())); let p: PyErr = e.into(); // Verify that p is an instance of InvalidURL Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_transporterror() { let e = Error::TransportError("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of TransportError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_unsupportedformat() { let e = Error::UnsupportedFormat("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of UnsupportedFormatError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_unsupportedvcs() { let e = Error::UnsupportedVcs("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of UnsupportedVcs Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_remotegiterror() { let e = Error::RemoteGitError("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of RemoteGitError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_incompleteread() { let e = Error::IncompleteRead(vec![1, 2, 3], Some(4)); let p: PyErr = e.into(); // Verify that p is an instance of IncompleteRead Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_lineendingerror() { let e = Error::LineEndingError("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of LineEndingError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_invalidhttpresponse() { let e = Error::InvalidHttpResponse( "foo".to_string(), "bar".to_string(), Some("baz".to_string()), std::collections::HashMap::new(), ); let p: PyErr = e.into(); // Verify that p is an instance of InvalidHttpResponse 
Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_alreadyexists() { let e = Error::AlreadyControlDir(std::path::PathBuf::from("foo")); let p: PyErr = e.into(); // Verify that p is an instance of AlreadyControlDirError Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_divergedbranches() { let e = Error::DivergedBranches; let p: PyErr = e.into(); // Verify that p is an instance of DivergedBranches Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] #[ignore] // WorkspaceDirty takes a tree argument, which is not implemented fn test_error_workspacedirty() { let e = Error::WorkspaceDirty(std::path::PathBuf::from("foo")); let p: PyErr = e.into(); // Verify that p is an instance of WorkspaceDirty Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_nosuchfile() { let e = Error::NoSuchFile(std::path::PathBuf::from("foo")); let p: PyErr = e.into(); // Verify that p is an instance of NoSuchFile Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_pointlesscommit() { let e = Error::PointlessCommit; let p: PyErr = e.into(); // Verify that p is an instance of PointlessCommit Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_nowhoami() { let e = Error::NoWhoami; let p: PyErr = e.into(); // Verify that p is an instance of NoWhoami Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_nosuchtag() { let e = Error::NoSuchTag("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of NoSuchTag Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_tagalreadyexists() { let e = Error::TagAlreadyExists("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of TagAlreadyExists Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_socket() { let e = Error::Socket(std::io::Error::from_raw_os_error(0)); let p: PyErr = e.into(); // Verify that p is an instance of error Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_other() { let e = Error::Other(PyErr::new::((("foo",),))); let p: PyErr = e.into(); // Verify that p is an instance of error Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_forge_login_required() { let e = Error::ForgeLoginRequired; let p: PyErr = e.into(); // Verify that p is an instance of ForgeLoginRequired Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_unsupported_forge() { let e = Error::UnsupportedForge("http://example.com".parse().unwrap()); let p: PyErr = e.into(); // Verify that p is an instance of UnsupportedForge Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_error_forge_project_exists() { let e = Error::ForgeProjectExists("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of AlreadyControlDirError Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_merge_proposal_exists() { let e = Error::MergeProposalExists( "http://source.com".parse().unwrap(), Some("http://target.com".parse().unwrap()), ); let p: PyErr = e.into(); // Verify that p is an instance of MergeProposalExists Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] #[ignore] // UnsupportedOperation takes two arguments, which is not implemented fn 
test_error_unsupported_operation() { let e = Error::UnsupportedOperation("foo".to_string(), "bar".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of UnsupportedOperation Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_protected_branch_hook_declined() { let e = Error::ProtectedBranchHookDeclined("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of ProtectedBranchHookDeclined Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] #[ignore] // NoRepositoryPresent takes an argument, which is not implemented fn test_error_no_repository_present() { let e = Error::NoRepositoryPresent; let p: PyErr = e.into(); // Verify that p is an instance of NoRepositoryPresent Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] #[ignore] // LockFailed takes a lockfile argument, which is not implemented fn test_error_lock_failed() { let e = Error::LockFailed("bar".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of LockFailed Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_file_exists() { let e = Error::FileExists(std::path::PathBuf::from("foo"), Some("bar".to_string())); let p: PyErr = e.into(); // Verify that p is an instance of FileExists Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_lock_contention() { let e = Error::LockContention("foo".to_string(), "bar".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of LockContention Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_error_notimplementederror() { let e = Error::NotImplemented; let p: PyErr = e.into(); // Verify that p is an instance of PyNotImplementedError Python::attach(|py| { assert!(p.is_instance_of::(py)); }); } #[test] fn test_missing_nested_tree() { let e = Error::MissingNestedTree(std::path::PathBuf::from("foo")); let p: PyErr = e.into(); // Verify that p is an instance of MissingNestedTree Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_immortal_limbo() { let e = Error::ImmortalLimbo(std::path::PathBuf::from("foo")); let p: PyErr = e.into(); // Verify that p is an instance of ImmortalLimbo Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_malformed_transform() { let e = Error::MalformedTransform(vec![]); let p: PyErr = e.into(); // Verify that p is an instance of MalformedTransform Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_transform_rename_failed() { let e = Error::TransformRenameFailed( std::path::PathBuf::from("foo"), std::path::PathBuf::from("bar"), "baz".to_string(), std::io::Error::new(std::io::ErrorKind::NotFound, "foo"), ); let p: PyErr = e.into(); // Verify that p is an instance of TransformRenameFailed Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p,); }); } #[test] fn test_unexpected_http_status() { let e = Error::UnexpectedHttpStatus { url: url::Url::parse("http://example.com").unwrap(), code: 404, extra: Some("bar".to_string()), headers: std::collections::HashMap::new(), }; let p: PyErr = e.into(); // Verify that p is an instance of UnexpectedHttpStatus Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_timeout() { let e = Error::Timeout; let p: PyErr = e.into(); // Verify that p is an instance of PyTimeoutError Python::attach(|py| { 
assert!(p.is_instance_of::(py)); }); } #[test] fn test_bad_http_request() { let e = Error::BadHttpRequest("http://example.com".parse().unwrap(), "foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of BadHttpRequest Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_transport_not_possible() { let e = Error::TransportNotPossible("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of TransportNotPossible Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_incompatible_format() { let e = Error::IncompatibleFormat("foo".to_string(), "bar".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of IncompatibleFormat Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_no_such_project() { let e = Error::NoSuchProject("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of NoSuchProject Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_forking_disabled() { let e = Error::ForkingDisabled("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of ForkingDisabled Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_gitlab_conflict() { let e = Error::GitLabConflict("foo".to_string()); let p: PyErr = e.into(); // Verify that p is an instance of GitLabConflict Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_conflicts_in_tree() { let e = Error::ConflictsInTree; let p: PyErr = e.into(); // Verify that p is an instance of ConflictsInTree Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_project_creation_timeout() { let e = Error::ProjectCreationTimeout("foo".to_string(), chrono::Duration::seconds(0)); let p: PyErr = e.into(); // Verify that p is an instance of ProjectCreationTimeout Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_already_branch() { let e = Error::AlreadyBranch(std::path::PathBuf::from("foo")); let p: PyErr = e.into(); // Verify that p is an instance of AlreadyBranchError Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } #[test] fn test_redirect_requested() { let e = Error::RedirectRequested { source: "http://example.com".parse().unwrap(), target: "http://example.com".parse().unwrap(), is_permanent: true, }; let p: PyErr = e.into(); // Verify that p is an instance of RedirectRequested Python::attach(|py| { assert!(p.is_instance_of::(py), "{}", p); }); } breezyshim-0.7.5/src/export.rs000064400000000000000000000064011046102023000144750ustar 00000000000000//! Export a tree to a directory. use pyo3::prelude::*; use pyo3::types::PyDict; use std::path::Path; /// Export a tree to a directory. 
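///
/// Under the hood this calls `breezy.export.export` with the `"dir"` exporter,
/// so the target directory is populated with the tree contents rather than an
/// archive.
///
/// A minimal illustrative sketch (marked `ignore`, so it is not compiled as a
/// doctest); it mirrors the tests below and assumes `tmp_dir` and `target_dir`
/// are temporary paths created by the caller:
///
/// ```ignore
/// // Export the basis tree of a freshly created working tree into `target_dir`.
/// let wt = breezyshim::controldir::create_standalone_workingtree(tmp_dir.path(), "2a")?;
/// let tree = wt.basis_tree()?;
/// breezyshim::export::export(&tree, &target_dir, None)?;
/// ```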
/// /// # Arguments /// * `tree` - Tree to export /// * `target` - Target directory path /// * `subdir` - Optional subdirectory within the tree to export /// /// # Returns /// Result with empty success value or error pub fn export( tree: &T, target: &std::path::Path, subdir: Option<&std::path::Path>, ) -> Result<(), crate::error::Error> { Python::attach(|py| { let m = py.import("breezy.export").unwrap(); let export = m.getattr("export").unwrap(); let kwargs = PyDict::new(py); let subdir = if subdir.is_none() || subdir == Some(Path::new("")) { None } else { Some(subdir.unwrap().to_string_lossy().to_string()) }; kwargs.set_item("subdir", subdir).unwrap(); export.call( ( tree.to_object(py), target.to_string_lossy().to_string(), "dir", py.None(), ), Some(&kwargs), )?; Ok(()) }) } #[cfg(test)] mod tests { use super::*; use crate::controldir::create_standalone_workingtree; use crate::tree::MutableTree; use crate::workingtree::WorkingTree; use serial_test::serial; use std::path::Path; #[serial] #[test] fn test_export_tree() { let env = crate::testing::TestEnv::new(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); let tree = wt.basis_tree().unwrap(); let target_tmp = tempfile::tempdir().unwrap(); let target_dir = target_tmp.path().join("export_target"); let result = export(&tree, &target_dir, None); assert!(result.is_ok()); std::mem::drop(env); } #[serial] #[test] fn test_export_with_subdir() { let env = crate::testing::TestEnv::new(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); // Add some content first std::fs::write(tmp_dir.path().join("file.txt"), "content").unwrap(); wt.add(&[Path::new("file.txt")]).unwrap(); wt.build_commit().message("Add file").commit().unwrap(); let tree = wt.basis_tree().unwrap(); let target_tmp = tempfile::tempdir().unwrap(); let target_dir = target_tmp.path().join("export_subdir"); // Test with None subdir to simplify the test let result = export(&tree, &target_dir, None); assert!(result.is_ok()); std::mem::drop(env); } #[serial] #[test] fn test_export_with_empty_subdir() { let env = crate::testing::TestEnv::new(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); let tree = wt.basis_tree().unwrap(); let target_tmp = tempfile::tempdir().unwrap(); let target_dir = target_tmp.path().join("export_empty"); let subdir = Path::new(""); let result = export(&tree, &target_dir, Some(subdir)); assert!(result.is_ok()); std::mem::drop(env); } } breezyshim-0.7.5/src/foreign.rs000064400000000000000000000013051046102023000146030ustar 00000000000000//! Support for foreign version control systems. //! //! This module provides types and traits for interacting with various //! version control systems supported by Breezy. /// Type of version control system. #[derive(PartialEq, Eq, Debug, Clone)] pub enum VcsType { /// Bazaar version control system. Bazaar, /// Git version control system. Git, /// Mercurial version control system. Hg, /// Subversion version control system. Svn, /// Fossil version control system. Fossil, /// Darcs version control system. Darcs, /// CVS version control system. Cvs, /// GNU Arch version control system. Arch, /// SVK version control system. Svk, } breezyshim-0.7.5/src/forge.rs000064400000000000000000000641051046102023000142630ustar 00000000000000//! Code hosting services and merge proposals. 
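//!
//! The central types are [`Forge`], a code hosting service such as GitHub or
//! GitLab, and [`MergeProposal`], a pull/merge request on that service.
//!
//! A rough usage sketch (marked `ignore`, not compiled as a doctest); `branch`
//! is assumed to be an already-opened branch hosted on a supported forge and
//! `url` the address of an existing proposal:
//!
//! ```ignore
//! // Look up the forge behind a branch, then inspect a proposal by URL.
//! let forge = breezyshim::forge::get_forge(&branch)?;
//! let proposal = forge.get_proposal_by_url(&url)?;
//! println!("merged: {}", proposal.is_merged()?);
//! ```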
use crate::branch::{py_tag_selector, Branch, GenericBranch, PyBranch}; use crate::error::Error; use crate::revisionid::RevisionId; use pyo3::exceptions::PyValueError; use pyo3::prelude::*; use pyo3::types::PyDict; use std::hash::Hash; /// Represents a code forge (hosting service) like GitHub, GitLab, etc. pub struct Forge(Py); impl Clone for Forge { fn clone(&self) -> Self { Forge(Python::attach(|py| self.0.clone_ref(py))) } } impl From> for Forge { fn from(obj: Py) -> Self { Forge(obj) } } impl std::fmt::Debug for MergeProposal { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // Just print the URL for now let mut s = f.debug_struct("MergeProposal"); if let Ok(url) = self.url() { s.field("url", &url); } s.finish() } } impl std::fmt::Display for MergeProposal { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let url = self.url().unwrap(); write!(f, "{}", url) } } /// Status of a merge proposal. #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub enum MergeProposalStatus { /// All merge proposals regardless of status. All, /// Open merge proposals that haven't been merged or closed. Open, /// Closed merge proposals that weren't merged. Closed, /// Merged merge proposals that have been accepted and integrated. Merged, } impl MergeProposalStatus { /// Get all possible merge proposal statuses. pub fn all() -> Vec { vec![MergeProposalStatus::All] } } impl std::str::FromStr for MergeProposalStatus { type Err = String; fn from_str(s: &str) -> Result { match s { "all" => Ok(MergeProposalStatus::All), "open" => Ok(MergeProposalStatus::Open), "merged" => Ok(MergeProposalStatus::Merged), "closed" => Ok(MergeProposalStatus::Closed), _ => Err(format!("Invalid merge proposal status: {}", s)), } } } impl std::fmt::Display for MergeProposalStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { MergeProposalStatus::All => write!(f, "all"), MergeProposalStatus::Open => write!(f, "open"), MergeProposalStatus::Merged => write!(f, "merged"), MergeProposalStatus::Closed => write!(f, "closed"), } } } impl<'py> IntoPyObject<'py> for MergeProposalStatus { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.to_string().into_pyobject(py).unwrap().into_any()) } } impl<'a, 'py> FromPyObject<'a, 'py> for MergeProposalStatus { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let status = ob.extract::()?; match status.as_str() { "all" => Ok(MergeProposalStatus::All), "open" => Ok(MergeProposalStatus::Open), "merged" => Ok(MergeProposalStatus::Merged), "closed" => Ok(MergeProposalStatus::Closed), _ => Err(PyValueError::new_err((format!( "Invalid merge proposal status: {}", status ),))), } } } /// A merge proposal (pull request) on a code hosting service. pub struct MergeProposal(Py); impl Clone for MergeProposal { fn clone(&self) -> Self { MergeProposal(Python::attach(|py| self.0.clone_ref(py))) } } impl From> for MergeProposal { fn from(obj: Py) -> Self { MergeProposal(obj) } } impl MergeProposal { /// Create a merge proposal reference from a URL. pub fn from_url(url: &url::Url) -> Result { get_proposal_by_url(url) } /// Reopens a previously closed merge proposal. pub fn reopen(&self) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0.call_method0(py, "reopen")?; Ok(()) }) } /// Closes an open merge proposal without merging it. 
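    ///
    /// Illustrative only (marked `ignore`); assumes `proposal` is an open
    /// [`MergeProposal`] that the authenticated user is allowed to close:
    ///
    /// ```ignore
    /// proposal.close()?;
    /// assert!(proposal.is_closed()?);
    /// ```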
pub fn close(&self) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0.call_method0(py, "close")?; Ok(()) }) } /// Returns the URL of the merge proposal. pub fn url(&self) -> Result { Python::attach(|py| { let url = self.0.getattr(py, "url")?; Ok(url.extract::(py)?.parse().unwrap()) }) } /// Checks if the merge proposal has been merged. pub fn is_merged(&self) -> Result { Python::attach(|py| { let is_merged = self.0.call_method0(py, "is_merged")?; is_merged.extract(py).map_err(Into::into) }) } /// Checks if the merge proposal has been closed without being merged. pub fn is_closed(&self) -> Result { Python::attach(|py| { let is_closed = self.0.call_method0(py, "is_closed")?; is_closed.extract(py).map_err(Into::into) }) } /// Retrieves the title of the merge proposal. pub fn get_title(&self) -> Result, crate::error::Error> { Python::attach(|py| { let title = self.0.call_method0(py, "get_title")?; title.extract(py).map_err(Into::into) }) } /// Sets the title of the merge proposal. pub fn set_title(&self, title: Option<&str>) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0.call_method1(py, "set_title", (title,))?; Ok(()) }) } /// Retrieves the commit message associated with the merge proposal. pub fn get_commit_message(&self) -> Result, crate::error::Error> { Python::attach(|py| { let commit_message = self.0.call_method0(py, "get_commit_message")?; commit_message.extract(py).map_err(Into::into) }) } /// Sets the commit message for the merge proposal. pub fn set_commit_message( &self, commit_message: Option<&str>, ) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0 .call_method1(py, "set_commit_message", (commit_message,))?; Ok(()) }) } /// Returns the URL of the target branch for this merge proposal. pub fn get_target_branch_url(&self) -> Result, crate::error::Error> { Python::attach(|py| { let target_branch_url = self.0.call_method0(py, "get_target_branch_url")?; target_branch_url .extract::(py)? .parse::() .map(Some) .map_err(Into::into) }) } /// Returns the URL of the source branch for this merge proposal. pub fn get_source_branch_url(&self) -> Result, crate::error::Error> { Python::attach(|py| { let source_branch_url = self.0.call_method0(py, "get_source_branch_url")?; source_branch_url .extract::(py)? .parse::() .map(Some) .map_err(Into::into) }) } /// Retrieves the description of the merge proposal. pub fn get_description(&self) -> Result, crate::error::Error> { Python::attach(|py| { let description = self.0.call_method0(py, "get_description")?; description.extract(py).map_err(Into::into) }) } /// Sets the description of the merge proposal. pub fn set_description(&self, description: Option<&str>) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0.call_method1(py, "set_description", (description,))?; Ok(()) }) } /// Checks if the merge proposal can currently be merged. pub fn can_be_merged(&self) -> Result { Python::attach(|py| { let can_be_merged = self.0.call_method0(py, "can_be_merged")?; can_be_merged.extract(py).map_err(Into::into) }) } /// Checks if the merge proposal supports automatic merging. pub fn supports_auto_merge(&self) -> bool { Python::attach(|py| { self.0 .getattr(py, "supports_auto_merge") .unwrap() .extract(py) .unwrap() }) } /// Merges the merge proposal, optionally using automatic merge. /// /// The `auto` parameter determines whether to use automatic merging. 
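    ///
    /// Illustrative only (marked `ignore`); `proposal` is assumed to be
    /// mergeable, and `auto = true` is presumably only useful on forges where
    /// [`MergeProposal::supports_auto_merge`] returns `true`:
    ///
    /// ```ignore
    /// if proposal.supports_auto_merge() {
    ///     proposal.merge(true)?;
    /// } else if proposal.can_be_merged()? {
    ///     proposal.merge(false)?;
    /// }
    /// ```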
pub fn merge(&self, auto: bool) -> Result<(), Error> { Python::attach(|py| { self.0.call_method1(py, "merge", (auto,))?; Ok(()) }) } /// Returns the web URL for viewing the merge proposal in a browser. pub fn get_web_url(&self) -> Result { Python::attach(|py| { let web_url = self.0.call_method0(py, "get_web_url")?; web_url .extract::(py)? .parse::() .map_err(Into::into) }) } /// Returns the username of the person who merged this proposal, if it has been merged. pub fn get_merged_by(&self) -> Result, crate::error::Error> { Python::attach(|py| { let merged_by = self.0.call_method0(py, "get_merged_by")?; merged_by.extract(py).map_err(Into::into) }) } /// Returns the date and time when this proposal was merged, if it has been merged. pub fn get_merged_at( &self, ) -> Result>, crate::error::Error> { Python::attach(|py| { let merged_at = self.0.call_method0(py, "get_merged_at")?; merged_at .extract::>>(py) .map_err(Into::into) }) } } #[pyclass] /// Builder for creating merge proposals. pub struct ProposalBuilder(Py, Py); impl ProposalBuilder { /// Sets the description for the merge proposal being built. pub fn description(self, description: &str) -> Self { Python::attach(|py| { self.1 .bind(py) .set_item("description", description) .unwrap(); }); self } /// Sets the labels for the merge proposal being built. pub fn labels(self, labels: &[&str]) -> Self { Python::attach(|py| { self.1.bind(py).set_item("labels", labels).unwrap(); }); self } /// Sets the reviewers for the merge proposal being built. pub fn reviewers(self, reviewers: &[&str]) -> Self { Python::attach(|py| { self.1.bind(py).set_item("reviewers", reviewers).unwrap(); }); self } /// Sets whether to allow collaboration for the merge proposal being built. pub fn allow_collaboration(self, allow_collaboration: bool) -> Self { Python::attach(|py| { self.1 .bind(py) .set_item("allow_collaboration", allow_collaboration) .unwrap(); }); self } /// Sets the title for the merge proposal being built. pub fn title(self, title: &str) -> Self { Python::attach(|py| { self.1.bind(py).set_item("title", title).unwrap(); }); self } /// Sets the commit message for the merge proposal being built. pub fn commit_message(self, commit_message: &str) -> Self { Python::attach(|py| { self.1 .bind(py) .set_item("commit_message", commit_message) .unwrap(); }); self } /// Sets whether the merge proposal is a work in progress. pub fn work_in_progress(self, work_in_progress: bool) -> Self { Python::attach(|py| { self.1 .bind(py) .set_item("work_in_progress", work_in_progress) .unwrap(); }); self } /// Creates the merge proposal with all configured properties. pub fn build(self) -> Result { Python::attach(|py| { let kwargs = self.1; let proposal = self .0 .call_method(py, "create_proposal", (), Some(kwargs.bind(py)))?; Ok(MergeProposal::from(proposal)) }) } } impl Forge { fn to_object(&self) -> &Py { &self.0 } /// Retrieves a merge proposal by its URL. pub fn get_proposal_by_url( &self, url: &url::Url, ) -> Result { Python::attach(|py| { let proposal = self .0 .call_method1(py, "get_proposal_by_url", (url.as_str(),))?; Ok(MergeProposal::from(proposal)) }) } /// Returns the web URL for a given branch on this forge. pub fn get_web_url(&self, branch: &dyn PyBranch) -> Result { Python::attach(|py| { let forge_obj = self.to_object(); let branch_obj = branch.to_object(py); let url = forge_obj .call_method1(py, "get_web_url", (&branch_obj,))? .extract::(py) .unwrap(); Ok(url.parse::().unwrap()) }) } /// Returns the base URL of this forge. 
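    ///
    /// Illustrative only (marked `ignore`):
    ///
    /// ```ignore
    /// let forge = breezyshim::forge::get_forge_by_hostname("gitlab.com")?;
    /// println!("{}", forge.base_url());
    /// ```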
pub fn base_url(&self) -> url::Url { Python::attach(|py| { let base_url = self.0.getattr(py, "base_url").unwrap(); base_url.extract::(py).unwrap().parse().unwrap() }) } /// Returns the kind of forge (e.g., GitHub, GitLab). pub fn forge_kind(&self) -> String { Python::attach(|py| self.0.bind(py).get_type().name().unwrap().to_string()) } /// Returns the name of the forge. pub fn forge_name(&self) -> String { Python::attach(|py| self.0.bind(py).get_type().name().unwrap().to_string()) } /// Returns the format used for merge proposal descriptions on this forge. pub fn merge_proposal_description_format(&self) -> String { Python::attach(|py| { let merge_proposal_description_format = self .to_object() .getattr(py, "merge_proposal_description_format") .unwrap(); merge_proposal_description_format.extract(py).unwrap() }) } /// Checks if this forge supports setting commit messages for merge proposals. pub fn supports_merge_proposal_commit_message(&self) -> bool { Python::attach(|py| { let supports_merge_proposal_commit_message = self .to_object() .getattr(py, "supports_merge_proposal_commit_message") .unwrap(); supports_merge_proposal_commit_message.extract(py).unwrap() }) } /// Checks if this forge supports setting titles for merge proposals. pub fn supports_merge_proposal_title(&self) -> bool { Python::attach(|py| { let supports_merge_proposal_title = self .to_object() .getattr(py, "supports_merge_proposal_title") .unwrap(); supports_merge_proposal_title.extract(py).unwrap() }) } /// Checks if this forge supports adding labels to merge proposals. pub fn supports_merge_proposal_labels(&self) -> bool { Python::attach(|py| { let supports_merge_proposal_labels = self .to_object() .getattr(py, "supports_merge_proposal_labels") .unwrap(); supports_merge_proposal_labels.extract(py).unwrap() }) } /// Creates a proposal builder for a merge proposal from one branch to another. pub fn get_proposer( &self, from_branch: &dyn PyBranch, to_branch: &dyn PyBranch, ) -> Result { Python::attach(|py| { let from_branch_obj = from_branch.to_object(py); let to_branch_obj = to_branch.to_object(py); Ok(ProposalBuilder( self.0 .call_method1(py, "get_proposer", (from_branch_obj, to_branch_obj))?, PyDict::new(py).into(), )) }) } /// Returns an iterator over merge proposals owned by the current user. pub fn iter_my_proposals( &self, status: Option, author: Option, ) -> Result, Error> { let ret: Vec = Python::attach(|py| -> Result, Error> { Ok(self .0 .call_method(py, "iter_my_proposals", (status, author), None)? .bind(py) .try_iter() .unwrap() .map(|proposal| MergeProposal::from(proposal.unwrap().unbind())) .collect()) })?; Ok(ret.into_iter()) } /// Gets a branch derived from a main branch with the given name and optional owner. pub fn get_derived_branch( &self, main_branch: &dyn PyBranch, name: &str, owner: Option<&str>, preferred_schemes: Option<&[&str]>, ) -> Result, crate::error::Error> { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(owner) = owner { kwargs.set_item("owner", owner)?; } if let Some(preferred_schemes) = preferred_schemes { kwargs.set_item("preferred_schemes", preferred_schemes)?; } let branch = self.0.call_method( py, "get_derived_branch", (main_branch.to_object(py), name), Some(&kwargs), )?; Ok(Box::new(GenericBranch::from(branch)) as Box) }) } /// Returns an iterator over merge proposals from one branch to another. 
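    ///
    /// Illustrative only (marked `ignore`); `source` and `target` are assumed
    /// to be already-opened branches on the same forge:
    ///
    /// ```ignore
    /// for proposal in forge.iter_proposals(&source, &target, MergeProposalStatus::Open)? {
    ///     println!("{}", proposal.url()?);
    /// }
    /// ```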
pub fn iter_proposals( &self, source_branch: &dyn PyBranch, target_branch: &dyn PyBranch, status: MergeProposalStatus, ) -> Result, crate::error::Error> { Python::attach(move |py| { let kwargs = PyDict::new(py); let source_branch_obj = source_branch.to_object(py); let target_branch_obj = target_branch.to_object(py); kwargs.set_item("status", status.to_string())?; let proposal_iter: Py = self.0.call_method( py, "iter_proposals", (&source_branch_obj, &target_branch_obj), Some(&kwargs), )?; let mut ret = Vec::new(); loop { match proposal_iter.call_method0(py, "__next__") { Ok(proposal) => { ret.push(MergeProposal::from(proposal)); } Err(e) => { if e.is_instance_of::(py) { break; } else { return Err(e.into()); } } } } Ok(ret.into_iter()) }) } /// Publishes a derived branch and returns the branch and its URL. pub fn publish_derived( &self, local_branch: &dyn PyBranch, base_branch: &dyn PyBranch, name: &str, overwrite: Option, owner: Option<&str>, revision_id: Option<&RevisionId>, tag_selector: Option bool>>, ) -> Result<(Box, url::Url), crate::error::Error> { Python::attach(|py| { let kwargs = PyDict::new(py); let local_branch_obj = local_branch.to_object(py); let base_branch_obj = base_branch.to_object(py); let forge_obj = self.to_object(); kwargs.set_item("local_branch", local_branch_obj)?; kwargs.set_item("base_branch", base_branch_obj)?; kwargs.set_item("name", name)?; if let Some(overwrite) = overwrite { kwargs.set_item("overwrite", overwrite)?; } if let Some(owner) = owner { kwargs.set_item("owner", owner)?; } if let Some(revision_id) = revision_id { kwargs.set_item("revision_id", revision_id.clone())?; } if let Some(tag_selector) = tag_selector { kwargs.set_item("tag_selector", py_tag_selector(py, tag_selector)?)?; } let (b, u): (Py, String) = forge_obj .call_method(py, "publish_derived", (), Some(&kwargs))? .extract(py)?; Ok(( Box::new(GenericBranch::from(b)) as Box, u.parse::().unwrap(), )) }) } /// Returns the URL for pushing to a branch on this forge. pub fn get_push_url(&self, branch: &dyn PyBranch) -> url::Url { Python::attach(|py| { let forge_obj = self.to_object(); let branch_obj = branch.to_object(py); let url = forge_obj .call_method1(py, "get_push_url", (&branch_obj,)) .unwrap() .extract::(py) .unwrap(); url.parse::().unwrap() }) } /// Returns the URL for a user's profile on this forge. pub fn get_user_url(&self, user: &str) -> Result { Python::attach(|py| { let url = self .to_object() .call_method1(py, "get_user_url", (user,)) .unwrap() .extract::(py) .unwrap(); Ok(url.parse::().unwrap()) }) } /// Returns the username of the currently authenticated user. 
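    ///
    /// Illustrative only (marked `ignore`); the sketch assumes the result is
    /// `Option`-like, with `None` meaning that no user is logged in:
    ///
    /// ```ignore
    /// if let Some(user) = forge.get_current_user()? {
    ///     println!("logged in as {}", user);
    /// }
    /// ```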
pub fn get_current_user(&self) -> Result, crate::error::Error> { Python::attach(|py| { let user = self .to_object() .call_method0(py, "get_current_user") .unwrap() .extract::>(py) .unwrap(); Ok(user) }) } } impl std::fmt::Debug for Forge { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut s = f.debug_struct("Forge"); if let Ok(base_url) = self.base_url().to_string().parse::() { s.field("base_url", &base_url); } s.finish() } } impl std::fmt::Display for Forge { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let base_url = self.base_url(); write!(f, "{}", base_url) } } impl<'a, 'py> FromPyObject<'a, 'py> for Forge { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Forge(ob.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for Forge { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } /// Returns the forge associated with the given branch. pub fn get_forge(branch: &dyn PyBranch) -> Result { Python::attach(|py| { let m = py.import("breezy.forge").unwrap(); let forge = m.call_method1("get_forge", (branch.to_object(py),))?; Ok(Forge(forge.unbind())) }) } /// Returns a forge instance for the given hostname. pub fn get_forge_by_hostname(hostname: &str) -> Result { Python::attach(|py| { let m = py.import("breezy.forge").unwrap(); let forge = m.call_method1("get_forge_by_hostname", (hostname,))?; Ok(Forge(forge.unbind())) }) } /// Extracts a title from a description text. pub fn determine_title(description: &str) -> Result { Python::attach(|py| { let m = py.import("breezy.forge").unwrap(); let title = match m.call_method1("determine_title", (description,)) { Ok(title) => title, Err(e) => return Err(e.to_string()), }; match title.extract::() { Ok(title) => Ok(title), Err(e) => Err(e.to_string()), } }) } /// Returns an iterator over all available forge instances. pub fn iter_forge_instances() -> impl Iterator { let ret = Python::attach(|py| { let m = py.import("breezy.forge").unwrap(); let f = m.getattr("iter_forge_instances").unwrap(); let instances = f.call0().unwrap(); instances .try_iter() .unwrap() .map(|i| Forge(i.unwrap().unbind())) .collect::>() }); ret.into_iter() } /// Creates a new project on a forge with the given name and optional summary. pub fn create_project(name: &str, summary: Option<&str>) -> Result<(), Error> { Python::attach(|py| { let m = py.import("breezy.forge").unwrap(); m.call_method1("create_project", (name, summary))?; Ok(()) }) } /// Retrieves a merge proposal by its URL. pub fn get_proposal_by_url(url: &url::Url) -> Result { Python::attach(|py| { let m = py.import("breezy.forge").unwrap(); let proposal = m.call_method1("get_proposal_by_url", (url.to_string(),))?; Ok(MergeProposal::from(proposal.unbind())) }) } #[cfg(test)] mod tests { #[test] fn test_determine_title() { let description = "This is a test description"; let title = super::determine_title(description).unwrap(); assert_eq!(title, "This is a test description"); } #[test] fn test_determine_title_invalid() { let description = ""; assert_eq!( "ValueError: ", super::determine_title(description).unwrap_err() ); } } breezyshim-0.7.5/src/fossil.rs000064400000000000000000000041471046102023000144600ustar 00000000000000//! Support for detecting Fossil repositories. //! //! This module provides a prober for detecting Fossil repositories, but //! currently does not provide any additional functionality. 
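//!
//! A short sketch (marked `ignore`, not compiled as a doctest): the prober is
//! only available when the `breezy.plugins.fossil` plugin can be imported.
//!
//! ```ignore
//! if let Some(prober) = breezyshim::fossil::RemoteFossilProber::new() {
//!     // Hand `prober` to control-dir probing APIs that accept a prober.
//! }
//! ```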
use pyo3::exceptions::PyModuleNotFoundError; use pyo3::prelude::*; /// A prober that can detect Fossil repositories. pub struct RemoteFossilProber(Py); impl RemoteFossilProber { /// Create a new RemoteFossilProber, returning None if the Fossil plugin is not available. pub fn new() -> Option { Python::attach(|py| { let m = match py.import("breezy.plugins.fossil") { Ok(m) => m, Err(e) => { if e.is_instance_of::(py) { return None; } else { e.print_and_set_sys_last_vars(py); panic!("Failed to import breezy.plugins.fossil"); } } }; let prober = m .getattr("RemoteFossilProber") .expect("Failed to get RemoteFossilProber"); Some(Self(prober.unbind())) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for RemoteFossilProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Self(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for RemoteFossilProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl std::fmt::Debug for RemoteFossilProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("RemoteFossilProber({:?})", self.0)) } } impl crate::controldir::PyProber for RemoteFossilProber { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_remote_fossil_prober() { let _ = RemoteFossilProber::new(); } } breezyshim-0.7.5/src/git.rs000064400000000000000000000222611046102023000137410ustar 00000000000000//! Git version control system support. use pyo3::exceptions::PyModuleNotFoundError; use pyo3::prelude::*; /// A prober that can detect remote Git repositories. pub struct RemoteGitProber(Py); /// The SHA1 hash consisting of all zeros, representing the absence of a commit in Git. pub const ZERO_SHA: &[u8] = b"0000000000000000000000000000000000000000"; impl RemoteGitProber { /// Create a new RemoteGitProber, returning None if the Git plugin is not available. pub fn new() -> Option { Python::attach(|py| { let m = match py.import("breezy.git") { Ok(m) => m, Err(e) => { if e.is_instance_of::(py) { return None; } else { e.print_and_set_sys_last_vars(py); panic!("Failed to import breezy.git"); } } }; let prober = m .getattr("RemoteGitProber") .expect("Failed to get GitProber"); Some(Self(prober.unbind())) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for RemoteGitProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Self(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for RemoteGitProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl std::fmt::Debug for RemoteGitProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("RemoteGitProber({:?})", self.0)) } } impl crate::controldir::PyProber for RemoteGitProber { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } /// Format for bare local Git repositories. pub struct BareLocalGitControlDirFormat(Py); impl BareLocalGitControlDirFormat { /// Create a new BareLocalGitControlDirFormat. 
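    ///
    /// Note that this panics if the `breezy.git` plugin cannot be imported.
    /// Illustrative only (marked `ignore`), mirroring the tests below:
    ///
    /// ```ignore
    /// let format = BareLocalGitControlDirFormat::new();
    /// let _format = breezyshim::controldir::AsFormat::as_format(&format);
    /// ```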
pub fn new() -> Self { Python::attach(|py| { let m = py .import("breezy.git") .expect("Failed to import breezy.git"); let format = m .getattr("BareLocalGitControlDirFormat") .expect("Failed to get BareLocalGitControlDirFormat"); Self( format .call0() .expect("Failed to create BareLocalGitControlDirFormat") .unbind(), ) }) } } impl<'py> IntoPyObject<'py> for BareLocalGitControlDirFormat { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl crate::controldir::AsFormat for BareLocalGitControlDirFormat { fn as_format(&self) -> Option { Some(Python::attach(|py| { crate::controldir::ControlDirFormat::from(self.0.clone_ref(py)) })) } } /// Retrieve the Git committer information from the working tree's repository. pub fn get_committer(working_tree: &dyn crate::workingtree::PyWorkingTree) -> Option { use crate::branch::Branch; use crate::repository::PyRepository; pyo3::Python::attach(|py| { let repo = working_tree.branch().repository(); let git = match repo.to_object(py).getattr(py, "_git") { Ok(x) => Some(x), Err(e) if e.is_instance_of::(py) => None, Err(e) => { return Err(e); } }; if let Some(git) = git { let cs = git.call_method0(py, "get_config_stack")?; let mut user = std::env::var("GIT_COMMITTER_NAME").ok(); let mut email = std::env::var("GIT_COMMITTER_EMAIL").ok(); if user.is_none() { match cs.call_method1(py, "get", (("user",), "name")) { Ok(x) => { user = Some( std::str::from_utf8(x.extract::<&[u8]>(py)?) .unwrap() .to_string(), ); } Err(e) if e.is_instance_of::(py) => { // Ignore } Err(e) => { return Err(e); } }; } if email.is_none() { match cs.call_method1(py, "get", (("user",), "email")) { Ok(x) => { email = Some( std::str::from_utf8(x.extract::<&[u8]>(py)?) .unwrap() .to_string(), ); } Err(e) if e.is_instance_of::(py) => { // Ignore } Err(e) => { return Err(e); } }; } if let (Some(user), Some(email)) = (user, email) { return Ok(Some(format!("{} <{}>", user, email))); } let gs = crate::config::global_stack().unwrap(); Ok(gs .get("email")? 
.map(|email| email.extract::(py).unwrap())) } else { Ok(None) } }) .unwrap() } #[cfg(test)] mod tests { use super::*; use crate::controldir::AsFormat; #[test] fn test_zero_sha() { assert_eq!(ZERO_SHA.len(), 40); assert_eq!(ZERO_SHA, b"0000000000000000000000000000000000000000"); } #[test] fn test_remote_git_prober_new() { // This may return None if git plugin is not available let _prober = RemoteGitProber::new(); } #[test] fn test_remote_git_prober_debug() { if let Some(prober) = RemoteGitProber::new() { let debug_str = format!("{:?}", prober); assert!(debug_str.contains("RemoteGitProber")); } } #[test] fn test_bare_local_git_control_dir_format() { // This test will only pass if git plugin is available let result = std::panic::catch_unwind(|| BareLocalGitControlDirFormat::new()); if let Ok(format) = result { let _opt_format = format.as_format(); } } #[test] fn test_remote_git_prober_into_pyobject() { if let Some(prober) = RemoteGitProber::new() { Python::attach(|py| { let _pyobj = prober.into_pyobject(py).unwrap(); }); } } #[test] fn test_bare_local_git_into_pyobject() { let result = std::panic::catch_unwind(|| BareLocalGitControlDirFormat::new()); if let Ok(format) = result { Python::attach(|py| { let _pyobj = format.into_pyobject(py).unwrap(); }); } } #[serial_test::serial] #[test] // Ignored on Windows due to dulwich permission errors when creating .git directories in CI #[cfg_attr(target_os = "windows", ignore)] fn test_git_env() { let td = tempfile::tempdir().unwrap(); let cd = crate::controldir::create_standalone_workingtree(td.path(), "git").unwrap(); let old_name = std::env::var("GIT_COMMITTER_NAME").ok(); let old_email = std::env::var("GIT_COMMITTER_EMAIL").ok(); std::env::set_var("GIT_COMMITTER_NAME", "Some Git Committer"); std::env::set_var("GIT_COMMITTER_EMAIL", "committer@example.com"); let committer = get_committer(&cd).unwrap(); if let Some(old_name) = old_name { std::env::set_var("GIT_COMMITTER_NAME", old_name); } else { std::env::remove_var("GIT_COMMITTER_NAME"); } if let Some(old_email) = old_email { std::env::set_var("GIT_COMMITTER_EMAIL", old_email); } else { std::env::remove_var("GIT_COMMITTER_EMAIL"); } assert_eq!( "Some Git Committer ", committer.as_str() ); // Drop cd before td cleanup to release Python file handles (needed on Windows) drop(cd); } #[serial_test::serial] #[test] // Ignored on Windows due to dulwich permission errors when creating .git directories in CI #[cfg_attr(target_os = "windows", ignore)] fn test_git_config() { let td = tempfile::tempdir().unwrap(); let cd = crate::controldir::create_standalone_workingtree(td.path(), "git").unwrap(); std::fs::write( td.path().join(".git/config"), b"[user]\nname = Some Git Committer\nemail = other@example.com", ) .unwrap(); assert_eq!( get_committer(&cd).unwrap(), "Some Git Committer " ); // Drop cd before td cleanup to release Python file handles (needed on Windows) drop(cd); } } breezyshim-0.7.5/src/github.rs000064400000000000000000000013711046102023000144370ustar 00000000000000//! Basic support for interacting with GitHub. use pyo3::prelude::*; /// Retrieve a GitHub authentication token. pub fn retrieve_github_token() -> String { Python::attach(|py| { let m = py.import("breezy.plugins.github.forge").unwrap(); let token = m.call_method0("retrieve_github_token").unwrap(); token.extract().unwrap() }) } /// Login to GitHub using saved credentials. 
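///
/// This instantiates the plugin's `cmd_github_login` command and runs it.
/// Illustrative only (marked `ignore`):
///
/// ```ignore
/// breezyshim::github::login()?;
/// ```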
pub fn login() -> PyResult<()> { Python::attach(|py| { let m = py.import("breezy.plugins.github.cmds").unwrap(); let cmd = m.getattr("cmd_github_login").unwrap(); let cmd_gl = cmd.call0().unwrap(); cmd_gl.call_method0("_setup_outf").unwrap(); cmd_gl.call_method0("run").unwrap(); Ok(()) }) } breezyshim-0.7.5/src/gitlab.rs000064400000000000000000000007601046102023000144200ustar 00000000000000//! Basic support for interacting with GitLab use pyo3::prelude::*; /// Login to GitLab using saved credentials. pub fn login(url: &url::Url) -> PyResult<()> { Python::attach(|py| { let m = py.import("breezy.plugins.gitlab.cmds").unwrap(); let cmd = m.getattr("cmd_gitlab_login").unwrap(); let cmd_gl = cmd.call0().unwrap(); cmd_gl.call_method0("_setup_outf").unwrap(); cmd_gl.call_method1("run", (url.as_str(),)).unwrap(); Ok(()) }) } breezyshim-0.7.5/src/gpg.rs000064400000000000000000000176561046102023000137470ustar 00000000000000//! GPG related functions and types. use crate::repository::PyRepository; use crate::RevisionId; use pyo3::import_exception; use pyo3::prelude::*; #[derive(Debug)] /// Errors that can occur when working with GPG. pub enum Error { /// GPG is not installed on the system. GPGNotInstalled, } #[derive(Debug)] /// GPG signing modes. pub enum Mode { /// Normal signing mode. Normal, /// Detached signature mode. Detach, /// Clear signature mode. Clear, } #[derive(Debug)] /// Status of a GPG signature verification. pub enum Status { /// Signature is valid. Valid, /// Signature key is missing from the keyring. KeyMissing(String), /// Signature with the specified key is not valid. NotValid(String), /// Content is not signed. NotSigned, /// Signature key has expired. Expired(String), } import_exception!(breezy.gpg, GPGNotInstalled); impl From for Error { fn from(e: PyErr) -> Self { Python::attach(|py| { if e.is_instance_of::(py) { Error::GPGNotInstalled } else { panic!("unexpected exception: {:?}", e) } }) } } /// Strategy for handling GPG signatures. pub struct GPGStrategy(Py); impl GPGStrategy { fn to_object(&self) -> &Py { &self.0 } /// Create a new GPG strategy with the given branch configuration. pub fn new(branch_config: &crate::config::BranchConfig) -> Self { Python::attach(|py| { let gpg = PyModule::import(py, "breezy.gpg").unwrap(); let gpg_strategy = gpg.getattr("GPGStrategy").unwrap(); let branch_config = branch_config.clone().into_pyobject(py).unwrap().unbind(); let strategy = gpg_strategy.call1((branch_config,)).unwrap(); GPGStrategy(strategy.unbind()) }) } /// Set the GPG keys that are acceptable for validating signatures. pub fn set_acceptable_keys(&self, keys: &[String]) { Python::attach(|py| { self.0 .call_method1(py, "set_acceptable_keys", (keys.join(","),)) .unwrap(); }) } } impl<'py> IntoPyObject<'py> for GPGStrategy { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GPGStrategy { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GPGStrategy(ob.to_owned().unbind())) } } #[derive(Debug)] /// Result of verifying a GPG signature. pub enum VerificationResult { /// Signature is valid with the specified key. Valid(String), /// Signature uses a key that is missing from the keyring. KeyMissing(String), /// Signature with the given key is not valid. NotValid(String), /// Content is not signed with a GPG signature. NotSigned, /// Signature is from an expired key. 
Expired(String), } impl VerificationResult { /// Returns the key string for the signature if available. pub fn key(&self) -> Option<&str> { match self { VerificationResult::Valid(key) => Some(key), VerificationResult::KeyMissing(key) => Some(key), VerificationResult::NotValid(key) => Some(key), VerificationResult::Expired(key) => Some(key), _ => None, } } /// Check if the verification result indicates a valid signature. pub fn is_valid(&self) -> bool { matches!(self, VerificationResult::Valid(_)) } /// Check if the verification result indicates a missing key. pub fn is_key_missing(&self) -> bool { matches!(self, VerificationResult::KeyMissing(_)) } /// Check if the verification result indicates an invalid signature. pub fn is_not_valid(&self) -> bool { matches!(self, VerificationResult::NotValid(_)) } /// Check if the verification result indicates the content is not signed. pub fn is_not_signed(&self) -> bool { matches!(self, VerificationResult::NotSigned) } /// Check if the verification result indicates an expired key. pub fn is_expired(&self) -> bool { matches!(self, VerificationResult::Expired(_)) } } /// Bulk verify GPG signatures for a set of revisions. /// /// # Arguments /// /// * `repository` - The repository containing the revisions /// * `revids` - List of revision IDs to verify signatures for /// * `strategy` - GPG strategy to use for verification /// /// # Returns /// /// A vector of tuples containing revision IDs and their verification results pub fn bulk_verify_signatures( repository: &R, revids: &[&RevisionId], strategy: &GPGStrategy, ) -> Result, Error> { Python::attach(|py| { let gpg = PyModule::import(py, "breezy.gpg").unwrap(); let bulk_verify_signatures = gpg.getattr("bulk_verify_signatures").unwrap(); let r = bulk_verify_signatures .call1(( repository.to_object(py), revids .iter() .map(|r| (*r).clone().into_pyobject(py).unwrap()) .collect::>(), strategy.to_object(), )) .map_err(|e| -> Error { e.into() }) .unwrap(); let (_count, result, _all_verifiable) = r .extract::<(Py, Vec<(RevisionId, isize, String)>, bool)>() .unwrap(); let result: Vec<(RevisionId, VerificationResult)> = result .into_iter() .map(|(revid, status, key)| { let status = match status { 0 => VerificationResult::Valid(key), 1 => VerificationResult::KeyMissing(key), 2 => VerificationResult::NotValid(key), 3 => VerificationResult::NotSigned, 4 => VerificationResult::Expired(key), _ => panic!("unexpected status: {}", status), }; (revid, status) }) .collect::>(); Ok(result) }) } /// Context for interacting with GPG. pub struct GPGContext(Py); /// Represents a GPG key. pub struct GPGKey { /// Fingerprint of the GPG key. pub fpr: String, } impl<'a, 'py> FromPyObject<'a, 'py> for GPGKey { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GPGKey { fpr: ob.getattr("fpr").unwrap().extract().unwrap(), }) } } impl GPGContext { /// Create a new GPG context. pub fn new() -> Self { Python::attach(|py| { let gpg = PyModule::import(py, "gpg").unwrap(); let gpg_context = gpg.getattr("Context").unwrap(); let context = gpg_context.call0().unwrap(); GPGContext(context.unbind()) }) } /// List GPG keys. /// /// # Arguments /// /// * `secret` - If true, list only secret keys. Otherwise, list all keys. /// /// # Returns /// /// A vector of GPG keys. pub fn keylist(&self, secret: bool) -> Vec { Python::attach(|py| { self.0 .call_method1(py, "keylist", (secret,)) .unwrap() .extract::>(py) .unwrap() }) } /// Export the minimal form of a GPG key. 
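///
/// "Minimal" mirrors GPGME's `key_export_minimal`: the key is exported without
/// third-party signatures or other non-essential packets.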
/// /// # Arguments /// /// * `key` - The key ID or fingerprint to export /// /// # Returns /// /// The exported key data as a byte vector. pub fn key_export_minimal(&self, key: &str) -> Vec { Python::attach(|py| { self.0 .call_method1(py, "key_export_minimal", (key,)) .unwrap() .extract::>(py) .unwrap() }) } } breezyshim-0.7.5/src/graph/tests.rs000064400000000000000000000143171046102023000154240ustar 00000000000000use crate::graph::{Graph, GraphNode, Key}; use crate::revisionid::RevisionId; use pyo3::prelude::*; #[test] fn test_graph_node_trait_for_revision_id() { Python::attach(|py| { // Test RevisionId GraphNode implementation let rev_id = RevisionId::from(b"test-revision-id".to_vec()); let py_obj = rev_id.to_pyobject(py).unwrap(); // Should be bytes assert!(py_obj.is_instance_of::()); // Round trip let rev_id2 = RevisionId::from_pyobject(&py_obj).unwrap(); assert_eq!(rev_id, rev_id2); }); } #[test] fn test_graph_node_trait_for_key() { Python::attach(|py| { // Test Key GraphNode implementation let key = Key::from(vec!["file.txt".to_string(), "rev1".to_string()]); let py_obj = key.to_pyobject(py).unwrap(); // Should be a tuple assert!(py_obj.is_instance_of::()); // Round trip let key2 = Key::from_pyobject(&py_obj).unwrap(); assert_eq!(key, key2); }); } fn create_test_graph() -> Graph { Python::attach(|py| { // Create a mock graph for testing let graph_module = py.import("breezy.graph").unwrap(); let dict_parents_provider = graph_module.getattr("DictParentsProvider").unwrap(); // Create a simple graph structure: // null -> rev1 -> rev2 -> rev3 // \-> rev4 let parents_dict = pyo3::types::PyDict::new(py); parents_dict .set_item(b"rev1".as_slice(), pyo3::types::PyTuple::empty(py)) .unwrap(); parents_dict .set_item( b"rev2".as_slice(), pyo3::types::PyTuple::new(py, &[pyo3::types::PyBytes::new(py, b"rev1")]).unwrap(), ) .unwrap(); parents_dict .set_item( b"rev3".as_slice(), pyo3::types::PyTuple::new(py, &[pyo3::types::PyBytes::new(py, b"rev2")]).unwrap(), ) .unwrap(); parents_dict .set_item( b"rev4".as_slice(), pyo3::types::PyTuple::new(py, &[pyo3::types::PyBytes::new(py, b"rev1")]).unwrap(), ) .unwrap(); let parents_provider = dict_parents_provider.call1((parents_dict,)).unwrap(); let graph_class = graph_module.getattr("Graph").unwrap(); let graph = graph_class.call1((parents_provider,)).unwrap(); Graph::from(graph.unbind()) }) } #[test] fn test_is_ancestor() { crate::init(); let graph = create_test_graph(); let rev1 = RevisionId::from(b"rev1".to_vec()); let rev2 = RevisionId::from(b"rev2".to_vec()); let rev3 = RevisionId::from(b"rev3".to_vec()); let rev4 = RevisionId::from(b"rev4".to_vec()); // Test ancestor relationships assert!(graph.is_ancestor(&rev1, &rev2).unwrap()); assert!(graph.is_ancestor(&rev1, &rev3).unwrap()); assert!(graph.is_ancestor(&rev2, &rev3).unwrap()); assert!(graph.is_ancestor(&rev1, &rev4).unwrap()); // Test non-ancestor relationships assert!(!graph.is_ancestor(&rev2, &rev1).unwrap()); assert!(!graph.is_ancestor(&rev3, &rev1).unwrap()); assert!(!graph.is_ancestor(&rev4, &rev2).unwrap()); assert!(!graph.is_ancestor(&rev2, &rev4).unwrap()); } #[test] fn test_is_between() { crate::init(); let graph = create_test_graph(); let rev1 = RevisionId::from(b"rev1".to_vec()); let rev2 = RevisionId::from(b"rev2".to_vec()); let rev3 = RevisionId::from(b"rev3".to_vec()); // rev2 is between rev1 and rev3 assert!(graph.is_between(&rev2, &rev1, &rev3).unwrap()); // rev1 is not between rev2 and rev3 assert!(!graph.is_between(&rev1, &rev2, &rev3).unwrap()); } #[test] fn 
test_iter_lefthand_ancestry() { crate::init(); let graph = create_test_graph(); let rev3 = RevisionId::from(b"rev3".to_vec()); let rev1 = RevisionId::from(b"rev1".to_vec()); // Get ancestry from rev3 let ancestry: Vec<_> = graph .iter_lefthand_ancestry(&rev3, None) .unwrap() .collect::, _>>() .unwrap(); // Should contain rev3, rev2, rev1 in that order assert_eq!(ancestry.len(), 3); assert_eq!(ancestry[0], RevisionId::from(b"rev3".to_vec())); assert_eq!(ancestry[1], RevisionId::from(b"rev2".to_vec())); assert_eq!(ancestry[2], RevisionId::from(b"rev1".to_vec())); // Test with stop revision let ancestry_with_stop: Vec<_> = graph .iter_lefthand_ancestry(&rev3, Some(&[rev1])) .unwrap() .collect::, _>>() .unwrap(); // Should stop before rev1 assert_eq!(ancestry_with_stop.len(), 2); assert_eq!(ancestry_with_stop[0], RevisionId::from(b"rev3".to_vec())); assert_eq!(ancestry_with_stop[1], RevisionId::from(b"rev2".to_vec())); } #[test] fn test_heads() { crate::init(); let graph = create_test_graph(); let rev1 = RevisionId::from(b"rev1".to_vec()); let rev2 = RevisionId::from(b"rev2".to_vec()); let rev3 = RevisionId::from(b"rev3".to_vec()); let rev4 = RevisionId::from(b"rev4".to_vec()); // Heads of [rev1, rev2, rev3] should be [rev3] let heads = graph.heads(&[rev1.clone(), rev2, rev3.clone()]).unwrap(); assert_eq!(heads.len(), 1); assert!(heads.contains(&rev3)); // Heads of [rev1, rev3, rev4] should be [rev3, rev4] let heads2 = graph.heads(&[rev1, rev3.clone(), rev4.clone()]).unwrap(); assert_eq!(heads2.len(), 2); assert!(heads2.contains(&rev3)); assert!(heads2.contains(&rev4)); } #[test] fn test_get_parent_map() { crate::init(); let graph = create_test_graph(); let rev1 = RevisionId::from(b"rev1".to_vec()); let rev2 = RevisionId::from(b"rev2".to_vec()); let rev3 = RevisionId::from(b"rev3".to_vec()); let rev4 = RevisionId::from(b"rev4".to_vec()); let parent_map = graph .get_parent_map(&[rev1.clone(), rev2.clone(), rev3.clone(), rev4.clone()]) .unwrap(); // Check parent relationships assert_eq!(parent_map.get(&rev1).unwrap().len(), 0); // rev1 has no parents assert_eq!(parent_map.get(&rev2).unwrap(), &vec![rev1.clone()]); assert_eq!(parent_map.get(&rev3).unwrap(), &vec![rev2]); assert_eq!(parent_map.get(&rev4).unwrap(), &vec![rev1]); } breezyshim-0.7.5/src/graph.rs000064400000000000000000000553231046102023000142640ustar 00000000000000//! Graph traversal operations on revision graphs. use crate::revisionid::RevisionId; use pyo3::exceptions::PyStopIteration; use pyo3::prelude::*; use pyo3::types::{PyFrozenSet, PyIterator, PyTuple}; use std::collections::HashMap; use std::hash::Hash; /// Trait for types that can be used as nodes in a graph. /// /// This trait allows graph operations to work with any type that can be /// converted to a Python object and compared for equality. pub trait GraphNode: Eq + Hash + Clone { /// Convert this node to a Python object representation. fn to_pyobject<'py>(&self, py: Python<'py>) -> PyResult>; /// Create a node from a Python object. fn from_pyobject(obj: &Bound) -> PyResult; } /// Represents a graph of revisions. /// /// This struct provides methods for traversing and querying relationships /// between revisions in a version control repository. 
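///
/// # Example
///
/// A minimal sketch, assuming a working Breezy installation; it builds a trivial
/// graph through Python's `breezy.graph` module (as this crate's own tests do)
/// rather than opening a real repository:
///
/// ```no_run
/// use breezyshim::graph::Graph;
/// use breezyshim::RevisionId;
/// use pyo3::prelude::*;
///
/// breezyshim::init();
/// let graph: Graph = Python::attach(|py| {
///     let m = py.import("breezy.graph").unwrap();
///     // An empty parents map is enough to construct a Graph object.
///     let provider = m
///         .getattr("DictParentsProvider")
///         .unwrap()
///         .call1((pyo3::types::PyDict::new(py),))
///         .unwrap();
///     let g = m.getattr("Graph").unwrap().call1((provider,)).unwrap();
///     Graph::from(g.unbind())
/// });
/// let rev_a = RevisionId::from(b"rev-a".to_vec());
/// let rev_b = RevisionId::from(b"rev-b".to_vec());
/// let _is_ancestor = graph.is_ancestor(&rev_a, &rev_b);
/// ```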
pub struct Graph(Py); impl<'py> IntoPyObject<'py> for Graph { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for Graph { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Graph(ob.to_owned().unbind())) } } impl From> for Graph { fn from(ob: Py) -> Self { Graph(ob) } } /// Implement GraphNode for RevisionId impl GraphNode for RevisionId { fn to_pyobject<'py>(&self, py: Python<'py>) -> PyResult> { Ok(self.as_bytes().into_pyobject(py)?.into_any()) } fn from_pyobject(obj: &Bound) -> PyResult { let bytes: Vec = obj.extract()?; Ok(RevisionId::from(bytes)) } } struct NodeIter(Py, std::marker::PhantomData); impl Iterator for NodeIter { type Item = Result; fn next(&mut self) -> Option { Python::attach(|py| match self.0.call_method0(py, "__next__") { Ok(item) => match T::from_pyobject(item.bind(py)) { Ok(node) => Some(Ok(node)), Err(e) => Some(Err(e.into())), }, Err(e) if e.is_instance_of::(py) => None, Err(e) => Some(Err(e.into())), }) } } struct TopoOrderIter(Py, std::marker::PhantomData); impl Iterator for TopoOrderIter { type Item = Result<(usize, T, usize, bool), crate::error::Error>; fn next(&mut self) -> Option { Python::attach(|py| match self.0.call_method0(py, "__next__") { Ok(item) => { let tuple = match item.bind(py).cast::() { Ok(t) => t, Err(e) => return Some(Err(PyErr::from(e).into())), }; if tuple.len() != 4 { return Some(Err(pyo3::exceptions::PyValueError::new_err( "Expected 4-tuple from iter_topo_order", ) .into())); } match ( tuple.get_item(0).and_then(|i| i.extract::()), tuple.get_item(1).and_then(|i| T::from_pyobject(&i)), tuple.get_item(2).and_then(|i| i.extract::()), tuple.get_item(3).and_then(|i| i.extract::()), ) { (Ok(seq), Ok(node), Ok(depth), Ok(eom)) => Some(Ok((seq, node, depth, eom))), _ => Some(Err(pyo3::exceptions::PyValueError::new_err( "Failed to extract values from topo_order tuple", ) .into())), } } Err(e) if e.is_instance_of::(py) => None, Err(e) => Some(Err(e.into())), }) } } impl Graph { /// Get the underlying Py. pub(crate) fn as_pyobject(&self) -> &Py { &self.0 } /// Check if one node is an ancestor of another. /// /// # Arguments /// /// * `node1` - The potential ancestor node /// * `node2` - The potential descendant node /// /// # Returns /// /// `true` if `node1` is an ancestor of `node2`, `false` otherwise pub fn is_ancestor( &self, node1: &T, node2: &T, ) -> Result { Python::attach(|py| { let result = self.0.call_method1( py, "is_ancestor", (node1.to_pyobject(py)?, node2.to_pyobject(py)?), )?; Ok(result.extract(py)?) }) } /// Iterate through the left-hand ancestry of a node. /// /// # Arguments /// /// * `node` - The node to start from /// * `stop_nodes` - Optional list of nodes where iteration should stop /// /// # Returns /// /// An iterator that yields nodes in the ancestry chain pub fn iter_lefthand_ancestry( &self, node: &T, stop_nodes: Option<&[T]>, ) -> Result>, crate::error::Error> { Python::attach(|py| { let stop_py = if let Some(nodes) = stop_nodes { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); Some(py_nodes?) } else { None }; let iter = self.0.call_method1( py, "iter_lefthand_ancestry", (node.to_pyobject(py)?, stop_py), )?; Ok(NodeIter(iter, std::marker::PhantomData)) }) } /// Find the least common ancestor(s) of a set of nodes. 
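///
/// For example (a hedged sketch; `graph`, `rev_a` and `rev_b` are assumed to
/// already exist):
///
/// ```ignore
/// // Candidate merge bases for two revisions:
/// let bases = graph.find_lca(&[rev_a, rev_b])?;
/// ```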
/// /// # Arguments /// /// * `nodes` - A list of nodes to find the LCA for /// /// # Returns /// /// A vector of nodes that are the least common ancestors pub fn find_lca(&self, nodes: &[T]) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self.0.call_method1(py, "find_lca", (py_nodes?,))?; let py_set = result .cast_bound::(py) .map_err(PyErr::from)?; let mut lca_nodes = Vec::new(); for item in py_set { lca_nodes.push(T::from_pyobject(&item)?) } Ok(lca_nodes) }) } /// Get the heads from a set of nodes. /// /// Heads are nodes that are not ancestors of any other node in the set. /// /// # Arguments /// /// * `nodes` - List of nodes to find heads from /// /// # Returns /// /// A vector of nodes that are heads pub fn heads(&self, nodes: &[T]) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self.0.call_method1(py, "heads", (py_nodes?,))?; let py_set = result .cast_bound::(py) .map_err(PyErr::from)?; let mut head_nodes = Vec::new(); for item in py_set { head_nodes.push(T::from_pyobject(&item)?) } Ok(head_nodes) }) } /// Find the unique ancestors of one set of nodes that are not ancestors of another set. /// /// # Arguments /// /// * `nodes` - List of nodes to check /// * `common_nodes` - List of common nodes to exclude /// /// # Returns /// /// A vector of nodes that are unique ancestors pub fn find_unique_ancestors( &self, nodes: &[T], common_nodes: &[T], ) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let py_common: Result, _> = common_nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self.0 .call_method1(py, "find_unique_ancestors", (py_nodes?, py_common?))?; let py_list = result .cast_bound::(py) .map_err(PyErr::from)?; let mut unique_ancestors = Vec::new(); for item in py_list { unique_ancestors.push(T::from_pyobject(&item)?) } Ok(unique_ancestors) }) } /// Find the difference between two sets of nodes. /// /// # Arguments /// /// * `left_nodes` - The left set of nodes /// * `right_nodes` - The right set of nodes /// /// # Returns /// /// A tuple of (nodes only in left, nodes only in right) pub fn find_difference( &self, left_nodes: &[T], right_nodes: &[T], ) -> Result<(Vec, Vec), crate::error::Error> { Python::attach(|py| { let py_left: Result, _> = left_nodes.iter().map(|n| n.to_pyobject(py)).collect(); let py_right: Result, _> = right_nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self .0 .call_method1(py, "find_difference", (py_left?, py_right?))?; let tuple = result.cast_bound::(py).map_err(PyErr::from)?; let left_only = tuple.get_item(0)?; let right_only = tuple.get_item(1)?; let mut left_result = Vec::new(); for item in left_only .cast::() .map_err(PyErr::from)? { left_result.push(T::from_pyobject(&item)?); } let mut right_result = Vec::new(); for item in right_only .cast::() .map_err(PyErr::from)? { right_result.push(T::from_pyobject(&item)?); } Ok((left_result, right_result)) }) } /// Iterate through ancestry of given nodes. 
/// /// # Arguments /// /// * `nodes` - List of nodes to get ancestry for /// /// # Returns /// /// An iterator that yields nodes in the ancestry pub fn iter_ancestry( &self, nodes: &[T], ) -> Result>, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let iter = self.0.call_method1(py, "iter_ancestry", (py_nodes?,))?; Ok(NodeIter(iter, std::marker::PhantomData)) }) } /// Get the parent map for a set of nodes. /// /// # Arguments /// /// * `nodes` - List of nodes to get parents for /// /// # Returns /// /// A map from node to list of parent nodes pub fn get_parent_map( &self, nodes: &[T], ) -> Result>, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self.0.call_method1(py, "get_parent_map", (py_nodes?,))?; let py_dict = result .cast_bound::(py) .map_err(PyErr::from)?; let mut parent_map = HashMap::new(); for (key, value) in py_dict { let key_node = T::from_pyobject(&key)?; let mut parents = Vec::new(); for parent in value.cast::().map_err(PyErr::from)? { parents.push(T::from_pyobject(&parent)?); } parent_map.insert(key_node, parents); } Ok(parent_map) }) } /// Check if a node is between two other nodes. /// /// # Arguments /// /// * `candidate` - The node to check /// * `ancestor` - The potential ancestor /// * `descendant` - The potential descendant /// /// # Returns /// /// `true` if `candidate` is between `ancestor` and `descendant` pub fn is_between( &self, candidate: &T, ancestor: &T, descendant: &T, ) -> Result { Python::attach(|py| { let result = self.0.call_method1( py, "is_between", ( candidate.to_pyobject(py)?, ancestor.to_pyobject(py)?, descendant.to_pyobject(py)?, ), )?; Ok(result.extract(py)?) }) } /// Iterate through nodes in topological order. /// /// # Arguments /// /// * `nodes` - List of nodes to order /// /// # Returns /// /// An iterator that yields (sequence_number, node, depth, end_of_merge) pub fn iter_topo_order( &self, nodes: &[T], ) -> Result< impl Iterator>, crate::error::Error, > { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let iter = self.0.call_method1(py, "iter_topo_order", (py_nodes?,))?; Ok(TopoOrderIter(iter, std::marker::PhantomData)) }) } /// Find all descendants of the given nodes. /// /// # Arguments /// /// * `nodes` - List of nodes to find descendants for /// /// # Returns /// /// A vector of nodes that are descendants pub fn find_descendants( &self, nodes: &[T], ) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self.0.call_method1(py, "find_descendants", (py_nodes?,))?; let py_set = result .cast_bound::(py) .map_err(PyErr::from)?; let mut descendants = Vec::new(); for item in py_set { descendants.push(T::from_pyobject(&item)?); } Ok(descendants) }) } /// Find the distance from nodes to null. 
/// /// # Arguments /// /// * `nodes` - List of nodes to find distance for /// /// # Returns /// /// A map from node to distance pub fn find_distance_to_null( &self, nodes: &[T], ) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self .0 .call_method1(py, "find_distance_to_null", (py_nodes?,))?; let py_dict = result .cast_bound::(py) .map_err(PyErr::from)?; let mut distance_map = HashMap::new(); for (key, value) in py_dict { let key_node = T::from_pyobject(&key)?; let distance: usize = value.extract()?; distance_map.insert(key_node, distance); } Ok(distance_map) }) } /// Find the unique least common ancestor. /// /// # Arguments /// /// * `nodes` - List of nodes to find unique LCA for /// * `count` - The number of heads to look for (optional) /// /// # Returns /// /// The unique LCA node or None if there isn't a unique one pub fn find_unique_lca( &self, nodes: &[T], count: Option, ) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = if let Some(c) = count { self.0.call_method1(py, "find_unique_lca", (py_nodes?, c))? } else { self.0.call_method1(py, "find_unique_lca", (py_nodes?,))? }; if result.is_none(py) { Ok(None) } else { Ok(Some(T::from_pyobject(result.bind(py))?)) } }) } /// Find merge order for nodes. /// /// # Arguments /// /// * `nodes` - List of nodes to find merge order for /// /// # Returns /// /// An ordered list of nodes pub fn find_merge_order( &self, nodes: &[T], ) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self.0.call_method1(py, "find_merge_order", (py_nodes?,))?; let py_list = result .cast_bound::(py) .map_err(PyErr::from)?; let mut merge_order = Vec::new(); for item in py_list { merge_order.push(T::from_pyobject(&item)?); } Ok(merge_order) }) } /// Find the lefthand merger of a node. /// /// # Arguments /// /// * `node` - Node to find merger for /// * `tip` - Optional tip node /// /// # Returns /// /// The lefthand merger node pub fn find_lefthand_merger( &self, node: &T, tip: Option<&T>, ) -> Result, crate::error::Error> { Python::attach(|py| { let args = if let Some(t) = tip { (node.to_pyobject(py)?, t.to_pyobject(py)?) } else { (node.to_pyobject(py)?, py.None().into_bound(py)) }; let result = self.0.call_method1(py, "find_lefthand_merger", args)?; if result.is_none(py) { Ok(None) } else { Ok(Some(T::from_pyobject(result.bind(py))?)) } }) } /// Find lefthand distances for nodes. /// /// # Arguments /// /// * `nodes` - List of nodes to find distances for /// /// # Returns /// /// A map from node to distance pub fn find_lefthand_distances( &self, nodes: &[T], ) -> Result, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self .0 .call_method1(py, "find_lefthand_distances", (py_nodes?,))?; let py_dict = result .cast_bound::(py) .map_err(PyErr::from)?; let mut distance_map = HashMap::new(); for (key, value) in py_dict { let key_node = T::from_pyobject(&key)?; let distance: usize = value.extract()?; distance_map.insert(key_node, distance); } Ok(distance_map) }) } /// Get the child map for a set of nodes. 
/// /// # Arguments /// /// * `nodes` - List of nodes to get children for /// /// # Returns /// /// A map from node to list of child nodes pub fn get_child_map( &self, nodes: &[T], ) -> Result>, crate::error::Error> { Python::attach(|py| { let py_nodes: Result, _> = nodes.iter().map(|n| n.to_pyobject(py)).collect(); let result = self.0.call_method1(py, "get_child_map", (py_nodes?,))?; let py_dict = result .cast_bound::(py) .map_err(PyErr::from)?; let mut child_map = HashMap::new(); for (key, value) in py_dict { let key_node = T::from_pyobject(&key)?; let mut children = Vec::new(); for child in value.cast::().map_err(PyErr::from)? { children.push(T::from_pyobject(&child)?); } child_map.insert(key_node, children); } Ok(child_map) }) } } /// A key identifying a specific version of a file #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Key(Vec); impl From> for Key { fn from(v: Vec) -> Self { Key(v) } } impl From for Vec { fn from(k: Key) -> Self { k.0 } } impl<'py> IntoPyObject<'py> for Key { type Target = PyTuple; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { PyTuple::new(py, self.0) } } impl<'a, 'py> FromPyObject<'a, 'py> for Key { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let tuple = ob.cast::()?; let mut items = Vec::new(); for item in tuple.iter() { items.push(item.extract::()?); } Ok(Key(items)) } } /// Implement GraphNode for Key impl GraphNode for Key { fn to_pyobject<'py>(&self, py: Python<'py>) -> PyResult> { Ok(PyTuple::new(py, &self.0)?.into_any()) } fn from_pyobject(obj: &Bound) -> PyResult { obj.extract::() } } /// A known graph of file versions pub struct KnownGraph(Py); impl KnownGraph { /// Create a new KnownGraph from a Python object pub fn new(py_obj: Py) -> Self { Self(py_obj) } /// Get the heads of the given nodes pub fn heads(&self, nodes: Vec) -> Result, crate::error::Error> { Python::attach(|py| { let nodes_py: Vec<_> = nodes .into_iter() .map(|n| n.to_pyobject(py)) .collect::, _>>()?; let nodes_frozenset = PyFrozenSet::new(py, &nodes_py)?; let result = self.0.call_method1(py, "heads", (nodes_frozenset,))?; let mut heads = Vec::new(); for head_py in result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected iterator"))? { let head = T::from_pyobject(&head_py?)?; heads.push(head); } Ok(heads) }) } } impl Clone for KnownGraph { fn clone(&self) -> Self { Python::attach(|py| KnownGraph(self.0.clone_ref(py))) } } #[cfg(test)] mod tests; breezyshim-0.7.5/src/groupcompress.rs000064400000000000000000000234421046102023000160700ustar 00000000000000//! 
Group compression versioned files implementation #![allow(missing_docs)] use crate::error::Error; use crate::graph::Key; use crate::versionedfiles::PyVersionedFiles; use pyo3::prelude::*; pub struct GroupCompressVersionedFiles(Py); impl GroupCompressVersionedFiles { pub fn new(py_obj: Py) -> Self { Self(py_obj) } pub fn from_transport( py: Python, transport: &crate::transport::Transport, index: Option>, delta: bool, _is_locked: impl Fn() -> bool + 'static, track_external_parent_refs: bool, track_anomalous_cross_references: bool, use_chk_index: bool, ) -> PyResult { let gc_mod = py.import("breezy.bzr.groupcompress")?; let gcvf_cls = gc_mod.getattr("GroupCompressVersionedFiles")?; let kwargs = pyo3::types::PyDict::new(py); kwargs.set_item("delta", delta)?; // For testing, we can pass None for is_locked and let Python handle it kwargs.set_item("is_locked", py.None())?; kwargs.set_item("track_external_parent_refs", track_external_parent_refs)?; kwargs.set_item( "track_anomalous_cross_references", track_anomalous_cross_references, )?; kwargs.set_item("use_chk_index", use_chk_index)?; let transport_obj = transport.as_pyobject().clone_ref(py); let args = if let Some(idx) = index { (transport_obj, idx) } else { (transport_obj, py.None()) }; let obj = gcvf_cls.call(args, Some(&kwargs))?; Ok(GroupCompressVersionedFiles(obj.unbind())) } pub fn without_fallbacks(&self) -> Result { Python::attach(|py| { let obj = self.0.call_method0(py, "without_fallbacks")?; Ok(GroupCompressVersionedFiles(obj)) }) } pub fn get_missing_compression_parent_keys(&self) -> Result, Error> { Python::attach(|py| { let result = self .0 .call_method0(py, "get_missing_compression_parent_keys")?; let keys_iter = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected iterator"))?; let mut keys = Vec::new(); for key_py in keys_iter { let key = key_py?.extract::()?; keys.push(key); } Ok(keys) }) } } impl Clone for GroupCompressVersionedFiles { fn clone(&self) -> Self { Python::attach(|py| GroupCompressVersionedFiles(self.0.clone_ref(py))) } } impl PyVersionedFiles for GroupCompressVersionedFiles { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl<'py> IntoPyObject<'py> for GroupCompressVersionedFiles { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GroupCompressVersionedFiles { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GroupCompressVersionedFiles(ob.to_owned().unbind())) } } pub struct GroupCompressor(Py); impl GroupCompressor { pub fn new(py: Python) -> PyResult { let gc_mod = py.import("breezy.bzr.groupcompress")?; let gc_cls = gc_mod.getattr("GroupCompressor")?; let obj = gc_cls.call0()?; Ok(GroupCompressor(obj.unbind())) } /// Create a test instance that properly handles chunks #[cfg(test)] pub fn new_for_testing(py: Python) -> PyResult { Self::new(py) } pub fn compress( &self, key: &Key, lines: Vec<&str>, expected_sha: Option<&str>, soft: bool, ) -> Result<(Option, usize, Option), Error> { Python::attach(|py| { // Convert lines to bytes as GroupCompressor expects bytes let lines_bytes: Vec<_> = lines .iter() .map(|line| pyo3::types::PyBytes::new(py, line.as_bytes())) .collect(); let lines_list = pyo3::types::PyList::new(py, &lines_bytes)?; // Calculate total length let length: usize = lines.iter().map(|l| l.len()).sum(); let expected_sha_arg: Py = if let Some(sha) = expected_sha { 
pyo3::types::PyBytes::new(py, sha.as_bytes()) .unbind() .into() } else { py.None() }; let result = self.0.call_method1( py, "compress", ( key.clone().into_pyobject(py)?, lines_list, length, expected_sha_arg, soft, ), )?; let tuple = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected tuple"))?; // compress returns (sha1, start_offset, end_offset, type) let sha1 = if tuple.get_item(0)?.is_none() { None } else { let item0 = tuple.get_item(0)?; let sha_bytes = item0 .cast::() .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected bytes"))?; Some( std::str::from_utf8(sha_bytes.as_bytes()) .map_err(|_| { pyo3::exceptions::PyValueError::new_err("Invalid UTF-8 in SHA1") })? .to_string(), ) }; let _start_offset = tuple.get_item(1)?.extract::()?; let _end_offset = tuple.get_item(2)?.extract::()?; let _type = tuple.get_item(3)?.extract::()?; // GroupCompressor doesn't return a record from compress, only from flush // Return the input length since that's what was compressed Ok((sha1, length, None)) }) } pub fn flush(&self) -> Result { Python::attach(|py| { let record = self.0.call_method0(py, "flush")?; Ok(CompressorRecord(record)) }) } } impl Clone for GroupCompressor { fn clone(&self) -> Self { Python::attach(|py| GroupCompressor(self.0.clone_ref(py))) } } pub struct CompressorRecord(Py); impl CompressorRecord { pub fn to_chunks(&self) -> Result<(usize, Vec>), Error> { Python::attach(|py| { let result = self.0.call_method0(py, "to_chunks")?; let tuple = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected tuple"))?; let total_bytes = tuple.get_item(0)?.extract::()?; let chunks_item = tuple.get_item(1)?; let chunks_list = chunks_item .cast::() .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected list"))?; let mut chunks = Vec::new(); for chunk_py in chunks_list { let chunk_bytes = chunk_py .cast::() .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected bytes"))?; chunks.push(chunk_bytes.as_bytes().to_vec()); } Ok((total_bytes, chunks)) }) } pub fn to_bytes(&self) -> Result, Error> { Python::attach(|py| { let result = self.0.call_method0(py, "to_bytes")?; let bytes = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected bytes"))?; Ok(bytes.as_bytes().to_vec()) }) } } impl Clone for CompressorRecord { fn clone(&self) -> Self { Python::attach(|py| CompressorRecord(self.0.clone_ref(py))) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_group_compressor_basic() { crate::init(); crate::init_bzr(); pyo3::Python::attach(|py| { let compressor = GroupCompressor::new_for_testing(py).unwrap(); let key = Key::from(vec!["file1".to_string()]); let lines = vec!["line1\n", "line2\n", "line3\n"]; let expected_length: usize = lines.iter().map(|l| l.len()).sum(); let (sha1, length, record) = compressor.compress(&key, lines, None, false).unwrap(); assert!(sha1.is_some()); assert_eq!(length, expected_length); assert!(record.is_none()); // compress doesn't return a record }); } #[test] fn test_group_compressor_simple() { crate::init(); crate::init_bzr(); pyo3::Python::attach(|py| { let compressor = GroupCompressor::new_for_testing(py).unwrap(); let key = Key::from(vec!["file2".to_string()]); let content = "test content\n"; let lines = vec![content]; let (sha1, length, record) = compressor.compress(&key, lines, None, false).unwrap(); assert!(sha1.is_some()); assert_eq!(length, content.len()); assert!(record.is_none()); // compress doesn't return a record // Test flush to get the record let 
flush_record = compressor.flush().unwrap(); let (total_bytes, chunks) = flush_record.to_chunks().unwrap(); assert!(total_bytes > 0); assert!(!chunks.is_empty()); }); } } breezyshim-0.7.5/src/hooks.rs000064400000000000000000000045301046102023000143000ustar 00000000000000//! Hooks use pyo3::prelude::*; /// Dictionary-like container for Breezy hooks. pub struct HookDict(Py); /// Represents an individual hook function. pub struct Hook(Py); impl HookDict { /// Create a new hook dictionary. /// /// # Arguments /// /// * `module` - The Python module containing the hook point /// * `cls` - The class name within the module /// * `name` - The name of the hook point pub fn new(module: &str, cls: &str, name: &str) -> Self { Python::attach(|py| -> PyResult { let module = PyModule::import(py, module)?; let cls = module.getattr(cls)?; let entrypoint = cls.getattr(name)?; Ok(Self(entrypoint.unbind())) }) .unwrap() } /// Clear all hooks registered for a given name. /// /// # Arguments /// /// * `name` - The name of the hook point /// /// # Returns /// /// `Ok(())` on success, or an error if the operation fails pub fn clear(&self, name: &str) -> Result<(), crate::error::Error> { Python::attach(|py| { let entrypoint = self.0.bind(py).get_item(name)?; entrypoint.call_method0("clear")?; Ok(()) }) } /// Add a hook function for a given name. /// /// # Arguments /// /// * `name` - The name of the hook point /// * `func` - The hook function to add /// /// # Returns /// /// `Ok(())` on success, or an error if the operation fails pub fn add(&self, name: &str, func: Hook) -> Result<(), crate::error::Error> { Python::attach(|py| { let entrypoint = self.0.bind(py).get_item(name)?; entrypoint.call_method1("add", (func.0,))?; Ok(()) }) } /// Get all hook functions registered for a given name. /// /// # Arguments /// /// * `name` - The name of the hook point /// /// # Returns /// /// A vector of hook functions, or an error if the operation fails pub fn get(&self, name: &str) -> Result, crate::error::Error> { Python::attach(|py| { let entrypoint = self.0.bind(py).get_item(name)?; Ok(entrypoint .extract::>>()? .into_iter() .map(Hook) .collect()) }) } } breezyshim-0.7.5/src/interrepository.rs000064400000000000000000000156751046102023000164520ustar 00000000000000//! Operations between repositories. use crate::error::Error; use crate::repository::{GenericRepository, PyRepository}; use crate::RevisionId; use pyo3::prelude::*; use pyo3::types::{PyBytes, PyDict}; use std::collections::HashMap; /// Trait for types that can be converted to Python InterRepository objects. /// /// This trait is implemented by types that represent a Breezy InterRepository, /// which handles operations between repositories. pub trait PyInterRepository: for<'py> IntoPyObject<'py> + std::any::Any + std::fmt::Debug { /// Get the underlying Python object for this inter-repository. fn to_object(&self, py: Python<'_>) -> Py; } /// Generic wrapper for a Python InterRepository object. /// /// This struct provides a Rust interface to a Breezy InterRepository object. 
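///
/// A `GenericInterRepository` is normally obtained through [`get`]. A hedged
/// sketch, where `source` and `target` are assumed to be existing values
/// implementing [`crate::repository::PyRepository`]:
///
/// ```ignore
/// let inter = breezyshim::interrepository::get(&source, &target)?;
/// let src_repo = inter.get_source();
/// let dst_repo = inter.get_target();
/// ```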
pub struct GenericInterRepository(Py); impl<'py> IntoPyObject<'py> for GenericInterRepository { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericInterRepository { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericInterRepository(obj.to_owned().unbind())) } } impl PyInterRepository for GenericInterRepository { fn to_object(&self, py: Python<'_>) -> Py { self.0.clone_ref(py) } } impl GenericInterRepository { /// Create a new GenericInterRepository from a Python object. /// /// # Arguments /// /// * `obj` - The Python object representing a Breezy InterRepository /// /// # Returns /// /// A new GenericInterRepository wrapping the provided Python object pub fn new(obj: Py) -> Self { Self(obj) } } impl std::fmt::Debug for GenericInterRepository { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("GenericInterRepository({:?})", self.0)) } } /// Get an InterRepository for operations between two repositories. /// /// # Arguments /// /// * `source` - The source repository /// * `target` - The target repository /// /// # Returns /// /// A boxed InterRepository trait object that can perform operations between the repositories /// /// # Errors /// /// Returns an error if the operation fails, such as if the repositories are incompatible pub fn get( source: &S, target: &T, ) -> Result, Error> { Python::attach(|py| { let m = py.import("breezy.repository")?; let interrepo = m.getattr("InterRepository")?; let inter_repository = interrepo.call_method1("get", (source.to_object(py), target.to_object(py)))?; Ok( Box::new(GenericInterRepository::new(inter_repository.unbind())) as Box, ) }) } /// Trait for operations between repositories. /// /// This trait defines the operations that can be performed between two repositories, /// such as fetching revisions from one repository to another. pub trait InterRepository: std::fmt::Debug { /// Get the source repository. /// /// # Returns /// /// The source repository fn get_source(&self) -> GenericRepository; /// Get the target repository. /// /// # Returns /// /// The target repository fn get_target(&self) -> GenericRepository; /// Fetch references from the source repository to the target repository. 
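///
/// For example, a callback that leaves every advertised ref unchanged (a hedged
/// sketch; `inter` is assumed to be an existing `InterRepository` between two
/// Git repositories, and `Refs` is a local alias introduced here only for
/// readability):
///
/// ```ignore
/// use std::collections::HashMap;
/// use std::sync::Mutex;
/// use breezyshim::RevisionId;
///
/// type Refs = HashMap<Vec<u8>, (Vec<u8>, Option<RevisionId>)>;
/// // Return the refs unchanged, i.e. fetch everything as advertised.
/// let keep_refs: Mutex<Box<dyn FnMut(&Refs) -> Refs + Send>> =
///     Mutex::new(Box::new(|refs: &Refs| refs.clone()));
/// inter.fetch_refs(keep_refs, false, false)?;
/// ```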
/// /// # Arguments /// /// * `get_changed_refs` - A mutex-protected function to get the references to fetch /// * `lossy` - If true, lossy conversion is allowed /// * `overwrite` - If true, existing references can be overwritten /// /// # Returns /// /// Ok(()) on success, or an error if the operation fails // TODO: This should really be on InterGitRepository fn fetch_refs( &self, get_changed_refs: std::sync::Mutex< Box< dyn FnMut( &HashMap, (Vec, Option)>, ) -> HashMap, (Vec, Option)> + Send, >, >, lossy: bool, overwrite: bool, ) -> Result<(), Error>; } impl InterRepository for T { fn get_source(&self) -> GenericRepository { Python::attach(|py| -> PyResult { let source = self.to_object(py).getattr(py, "source")?; Ok(GenericRepository::new(source)) }) .unwrap() } fn get_target(&self) -> GenericRepository { Python::attach(|py| -> PyResult { let target = self.to_object(py).getattr(py, "target")?; Ok(GenericRepository::new(target)) }) .unwrap() } // TODO: This should really be on InterGitRepository fn fetch_refs( &self, get_changed_refs: std::sync::Mutex< Box< dyn FnMut( &HashMap, (Vec, Option)>, ) -> HashMap, (Vec, Option)> + Send, >, >, lossy: bool, overwrite: bool, ) -> Result<(), Error> { Python::attach(|py| { let get_changed_refs = pyo3::types::PyCFunction::new_closure(py, None, None, move |args, _kwargs| { let refs = args .extract::<(HashMap, (Vec, Option)>,)>() .unwrap() .0; // Call get_changed_refs let result = if let Ok(mut get_changed_refs) = get_changed_refs.lock() { get_changed_refs(&refs) } else { refs }; Python::attach(|py| -> PyResult> { let ret = PyDict::new(py); for (k, (v, r)) in result { ret.set_item( PyBytes::new(py, k.as_slice()), ( PyBytes::new(py, v.as_slice()), r.map(|r| r.into_pyobject(py).unwrap().unbind()), ), )?; } // We need to change the return type since pyo3::Python can't be sent between // threads Ok(ret.unbind().into()) }) }) .unwrap(); self.to_object(py).call_method1( py, "fetch_refs", (get_changed_refs, lossy, overwrite), )?; Ok(()) }) } } breezyshim-0.7.5/src/intertree.rs000064400000000000000000000027551046102023000151650ustar 00000000000000//! Operations between two trees. use crate::delta::TreeDelta; use pyo3::prelude::*; /// Represents operations between two trees. /// /// InterTree allows comparing and performing operations between two trees, /// such as finding differences or applying changes from one tree to another. pub struct InterTree(Py); /// Get an InterTree for operations between two trees. /// /// # Arguments /// /// * `source` - The source tree /// * `target` - The target tree /// /// # Returns /// /// An InterTree object that can be used to perform operations between the trees pub fn get(source: &S, target: &T) -> InterTree { Python::attach(|py| { let source = source.to_object(py); let target = target.to_object(py); let intertree_cls = py .import("breezy.tree") .unwrap() .getattr("InterTree") .unwrap(); InterTree( intertree_cls .call_method1("get", (source, target)) .unwrap() .unbind(), ) }) } impl InterTree { /// Compare the source and target trees. /// /// # Returns /// /// A TreeDelta representing the differences between the source and target trees pub fn compare(&self) -> TreeDelta { Python::attach(|py| { self.0 .call_method0(py, "compare") .unwrap() .extract(py) .unwrap() }) } } breezyshim-0.7.5/src/knit.rs000064400000000000000000000147621046102023000141320ustar 00000000000000//! 
Knit versioned files implementation #![allow(missing_docs)] use crate::graph::Key; use crate::versionedfiles::PyVersionedFiles; use pyo3::prelude::*; pub struct KnitVersionedFiles(Py); impl KnitVersionedFiles { pub fn new(py_obj: Py) -> Self { Self(py_obj) } pub fn from_transport( py: Python, transport: &crate::transport::Transport, file_mode: Option, dir_mode: Option, access_mode: Option<&str>, ) -> PyResult { let knit_mod = py.import("breezy.bzr.knit")?; let kvf_cls = knit_mod.getattr("KnitVersionedFiles")?; let kwargs = pyo3::types::PyDict::new(py); if let Some(mode) = file_mode { kwargs.set_item("file_mode", mode)?; } if let Some(mode) = dir_mode { kwargs.set_item("dir_mode", mode)?; } if let Some(mode) = access_mode { kwargs.set_item("access_mode", mode)?; } let obj = kvf_cls.call((transport.as_pyobject().clone_ref(py),), Some(&kwargs))?; Ok(KnitVersionedFiles(obj.unbind())) } } impl Clone for KnitVersionedFiles { fn clone(&self) -> Self { Python::attach(|py| KnitVersionedFiles(self.0.clone_ref(py))) } } impl PyVersionedFiles for KnitVersionedFiles { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl<'py> IntoPyObject<'py> for KnitVersionedFiles { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for KnitVersionedFiles { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(KnitVersionedFiles(ob.to_owned().unbind())) } } pub struct KnitPlainFactory { pub key: Key, pub parents: Vec, pub sha1: Option, pub delta: Option>, } impl KnitPlainFactory { pub fn new(key: Key, parents: Vec, sha1: Option, delta: Option>) -> Self { Self { key, parents, sha1, delta, } } } impl<'py> IntoPyObject<'py> for KnitPlainFactory { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let knit_mod = py.import("breezy.bzr.knit")?; let factory_cls = knit_mod.getattr("KnitPlainFactory")?; // Create empty factory let factory = factory_cls.call0()?; // Set attributes factory.setattr("key", self.key.into_pyobject(py)?)?; let parent_tuples: Vec<_> = self .parents .into_iter() .map(|p| p.into_pyobject(py)) .collect::, _>>()?; let parents_py = pyo3::types::PyTuple::new(py, parent_tuples)?; factory.setattr("parents", parents_py)?; if let Some(sha1) = self.sha1 { factory.setattr("sha1", pyo3::types::PyBytes::new(py, sha1.as_bytes()))?; } if let Some(delta) = self.delta { factory.setattr("delta", pyo3::types::PyBytes::new(py, &delta))?; } Ok(factory) } } pub struct KnitAnnotateFactory { pub key: Key, pub parents: Vec, pub annotated_lines: Vec<(Key, String)>, } impl KnitAnnotateFactory { pub fn new(key: Key, parents: Vec, annotated_lines: Vec<(Key, String)>) -> Self { Self { key, parents, annotated_lines, } } } impl<'py> IntoPyObject<'py> for KnitAnnotateFactory { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let knit_mod = py.import("breezy.bzr.knit")?; let factory_cls = knit_mod.getattr("KnitAnnotateFactory")?; // Create empty factory let factory = factory_cls.call0()?; // Set attributes factory.setattr("key", self.key.into_pyobject(py)?)?; let parent_tuples: Vec<_> = self .parents .into_iter() .map(|p| p.into_pyobject(py)) .collect::, _>>()?; let parents_py = pyo3::types::PyTuple::new(py, parent_tuples)?; factory.setattr("parents", parents_py)?; let lines_list = 
pyo3::types::PyList::empty(py); for (origin_key, line) in self.annotated_lines { let tuple = pyo3::types::PyTuple::new( py, &[ origin_key.into_pyobject(py)?.into_any(), line.into_pyobject(py)?.into_any(), ], )?; lines_list.append(tuple)?; } factory.setattr("annotated", lines_list)?; Ok(factory) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_knit_plain_factory() { crate::init(); crate::init_bzr(); pyo3::Python::attach(|py| { let key = Key::from(vec!["file1".to_string()]); let parents = vec![Key::from(vec!["parent1".to_string()])]; let factory = KnitPlainFactory::new( key.clone(), parents, Some("abc123".to_string()), Some(b"delta content".to_vec()), ); // Test conversion to Py let _py_obj = factory.into_pyobject(py).unwrap(); }); } #[test] fn test_knit_annotate_factory() { crate::init(); crate::init_bzr(); pyo3::Python::attach(|py| { let key = Key::from(vec!["file1".to_string()]); let parents = vec![]; let annotated_lines = vec![ ( Key::from(vec!["origin1".to_string()]), "line1\n".to_string(), ), ( Key::from(vec!["origin1".to_string()]), "line2\n".to_string(), ), ]; let factory = KnitAnnotateFactory::new(key, parents, annotated_lines); // Test conversion to Py let _py_obj = factory.into_pyobject(py).unwrap(); }); } // Note: Full KnitVersionedFiles tests require complex setup with indices and data access // which are difficult to mock in unit tests. Integration tests would be more appropriate. } breezyshim-0.7.5/src/launchpad.rs000064400000000000000000000041411046102023000151120ustar 00000000000000//! Launchpad login and related functions use pyo3::prelude::*; #[cfg(test)] use launchpadlib::uris; /// Log in to Launchpad using the provided URL. /// /// This function authenticates the user with Launchpad via OAuth, allowing /// subsequent API calls to be made with the authenticated user's credentials. pub fn login(url: &url::Url) { Python::attach(|py| -> PyResult<()> { let m = py.import("breezy.plugins.launchpad.cmds")?; let cmd = m.getattr("cmd_launchpad_login")?; let cmd_lp = cmd.call0()?; cmd_lp.call_method0("_setup_outf")?; cmd_lp.call_method1("run", (url.as_str(),))?; let lp_api = py.import("breezy.plugins.launchpad.lp_api")?; // The original code extracted a service root like this: let lp_uris = lp_api .getattr("uris")? .call0()? .extract::>()?; let lp_service_root = lp_uris .iter() .find(|(_key, root)| { url.host_str() == Some(root) || url.host_str() == Some(root.trim_end_matches('/')) }) .unwrap() .1 .clone(); let kwargs = pyo3::types::PyDict::new(py); kwargs.set_item("version", "devel")?; lp_api.call_method("connect_launchpad", (lp_service_root,), Some(&kwargs))?; Ok(()) }) .unwrap() } // Test function to identify uri function #[test] fn test_uri_functions() { // Sample URL let url_str = "https://launchpad.net/breezy"; let url = url::Url::parse(url_str).unwrap(); // Try available functions println!("Host: {}", url.host_str().unwrap_or_default()); // Try lookup_service_root let result1 = uris::lookup_service_root("production"); println!("lookup_service_root('production'): {:?}", result1); // Try lookup_web_root let result2 = uris::lookup_web_root("production"); println!("lookup_web_root('production'): {:?}", result2); // Try web_root_for_service_root let result3 = uris::web_root_for_service_root(&result1.unwrap()); println!("web_root_for_service_root: {:?}", result3); } breezyshim-0.7.5/src/lib.rs000064400000000000000000000142111046102023000137200ustar 00000000000000//! This crate contains a rust wrapper for the Breezy API, which is written in Python. //! //! 
Breezy itself is being ported to Rust, but until that port has completed, this crate allows //! access to the most important Breezy APIs via Rust. //! //! The Rust API here will follow the Breezy 4.0 Rust API as much as possible, to make porting //! easier. //! //! # Example //! //! ```no_run //! use breezyshim::prelude::*; //! use breezyshim::branch::open as open_branch; //! breezyshim::plugin::load_plugins(); //! let b = open_branch(&"https://code.launchpad.net/brz".parse().unwrap()).unwrap(); //! println!("Last revision: {:?}", b.last_revision()); //! ``` #![deny(missing_docs)] // Necessary for pyo3, which uses the gil-refs feature in macros // which is not defined in breezyshim #![allow(unexpected_cfgs)] // TODO: Fix large error enum variants by boxing large fields #![allow(clippy::result_large_err)] pub mod bazaar; pub mod branch; pub mod clean_tree; pub mod commit; pub mod config; pub mod controldir; pub mod cvs; pub mod darcs; pub mod delta; pub mod diff; #[cfg(feature = "dirty-tracker")] pub mod dirty_tracker; pub mod error; pub mod export; pub mod foreign; pub mod forge; pub mod fossil; pub mod git; pub mod github; pub mod gitlab; pub mod gpg; pub mod graph; /// Group compression versioned files implementation pub mod groupcompress; pub mod hooks; pub mod interrepository; pub mod intertree; /// Knit versioned files implementation pub mod knit; #[cfg(feature = "launchpad")] pub mod launchpad; pub mod location; pub mod lock; pub mod mercurial; pub mod merge; pub mod osutils; pub mod patches; pub mod plugin; pub mod prelude; pub mod rename_map; pub mod repository; pub mod revisionid; pub mod status; pub mod subversion; pub mod tags; pub mod testing; pub mod transform; pub mod transport; pub mod tree; pub mod ui; pub mod urlutils; pub mod version; /// Versioned files API for storing file content history pub mod versionedfiles; /// Weave versioned files implementation pub mod weave; pub mod workingtree; pub mod workspace; #[cfg(feature = "debian")] pub mod debian; // Re-export core types and functions /// Branch trait representing a branch in a version control system pub use branch::Branch; /// Control directory traits and types pub use controldir::{ControlDir, Prober}; /// Forge related types and functions for interacting with source code hosting services pub use forge::{get_forge, Forge, MergeProposal, MergeProposalStatus}; /// Lock type for managing access to resources pub use lock::Lock; use pyo3::exceptions::PyImportError; use pyo3::prelude::*; /// Revision identifier type pub use revisionid::RevisionId; use std::sync::Once; /// Transport functions and types for accessing remote repositories pub use transport::{get_transport, Transport}; /// Tree-related traits and types pub use tree::{RevisionTree, Tree, WorkingTree}; /// URL utility functions pub use urlutils::{join_segment_parameters, split_segment_parameters}; /// Workspace functions pub use workspace::reset_tree; /// Initialize Git support in Breezy. /// /// This function imports the breezy.git module to ensure Git functionality is available. pub fn init_git() { pyo3::Python::attach(|py| { py.import("breezy.git").unwrap(); }) } /// Initialize Bazaar support in Breezy. /// /// This function imports the breezy.bzr module to ensure Bazaar functionality is available. pub fn init_bzr() { pyo3::Python::attach(|py| { py.import("breezy.bzr").unwrap(); }) } #[cfg(feature = "auto-initialize")] /// Initialize #[ctor::ctor] fn ensure_initialized() { init(); } /// The minimum supported Breezy version. 
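///
/// [`init`] compares the version reported by the Python `breezy` package against
/// this value and panics if the installed copy is older.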
const MINIMUM_VERSION: (usize, usize, usize) = (3, 3, 6); /// Initialization lock to ensure Breezy is only initialized once. static INIT_BREEZY: Once = Once::new(); /// Initialize the Breezy library and Python interpreter. /// /// This function ensures Python is initialized and Breezy is loaded. /// It is safe to call multiple times. /// /// This function ensures that Breezy is properly initialized, checking version /// compatibility and loading required modules. It should be called before /// using any other functionality in this crate unless the "auto-initialize" /// feature is enabled. /// /// # Panics /// /// - If Breezy is not installed /// - If the installed Breezy version is too old pub fn init() { INIT_BREEZY.call_once(|| { pyo3::Python::initialize(); let (major, minor, micro) = pyo3::Python::attach(|py| match py.import("breezy") { Ok(breezy) => { let (major, minor, micro, _releaselevel, _serial): ( usize, usize, usize, String, usize, ) = breezy.getattr("version_info").unwrap().extract().unwrap(); (major, minor, micro) } Err(e) => { if e.is_instance_of::(py) { panic!("Breezy is not installed. Please install Breezy first."); } else { panic!("{}", e); } } }); if (major, minor, micro) < MINIMUM_VERSION { panic!( "Breezy version {}.{}.{} is too old, please upgrade to at least {}.{}.{}.", major, minor, micro, MINIMUM_VERSION.0, MINIMUM_VERSION.1, MINIMUM_VERSION.2 ); } if major >= 4 { log::warn!("Support for Breezy 4.0 is experimental and incomplete."); } init_git(); init_bzr(); // Work around a breezy bug pyo3::Python::attach(|py| { let m = py.import("breezy.controldir").unwrap(); let f = m.getattr("ControlDirFormat").unwrap(); f.call_method0("known_formats").unwrap(); }); // Prevent race conditions pyo3::Python::attach(|py| { let m = py.import("breezy.config").unwrap(); m.call_method0("GlobalStack").unwrap(); m.call_method1("LocationStack", ("file:///",)).unwrap(); }); }); } /// Shorthand for the standard result type used throughout this crate. pub type Result = std::result::Result; breezyshim-0.7.5/src/location.rs000064400000000000000000000072711046102023000147720ustar 00000000000000//! UI-layer location handling use pyo3::prelude::*; use url::Url; /// Convert a CVS root string to a URL. /// /// # Arguments /// /// * `cvsroot` - The CVS root string to convert /// /// # Returns /// /// A URL representing the CVS repository location pub fn cvs_to_url(cvsroot: &str) -> Url { Python::attach(|py| { let breezy_location = py.import("breezy.location").unwrap(); breezy_location .call_method1("cvs_to_url", (cvsroot,)) .unwrap() .extract::() .unwrap() .parse() .unwrap() }) } #[test] fn test_cvs_to_url() { assert_eq!( cvs_to_url(":pserver:anonymous@localhost:/var/lib/cvs"), Url::parse("cvs+pserver://anonymous@localhost/var/lib/cvs").unwrap() ); } /// Convert an RCP (remote copy) location string to a URL. /// /// RCP locations are in the format "user@host:/path/to/repo" and are converted /// to URLs like "ssh://user@host/path/to/repo". /// /// # Arguments /// /// * `rcp_location` - The RCP location string to convert /// /// # Returns /// /// A Result containing the converted URL or an error string pub fn rcp_location_to_url(rcp_location: &str) -> Result { Python::attach(|py| { let breezy_location = py.import("breezy.location").unwrap(); Ok(breezy_location .call_method1("rcp_location_to_url", (rcp_location,)) .map_err(|e| e.to_string())? 
.extract::() .unwrap() .parse() .unwrap()) }) } #[test] fn test_rcp_location_to_url() { assert_eq!( rcp_location_to_url("user@host:/path/to/repo").unwrap(), Url::parse("ssh://user@host/path/to/repo").unwrap() ); } /// Trait for types that can be converted to a location that Breezy understands. /// /// This trait is implemented by types that can be converted to a string /// representation that can be used as a location in Breezy API calls. pub trait AsLocation { /// Convert the object to a Python object representing a location. /// /// # Returns /// /// A Python object (string) representing the location fn as_location(&self) -> Py; } impl AsLocation for &url::Url { fn as_location(&self) -> Py { Python::attach(|py| { pyo3::types::PyString::new(py, self.as_ref()) .unbind() .into() }) } } #[test] fn test_as_location_url() { Python::attach(|py| { assert_eq!( Url::parse("ssh://user@host/path/to/repo") .unwrap() .as_ref() .as_location() .extract::(py) .unwrap(), "ssh://user@host/path/to/repo" ); }); } impl AsLocation for &str { fn as_location(&self) -> Py { Python::attach(|py| pyo3::types::PyString::new(py, self).unbind().into()) } } #[test] fn test_as_location_str() { Python::attach(|py| { assert_eq!( "ssh://user@host/path/to/repo" .as_location() .extract::(py) .unwrap(), "ssh://user@host/path/to/repo" ); }); } impl AsLocation for &std::path::Path { fn as_location(&self) -> Py { Python::attach(|py| { pyo3::types::PyString::new(py, self.to_str().unwrap()) .unbind() .into() }) } } #[test] fn test_as_location_path() { Python::attach(|py| { assert_eq!( std::path::Path::new("/path/to/repo") .as_location() .extract::(py) .unwrap(), "/path/to/repo" ); }); } breezyshim-0.7.5/src/lock.rs000064400000000000000000000016131046102023000141040ustar 00000000000000//! Locking of Breezy objects. use pyo3::prelude::*; /// Represents a lock on a Breezy object. /// /// The lock is automatically released when the Lock object is dropped, /// providing RAII (Resource Acquisition Is Initialization) style locking. /// /// This ensures that locked resources are properly released even if an error occurs. pub struct Lock(Py); impl From> for Lock { fn from(obj: Py) -> Self { Lock(obj) } } impl<'py> IntoPyObject<'py> for Lock { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } impl Drop for Lock { fn drop(&mut self) { Python::attach(|py| { self.0.call_method0(py, "unlock").unwrap(); }); } } breezyshim-0.7.5/src/mercurial.rs000064400000000000000000000043661046102023000151470ustar 00000000000000//! Mercurial prober. //! //! This allows detecting Mercurial repositories, but does not provide any //! functionality to interact with them. use pyo3::exceptions::PyModuleNotFoundError; use pyo3::prelude::*; /// Prober for Mercurial repositories. /// /// This struct can detect Mercurial repositories but does not provide /// functionality to interact with them directly. It requires the Breezy /// Mercurial plugin to be installed. pub struct SmartHgProber(Py); impl SmartHgProber { /// Create a new SmartHgProber instance. /// /// # Returns /// /// Some(SmartHgProber) if the Mercurial plugin is installed, /// None otherwise. 
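    ///
    /// # Example
    ///
    /// A minimal sketch, assuming Breezy and its Mercurial plugin are installed:
    ///
    /// ```no_run
    /// use breezyshim::mercurial::SmartHgProber;
    ///
    /// breezyshim::init();
    /// // `new` returns `None` when breezy.plugins.hg cannot be imported.
    /// if let Some(prober) = SmartHgProber::new() {
    ///     println!("Mercurial probing is available: {:?}", prober);
    /// }
    /// ```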
pub fn new() -> Option { Python::attach(|py| { let m = match py.import("breezy.plugins.hg") { Ok(m) => m, Err(e) => { if e.is_instance_of::(py) { return None; } else { e.print_and_set_sys_last_vars(py); panic!("Failed to import breezy.plugins.hg"); } } }; let prober = m .getattr("SmartHgProber") .expect("Failed to get SmartHgProber"); Some(Self(prober.unbind())) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for SmartHgProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Self(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for SmartHgProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl std::fmt::Debug for SmartHgProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("SmartHgProber({:?})", self.0)) } } impl crate::controldir::PyProber for SmartHgProber { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_smart_hg_prober() { let _ = SmartHgProber::new(); } } breezyshim-0.7.5/src/merge.rs000064400000000000000000000171111046102023000142530ustar 00000000000000//! Tree merging. use crate::branch::PyBranch; use crate::graph::Graph; use crate::hooks::HookDict; use crate::transform::TreeTransform; use crate::tree::PyTree; use crate::RevisionId; use pyo3::import_exception; use pyo3::prelude::*; use pyo3::types::PyDict; import_exception!(breezy.errors, UnrelatedBranches); /// Errors that can occur during merge operations. pub enum Error { /// Error indicating that the branches being merged are unrelated. /// /// This occurs when the branches have no common ancestor. UnrelatedBranches, } impl From for Error { fn from(e: PyErr) -> Self { Python::attach(|py| { if e.is_instance_of::(py) { Error::UnrelatedBranches } else { panic!("unexpected error: {:?}", e) } }) } } /// Represents a merge operation between two branches. /// /// This struct provides methods to configure and perform merges between branches, /// including finding the base revision, setting merge parameters, and executing the merge. pub struct Merger(Py); /// Types of merge algorithms that can be used. pub enum MergeType { /// Three-way merge algorithm. /// /// This is the standard merge algorithm that uses a common base revision /// and the two branches to be merged. Merge3, } impl From> for Merger { fn from(obj: Py) -> Self { Merger(obj) } } impl Merger { /// Create a new merger for merging into a tree. /// /// # Arguments /// /// * `branch` - The branch to merge from /// * `this_tree` - The tree to merge into /// * `revision_graph` - The graph of revisions to use for finding common ancestors /// /// # Returns /// /// A new Merger object pub fn new(branch: &dyn PyBranch, this_tree: &T, revision_graph: &Graph) -> Self { Python::attach(|py| { let m = py.import("breezy.merge").unwrap(); let cls = m.getattr("Merger").unwrap(); let kwargs = PyDict::new(py); kwargs .set_item("this_tree", this_tree.to_object(py)) .unwrap(); kwargs .set_item("revision_graph", revision_graph.as_pyobject()) .unwrap(); let merger = cls.call((branch.to_object(py),), Some(&kwargs)).unwrap(); Merger(merger.into()) }) } /// Find the base revision for the merge. 
/// /// # Returns /// /// The base revision ID if found, or None if the branches are unrelated pub fn find_base(&self) -> Result, crate::error::Error> { Python::attach(|py| match self.0.call_method0(py, "find_base") { Ok(_py_obj) => Ok(self .0 .getattr(py, "base_rev_id") .unwrap() .extract(py) .unwrap()), Err(err) => { if err.is_instance_of::(py) { Ok(None) } else { Err(err) } } }) .map_err(Into::into) } /// Set the other revision to merge. /// /// # Arguments /// /// * `other_revision` - The revision ID to merge /// * `other_branch` - The branch containing the revision /// /// # Returns /// /// Ok(()) on success, or an error if the operation fails pub fn set_other_revision( &mut self, other_revision: &RevisionId, other_branch: &dyn PyBranch, ) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0.call_method1( py, "set_other_revision", (other_revision.clone(), other_branch.to_object(py)), )?; Ok(()) }) } /// Set the base revision for the merge. /// /// # Arguments /// /// * `base_revision` - The base revision ID to use /// * `base_branch` - The branch containing the base revision /// /// # Returns /// /// Ok(()) on success, or an error if the operation fails pub fn set_base_revision( &mut self, base_revision: &RevisionId, base_branch: &dyn PyBranch, ) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0.call_method1( py, "set_base_revision", (base_revision.clone(), base_branch.to_object(py)), )?; Ok(()) }) } /// Set the merge algorithm to use. /// /// # Arguments /// /// * `merge_type` - The merge algorithm to use pub fn set_merge_type(&mut self, merge_type: MergeType) { Python::attach(|py| { let m = py.import("breezy.merge").unwrap(); let merge_type = match merge_type { MergeType::Merge3 => m.getattr("Merge3Merger").unwrap(), }; self.0.setattr(py, "merge_type", merge_type).unwrap(); }) } /// Create a submerger to execute the merge. /// /// # Returns /// /// A Submerger object that can perform the actual merge pub fn make_merger(&self) -> Result { Python::attach(|py| { let merger = self.0.call_method0(py, "make_merger")?; Ok(Submerger(merger)) }) } /// Create a merger from specific revision IDs. /// /// # Arguments /// /// * `other_tree` - The tree to merge from /// * `other_branch` - The branch containing the revision to merge /// * `other` - The revision ID to merge /// * `tree_branch` - The branch containing the tree to merge into /// /// # Returns /// /// A new Merger object, or an error if the operation fails pub fn from_revision_ids( other_tree: &T, other_branch: &dyn PyBranch, other: &RevisionId, tree_branch: &dyn PyBranch, ) -> Result { Python::attach(|py| { let m = py.import("breezy.merge").unwrap(); let cls = m.getattr("Merger").unwrap(); let kwargs = PyDict::new(py); kwargs .set_item("other_branch", other_branch.to_object(py)) .unwrap(); kwargs.set_item("other", other.clone()).unwrap(); kwargs .set_item("tree_branch", tree_branch.to_object(py)) .unwrap(); let merger = cls.call_method( "from_revision_ids", (other_tree.to_object(py),), Some(&kwargs), )?; Ok(Merger(merger.into())) }) } } /// Performs the actual merge operation. /// /// This struct is created by the Merger.make_merger() method and provides /// methods to execute the merge and create transformations. pub struct Submerger(Py); impl Submerger { /// Create a preview transformation of the merge. /// /// This allows inspecting the changes that would be made by the merge /// without actually applying them to the working tree. 
/// /// # Returns /// /// A TreeTransform object representing the merge changes pub fn make_preview_transform(&self) -> Result { Python::attach(|py| { let transform = self.0.call_method0(py, "make_preview_transform")?; Ok(TreeTransform::from(transform)) }) } } lazy_static::lazy_static! { /// Hooks that are called during merge operations. pub static ref MERGE_HOOKS: HookDict = HookDict::new("breezy.merge", "Merger", "hooks"); } breezyshim-0.7.5/src/osutils.rs000064400000000000000000000061761046102023000146670ustar 00000000000000//! OS-specific utilities. use std::path::Path; /// Check if a file is inside a directory. /// /// # Arguments /// /// * `dir` - The directory to check /// * `fname` - The file path to check /// /// # Returns /// /// `true` if the file is inside the directory, `false` otherwise pub fn is_inside(dir: &Path, fname: &Path) -> bool { fname.starts_with(dir) } /// Check if a file is inside any of the directories in a list. /// /// # Arguments /// /// * `dir_list` - The list of directories to check /// * `fname` - The file path to check /// /// # Returns /// /// `true` if the file is inside any of the directories, `false` otherwise pub fn is_inside_any(dir_list: &[&Path], fname: &Path) -> bool { for dirname in dir_list { if is_inside(dirname, fname) { return true; } } false } #[cfg(test)] mod tests { use super::*; use std::path::Path; #[test] fn test_is_inside_basic() { let dir = Path::new("/home/user"); let file = Path::new("/home/user/document.txt"); assert!(is_inside(dir, file)); } #[test] fn test_is_inside_not_inside() { let dir = Path::new("/home/user"); let file = Path::new("/home/other/document.txt"); assert!(!is_inside(dir, file)); } #[test] fn test_is_inside_nested() { let dir = Path::new("/home/user"); let file = Path::new("/home/user/subdir/document.txt"); assert!(is_inside(dir, file)); } #[test] fn test_is_inside_same_path() { let dir = Path::new("/home/user"); let file = Path::new("/home/user"); assert!(is_inside(dir, file)); } #[test] fn test_is_inside_relative_paths() { let dir = Path::new("user"); let file = Path::new("user/document.txt"); assert!(is_inside(dir, file)); } #[test] fn test_is_inside_any_found() { let dirs = vec![ Path::new("/home/user1"), Path::new("/home/user2"), Path::new("/home/user3"), ]; let dir_refs: Vec<&Path> = dirs.iter().map(|p| *p).collect(); let file = Path::new("/home/user2/document.txt"); assert!(is_inside_any(&dir_refs, file)); } #[test] fn test_is_inside_any_not_found() { let dirs = vec![ Path::new("/home/user1"), Path::new("/home/user2"), Path::new("/home/user3"), ]; let dir_refs: Vec<&Path> = dirs.iter().map(|p| *p).collect(); let file = Path::new("/home/other/document.txt"); assert!(!is_inside_any(&dir_refs, file)); } #[test] fn test_is_inside_any_empty_list() { let dirs: Vec<&Path> = vec![]; let file = Path::new("/home/user/document.txt"); assert!(!is_inside_any(&dirs, file)); } #[test] fn test_is_inside_any_first_match() { let dirs = vec![Path::new("/home/user"), Path::new("/home/user/subdir")]; let dir_refs: Vec<&Path> = dirs.iter().map(|p| *p).collect(); let file = Path::new("/home/user/subdir/document.txt"); // Should match the first one that matches assert!(is_inside_any(&dir_refs, file)); } } breezyshim-0.7.5/src/patches.rs000064400000000000000000000176241046102023000146140ustar 00000000000000//! Patching support for Breezy. 
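//!
//! # Example
//!
//! A minimal sketch, mirroring this module's tests, of temporarily applying a
//! parsed unified diff to a working tree (created in the current directory purely
//! for illustration); the patches are reverted when the [`AppliedPatches`] value
//! is dropped:
//!
//! ```no_run
//! use breezyshim::controldir::ControlDirFormat;
//! use breezyshim::patches::AppliedPatches;
//! use breezyshim::tree::Tree;
//! use patchkit::unified::UnifiedPatch;
//!
//! let tree = breezyshim::controldir::create_standalone_workingtree(
//!     std::path::Path::new("."),
//!     &ControlDirFormat::default(),
//! )
//! .unwrap();
//! let patch = UnifiedPatch::parse_patch(patchkit::unified::splitlines(
//!     b"--- a/a\n+++ b/a\n@@ -1 +1 @@\n-a\n+b\n",
//! ))
//! .unwrap();
//! // Patches are applied while `patched` is alive and reverted on drop.
//! let patched = AppliedPatches::new(&tree, vec![patch], None).unwrap();
//! let new_text = patched.get_file_text(std::path::Path::new("a")).unwrap();
//! ```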
use crate::transform::TreeTransform; use patchkit::unified::{HunkLine, UnifiedPatch}; use pyo3::intern; use pyo3::prelude::*; use pyo3::types::{PyBytes, PyList}; fn py_patches(iter_patches: impl Iterator) -> PyResult> { Python::attach(|py| { let m = py.import("breezy.patches")?; let patchc = m.getattr("Patch")?; let hunkc = m.getattr("Hunk")?; let insertlinec = m.getattr("InsertLine")?; let removelinec = m.getattr("RemoveLine")?; let contextlinec = m.getattr("ContextLine")?; let mut ret = vec![]; for patch in iter_patches { let pypatch = patchc.call1(( PyBytes::new(py, &patch.orig_name), PyBytes::new(py, &patch.mod_name), patch.orig_ts, patch.mod_ts, ))?; let pyhunks = pypatch.getattr("hunks")?; for hunk in patch.hunks { let pyhunk = hunkc.call1(( hunk.orig_pos, hunk.orig_range, hunk.mod_pos, hunk.mod_range, hunk.tail, ))?; pyhunks.call_method1("append", (&pyhunk,))?; let pylines = pyhunk.getattr("lines")?; for line in hunk.lines { pylines.call_method1( "append", (match line { HunkLine::ContextLine(l) => { contextlinec.call1((PyBytes::new(py, l.as_slice()),))? } HunkLine::InsertLine(l) => { insertlinec.call1((PyBytes::new(py, l.as_slice()),))? } HunkLine::RemoveLine(l) => { removelinec.call1((PyBytes::new(py, l.as_slice()),))? } },), )?; } } ret.push(pypatch); } Ok(PyList::new(py, ret.iter())?.unbind().into()) }) } /// Apply patches to a TreeTransform. /// /// # Arguments /// * `tt`: TreeTransform instance /// * `patches`: List of patches /// * `prefix`: Number leading path segments to strip pub fn apply_patches( tt: &TreeTransform, patches: impl Iterator, prefix: Option, ) -> crate::Result<()> { Python::attach(|py| { let patches = py_patches(patches)?; let m = py.import("breezy.tree")?; let apply_patches = m.getattr("apply_patches")?; apply_patches.call1((tt.as_pyobject(), patches, prefix))?; Ok(()) }) } /// Represents patches that have been applied to a tree. /// /// This struct provides a way to temporarily apply patches to a tree /// and automatically revert them when the object is dropped. pub struct AppliedPatches(Py, Py); impl AppliedPatches { /// Create a new AppliedPatches instance. 
/// /// # Arguments /// /// * `tree` - The tree to apply patches to /// * `patches` - List of patches to apply /// * `prefix` - Number of leading path segments to strip from patch paths /// /// # Returns /// /// A new AppliedPatches object, which will revert the patches when dropped pub fn new( tree: &T, patches: Vec, prefix: Option, ) -> crate::Result { let (ap, tree) = Python::attach(|py| -> Result<_, PyErr> { let patches = py_patches(patches.into_iter())?; let m = py.import("breezy.patches")?; let c = m.getattr("AppliedPatches")?; let ap = c.call1((tree.to_object(py), patches, prefix))?; let tree = ap.call_method0(intern!(py, "__enter__"))?; Ok((ap.unbind(), tree.unbind())) })?; Ok(Self(tree, ap)) } } impl Drop for AppliedPatches { fn drop(&mut self) { Python::attach(|py| -> Result<(), PyErr> { self.1.call_method1( py, intern!(py, "__exit__"), (py.None(), py.None(), py.None()), )?; Ok(()) }) .unwrap(); } } impl<'py> IntoPyObject<'py> for AppliedPatches { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.clone_ref(py).into_bound(py)) } } impl crate::tree::PyTree for AppliedPatches { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } #[cfg(test)] mod applied_patches_tests { use super::*; use crate::controldir::ControlDirFormat; use crate::tree::MutableTree; use crate::tree::Tree; use crate::workingtree::WorkingTree; use serial_test::serial; #[test] #[serial] fn test_apply_simple() { let env = crate::testing::TestEnv::new(); let td = tempfile::tempdir().unwrap(); let tree = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); std::fs::write(td.path().join("a"), "a\n").unwrap(); tree.add(&[std::path::Path::new("a")]).unwrap(); tree.build_commit() .message("Add a") .reporter(&crate::commit::NullCommitReporter::new()) .commit() .unwrap(); let patch = UnifiedPatch::parse_patch(patchkit::unified::splitlines( br#"--- a/a +++ b/a @@ -1 +1 @@ -a +b "#, )) .unwrap(); let newtree = crate::patches::AppliedPatches::new(&tree, vec![patch], None).unwrap(); assert_eq!( b"b\n".to_vec(), newtree.get_file_text(std::path::Path::new("a")).unwrap() ); std::mem::drop(newtree); std::mem::drop(env); } #[test] #[serial] fn test_apply_delete() { let env = crate::testing::TestEnv::new(); let td = tempfile::tempdir().unwrap(); let tree = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); std::fs::write(td.path().join("a"), "a\n").unwrap(); tree.add(&[std::path::Path::new("a")]).unwrap(); tree.build_commit() .reporter(&crate::commit::NullCommitReporter::new()) .message("Add a") .commit() .unwrap(); let patch = UnifiedPatch::parse_patch(patchkit::unified::splitlines( br#"--- a/a +++ /dev/null @@ -1 +0,0 @@ -a "#, )) .unwrap(); let newtree = crate::patches::AppliedPatches::new(&tree, vec![patch], None).unwrap(); assert!(!newtree.has_filename(std::path::Path::new("a"))); std::mem::drop(env); } #[test] #[serial] fn test_apply_add() { let env = crate::testing::TestEnv::new(); let td = tempfile::tempdir().unwrap(); let tree = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); std::fs::write(td.path().join("a"), "a\n").unwrap(); tree.add(&[std::path::Path::new("a")]).unwrap(); tree.build_commit() .reporter(&crate::commit::NullCommitReporter::new()) .message("Add a") .commit() .unwrap(); let patch = 
UnifiedPatch::parse_patch(patchkit::unified::splitlines( br#"--- /dev/null +++ b/b @@ -0,0 +1 @@ +b "#, )) .unwrap(); let newtree = crate::patches::AppliedPatches::new(&tree, vec![patch], None).unwrap(); assert_eq!( b"b\n".to_vec(), newtree.get_file_text(std::path::Path::new("b")).unwrap() ); std::mem::drop(env); } } breezyshim-0.7.5/src/plugin.rs000064400000000000000000000017631046102023000144600ustar 00000000000000//! Plugin loading and management. use pyo3::exceptions::PyRuntimeError; use pyo3::prelude::*; /// Load all Breezy plugins. /// /// This function loads all available Breezy plugins. It should be called /// before using functionality that might depend on plugins, such as /// support for specific version control systems. /// /// # Returns /// /// `true` if plugins were loaded, `false` if Breezy was already initialized pub fn load_plugins() -> bool { Python::attach(|py| { let m = py.import("breezy.plugin").unwrap(); match m.call_method0("load_plugins") { Ok(_) => true, Err(e) if e.is_instance_of::(py) && e.to_string().contains("Breezy already initialized") => { false } Err(e) => panic!("Error loading plugins: {}", e), } }) } #[cfg(test)] mod tests { use super::*; #[test] fn test_load_plugins() { load_plugins(); } } breezyshim-0.7.5/src/prelude.rs000064400000000000000000000005701046102023000146150ustar 00000000000000//! Prelude for the breezyshim crate //! //! This module re-exports commonly used items from the crate, pub use crate::branch::Branch; pub use crate::controldir::ControlDir; pub use crate::error::Error as BrzError; pub use crate::repository::Repository; pub use crate::revisionid::RevisionId; pub use crate::tree::{MutableTree, Tree}; pub use crate::workingtree::WorkingTree; breezyshim-0.7.5/src/rename_map.rs000064400000000000000000000044671046102023000152720ustar 00000000000000//! Detect renames between two trees based on file contents. use crate::tree::{PyMutableTree, PyTree}; use pyo3::prelude::*; /// Guess file renames between two trees based on file contents. /// /// This function detects files that were renamed between the source tree /// and the target tree by comparing file contents, and updates the /// target tree to reflect these renames. 
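///
/// This is a thin wrapper around Breezy's `RenameMap.guess_renames`.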
/// /// # Arguments /// /// * `from_tree` - The source tree to detect renames from /// * `mutable_tree` - The target tree to apply renames to /// /// # Returns /// /// `Ok(())` on success, or an error if the operation fails pub fn guess_renames( from_tree: &dyn PyTree, mutable_tree: &dyn PyMutableTree, ) -> Result<(), crate::error::Error> { pyo3::Python::attach(|py| -> Result<(), pyo3::PyErr> { let m = py.import("breezy.rename_map")?; let rename_map = m.getattr("RenameMap")?; rename_map.call_method1( "guess_renames", (from_tree.to_object(py), mutable_tree.to_object(py)), )?; Ok(()) }) .map_err(Into::into) } #[cfg(test)] mod tests { use super::*; use crate::controldir::create_standalone_workingtree; use crate::tree::MutableTree; use crate::workingtree::WorkingTree; use serial_test::serial; use std::path::Path; #[test] #[serial] fn test_guess_renames() { let env = crate::testing::TestEnv::new(); let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); // Add some content to make rename detection meaningful std::fs::write(tmp_dir.path().join("file1.txt"), "content1").unwrap(); wt.add(&[Path::new("file1.txt")]).unwrap(); wt.build_commit() .message("Initial commit") .commit() .unwrap(); let from_tree = wt.basis_tree().unwrap(); // Create a second working tree and simulate a rename let tmp_dir2 = tempfile::tempdir().unwrap(); let wt2 = create_standalone_workingtree(tmp_dir2.path(), "2a").unwrap(); std::fs::write(tmp_dir2.path().join("file2.txt"), "content1").unwrap(); wt2.add(&[Path::new("file2.txt")]).unwrap(); let result = guess_renames(&from_tree, &wt2); assert!(result.is_ok()); std::mem::drop(env); } } breezyshim-0.7.5/src/repository.rs000064400000000000000000001356001046102023000153770ustar 00000000000000//! Repository handling //! //! A repository is a collection of revisions and their associated data. use crate::branch::GenericBranch; use crate::controldir::{ControlDir, GenericControlDir}; use crate::delta::TreeDelta; use crate::foreign::VcsType; use crate::graph::Graph; use crate::location::AsLocation; use crate::lock::Lock; use crate::revisionid::RevisionId; use crate::tree::RevisionTree; use chrono::DateTime; use chrono::TimeZone; use pyo3::exceptions::PyStopIteration; use pyo3::intern; use pyo3::prelude::*; use pyo3::types::PyDict; use std::collections::HashMap; /// Represents the format of a repository. /// /// Different repository formats have different capabilities, such as /// support for content hash keys (CHKs). pub struct RepositoryFormat(Py); impl Clone for RepositoryFormat { fn clone(&self) -> Self { Python::attach(|py| RepositoryFormat(self.0.clone_ref(py))) } } impl RepositoryFormat { /// Check if this repository format supports content hash keys (CHKs). /// /// # Returns /// /// `true` if the format supports CHKs, `false` otherwise pub fn supports_chks(&self) -> bool { Python::attach(|py| { self.0 .getattr(py, "supports_chks") .and_then(|attr| attr.extract(py)) .unwrap_or(false) }) } } /// Represents the lock status of a repository. #[derive(Debug, Clone)] pub struct LockStatus { /// Whether the repository is locked. pub is_locked: bool, /// The holder of the lock, if any. pub lock_holder: Option, } /// Statistics about a repository. #[derive(Debug, Clone)] pub struct RepositoryStats { /// Number of revisions in the repository. pub revision_count: u32, /// Number of files in the repository. pub file_count: u32, /// Committer statistics, if requested. pub committers: Option>, } /// Trait for repository operations. 
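///
/// It is implemented for every [`PyRepository`] wrapper (such as
/// [`GenericRepository`]) through a blanket implementation.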
/// /// This trait defines the operations that can be performed on a repository, /// such as fetching revisions, getting a revision tree, or looking up revisions. pub trait Repository { /// Get a reference to the underlying Any type for downcasting. fn as_any(&self) -> &dyn std::any::Any; /// Get the version control system type for this repository. fn vcs_type(&self) -> VcsType; /// Get the user-facing URL for this repository. fn get_user_url(&self) -> url::Url; /// Get a transport for the user-facing URL. fn user_transport(&self) -> crate::transport::Transport; /// Get a transport for the control directory. fn control_transport(&self) -> crate::transport::Transport; /// Fetch revisions from another repository. /// /// # Arguments /// /// * `other_repository` - The repository to fetch from /// * `stop_revision` - Optional revision to stop fetching at fn fetch( &self, other_repository: &dyn Repository, stop_revision: Option<&RevisionId>, ) -> Result<(), crate::error::Error>; /// Get a revision tree for a specific revision. /// /// # Arguments /// /// * `revid` - The revision ID to get the tree for fn revision_tree(&self, revid: &RevisionId) -> Result; /// Get the revision graph for this repository. fn get_graph(&self) -> Graph; /// Get the control directory for this repository. fn controldir( &self, ) -> Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, >; /// Get the repository format. fn format(&self) -> RepositoryFormat; /// Iterate over revisions with the given IDs. /// /// # Arguments /// /// * `revision_ids` - The revision IDs to iterate over fn iter_revisions( &self, revision_ids: Vec, ) -> Box)>>; /// Get revision deltas for the given revisions. /// /// # Arguments /// /// * `revs` - The revisions to get deltas for /// * `specific_files` - Optional list of specific files to get deltas for fn get_revision_deltas( &self, revs: &[Revision], specific_files: Option<&[&std::path::Path]>, ) -> Box>; /// Get a specific revision. /// /// # Arguments /// /// * `revision_id` - The revision ID to get fn get_revision(&self, revision_id: &RevisionId) -> Result; /// Look up a Bazaar revision ID. /// /// # Arguments /// /// * `revision_id` - The revision ID to look up fn lookup_bzr_revision_id( &self, revision_id: &RevisionId, ) -> Result<(Vec,), crate::error::Error>; /// Look up a foreign revision ID. /// /// # Arguments /// /// * `foreign_revid` - The foreign revision ID to look up fn lookup_foreign_revision_id( &self, foreign_revid: &[u8], ) -> Result; /// Lock the repository for reading. fn lock_read(&self) -> Result; /// Lock the repository for writing. fn lock_write(&self) -> Result; /// Check if the repository has a specific revision. /// /// # Arguments /// /// * `revision_id` - The revision ID to check for fn has_revision(&self, revision_id: &RevisionId) -> Result; /// Get all revision IDs in the repository. fn all_revision_ids(&self) -> Result, crate::error::Error>; /// Check if the repository is shared (can be used by multiple branches). fn is_shared(&self) -> Result; /// Get the signature text for a revision. /// /// # Arguments /// /// * `revision_id` - The revision ID to get the signature for fn get_signature_text(&self, revision_id: &RevisionId) -> Result; /// Check if a revision has a signature. /// /// # Arguments /// /// * `revision_id` - The revision ID to check fn has_signature_for_revision_id( &self, revision_id: &RevisionId, ) -> Result; /// Pack the repository to optimize storage. 
/// /// # Arguments /// /// * `hint` - Optional list of revision IDs to focus on /// * `clean_obsolete_packs` - Whether to clean obsolete packs fn pack( &self, hint: Option<&[RevisionId]>, clean_obsolete_packs: bool, ) -> Result<(), crate::error::Error>; /// Start a write group for batch operations. fn start_write_group(&self) -> Result<(), crate::error::Error>; /// Commit a write group. fn commit_write_group(&self) -> Result<(), crate::error::Error>; /// Abort a write group. fn abort_write_group(&self) -> Result<(), crate::error::Error>; /// Check if a write group is active. fn is_in_write_group(&self) -> bool; /// Get parent revision IDs for given revisions. /// /// # Arguments /// /// * `revision_ids` - The revision IDs to get parents for fn get_parent_map( &self, revision_ids: &[RevisionId], ) -> Result>, crate::error::Error>; /// Get missing revision IDs between this repository and another. /// /// # Arguments /// /// * `other` - The other repository to compare with /// * `revision_id` - Optional revision to stop at fn missing_revision_ids( &self, other: &dyn Repository, revision_id: Option<&RevisionId>, ) -> Result, crate::error::Error>; /// Find branches that use this repository. fn find_branches(&self) -> Result, crate::error::Error>; /// Get physical lock status. fn get_physical_lock_status(&self) -> Result; /// Add a fallback repository. /// /// # Arguments /// /// * `repository` - The repository to add as a fallback fn add_fallback_repository( &self, repository: &dyn Repository, ) -> Result<(), crate::error::Error>; /// Get ancestry of revisions. /// /// # Arguments /// /// * `revision_ids` - The revision IDs to get ancestry for /// * `topo_sorted` - Whether to sort topologically fn get_ancestry( &self, revision_ids: &[RevisionId], topo_sorted: bool, ) -> Result, crate::error::Error>; /// Gather statistics about the repository. /// /// # Arguments /// /// * `committers` - Whether to gather committer statistics /// * `log` - Whether to log progress fn gather_stats( &self, committers: Option, log: Option, ) -> Result; /// Get file graph for specific files. fn get_file_graph(&self) -> Result; } /// Trait for types that can be converted to Python repository objects. /// /// This trait is implemented by types that represent Breezy repositories /// and can be converted to Python objects. pub trait PyRepository: Repository + std::any::Any { /// Get the underlying Python object for this repository. fn to_object(&self, py: Python) -> Py; } impl dyn PyRepository { /// Get a reference to self as a Repository trait object. pub fn as_repository(&self) -> &dyn Repository { self } } /// Generic wrapper for a Python repository object. /// /// This struct provides a Rust interface to a Breezy repository object. pub struct GenericRepository(Py); impl Clone for GenericRepository { fn clone(&self) -> Self { Python::attach(|py| GenericRepository(self.0.clone_ref(py))) } } #[derive(Debug)] /// Represents a revision in a version control repository. /// /// A revision contains metadata about a specific version of the code, /// such as the revision ID, parent revisions, commit message, committer, /// and timestamp. #[derive(Clone)] pub struct Revision { /// The unique identifier for this revision. pub revision_id: RevisionId, /// The IDs of the parent revisions (usually one, but can be multiple for merges). pub parent_ids: Vec, /// The commit message for this revision. pub message: String, /// The name and email of the person who committed this revision. 
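    ///
    /// Typically formatted as `Name <email>`.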
pub committer: String, /// The timestamp when this revision was committed (in seconds since the Unix epoch). pub timestamp: f64, /// The timezone offset for the timestamp, in seconds east of UTC. pub timezone: i32, /// Revision properties as key-value pairs. pub properties: std::collections::HashMap, } impl Revision { /// Get the commit timestamp as a DateTime object. /// /// # Returns /// /// A DateTime object representing the commit timestamp with its timezone pub fn datetime(&self) -> DateTime { let tz = chrono::FixedOffset::east_opt(self.timezone).unwrap(); tz.timestamp_opt(self.timestamp as i64, 0).unwrap() } /// Get the revision properties for this revision. /// /// # Returns /// /// A reference to the HashMap containing the revision properties as key-value pairs pub fn get_properties(&self) -> &std::collections::HashMap { &self.properties } } impl<'py> IntoPyObject<'py> for Revision { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { let kwargs = PyDict::new(py); kwargs.set_item("message", self.message).unwrap(); kwargs.set_item("committer", self.committer).unwrap(); kwargs.set_item("timestamp", self.timestamp).unwrap(); kwargs.set_item("timezone", self.timezone).unwrap(); kwargs.set_item("revision_id", self.revision_id).unwrap(); kwargs .set_item( "parent_ids", self.parent_ids.into_iter().collect::>(), ) .unwrap(); // Add properties if they exist if !self.properties.is_empty() { let py_properties = pyo3::types::PyDict::new(py); for (key, value) in self.properties { py_properties.set_item(key, value).unwrap(); } kwargs.set_item("properties", py_properties).unwrap(); } Ok(py .import("breezy.revision") .unwrap() .getattr("Revision") .unwrap() .call((), Some(&kwargs)) .unwrap()) } } impl<'a, 'py> FromPyObject<'a, 'py> for Revision { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { // Extract properties if they exist let mut properties = std::collections::HashMap::new(); if let Ok(py_properties) = ob.getattr("properties") { if !py_properties.is_none() { if let Ok(py_dict) = py_properties.cast::() { for (key, value) in py_dict.iter() { let key_str: String = key.extract()?; let value_str: String = value.extract()?; properties.insert(key_str, value_str); } } } } Ok(Revision { revision_id: ob.getattr("revision_id")?.extract()?, parent_ids: ob.getattr("parent_ids")?.extract()?, message: ob.getattr("message")?.extract()?, committer: ob.getattr("committer")?.extract()?, timestamp: ob.getattr("timestamp")?.extract()?, timezone: ob.getattr("timezone")?.extract()?, properties, }) } } /// Iterator over revisions in a repository. /// /// This struct provides an iterator interface for accessing revisions /// in a repository, returning pairs of revision IDs and revision objects. pub struct RevisionIterator(Py); impl Iterator for RevisionIterator { type Item = (RevisionId, Option); fn next(&mut self) -> Option { Python::attach( |py| match self.0.call_method0(py, intern!(py, "__next__")) { Err(e) if e.is_instance_of::(py) => None, Ok(o) => o.extract(py).ok(), Err(_) => None, }, ) } } /// Iterator over tree deltas in a repository. /// /// This struct provides an iterator interface for accessing tree deltas /// in a repository, which represent changes between revisions. 
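///
/// Each item is a [`TreeDelta`], as produced by
/// [`Repository::get_revision_deltas`].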
pub struct DeltaIterator(Py); impl Iterator for DeltaIterator { type Item = TreeDelta; fn next(&mut self) -> Option { Python::attach( |py| match self.0.call_method0(py, intern!(py, "__next__")) { Err(e) if e.is_instance_of::(py) => None, Ok(o) => o.extract(py).ok(), Err(_) => None, }, ) } } impl<'py> IntoPyObject<'py> for GenericRepository { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl PyRepository for GenericRepository { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl GenericRepository { /// Create a new GenericRepository from a Python object. /// /// # Arguments /// /// * `obj` - The Python object representing a Breezy repository /// /// # Returns /// /// A new GenericRepository wrapping the provided Python object pub fn new(obj: Py) -> Self { GenericRepository(obj) } } impl From> for GenericRepository { fn from(obj: Py) -> Self { GenericRepository(obj) } } impl Repository for T { fn as_any(&self) -> &dyn std::any::Any { self } fn vcs_type(&self) -> VcsType { Python::attach(|py| { if self.to_object(py).getattr(py, "_git").is_ok() { VcsType::Git } else { VcsType::Bazaar } }) } fn get_user_url(&self) -> url::Url { Python::attach(|py| { self.to_object(py) .getattr(py, "user_url") .unwrap() .extract::(py) .unwrap() .parse() .unwrap() }) } fn user_transport(&self) -> crate::transport::Transport { Python::attach(|py| { crate::transport::Transport::new( self.to_object(py).getattr(py, "user_transport").unwrap(), ) }) } fn control_transport(&self) -> crate::transport::Transport { Python::attach(|py| { crate::transport::Transport::new( self.to_object(py).getattr(py, "control_transport").unwrap(), ) }) } fn fetch( &self, other_repository: &dyn Repository, stop_revision: Option<&RevisionId>, ) -> Result<(), crate::error::Error> { Python::attach(|py| { // Try to get the Python object from the other repository let other_py = if let Some(py_repo) = other_repository .as_any() .downcast_ref::() { py_repo.to_object(py) } else { return Err(PyErr::new::( "Repository must be a PyRepository", ) .into()); }; self.to_object(py).call_method1( py, "fetch", ( other_py, stop_revision.map(|r| r.clone().into_pyobject(py).unwrap().unbind()), ), )?; Ok(()) }) } fn revision_tree(&self, revid: &RevisionId) -> Result { Python::attach(|py| { let o = self .to_object(py) .call_method1(py, "revision_tree", (revid.clone(),))?; Ok(RevisionTree(o)) }) } fn get_graph(&self) -> Graph { Python::attach(|py| Graph::from(self.to_object(py).call_method0(py, "get_graph").unwrap())) } fn controldir( &self, ) -> Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, > { Python::attach(|py| { Box::new(GenericControlDir::new( self.to_object(py).getattr(py, "controldir").unwrap(), )) as Box< dyn ControlDir< Branch = GenericBranch, Repository = GenericRepository, WorkingTree = crate::workingtree::GenericWorkingTree, >, > }) } fn format(&self) -> RepositoryFormat { Python::attach(|py| RepositoryFormat(self.to_object(py).getattr(py, "_format").unwrap())) } fn iter_revisions( &self, revision_ids: Vec, ) -> Box)>> { Python::attach(|py| { let o = self .to_object(py) .call_method1(py, "iter_revisions", (revision_ids,)) .unwrap(); Box::new(RevisionIterator(o)) }) } fn get_revision_deltas( &self, revs: &[Revision], specific_files: Option<&[&std::path::Path]>, ) -> Box> { Python::attach(|py| { let revs = revs 
.iter() .map(|r| r.clone().into_pyobject(py).unwrap()) .collect::>(); let specific_files = specific_files .map(|files| files.iter().map(|f| f.to_path_buf()).collect::>()); let o = self .to_object(py) .call_method1(py, "get_revision_deltas", (revs, specific_files)) .unwrap(); Box::new(DeltaIterator(o)) }) } fn get_revision(&self, revision_id: &RevisionId) -> Result { Python::attach(|py| { self.to_object(py) .call_method1(py, "get_revision", (revision_id.clone(),))? .extract(py) }) .map_err(Into::into) } // TODO: This should really be on ForeignRepository fn lookup_bzr_revision_id( &self, revision_id: &RevisionId, ) -> Result<(Vec,), crate::error::Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "lookup_bzr_revision_id", (revision_id.clone(),))? .extract::<(Vec, Py)>(py) }) .map_err(Into::into) .map(|(v, _m)| (v,)) } fn lookup_foreign_revision_id( &self, foreign_revid: &[u8], ) -> Result { Python::attach(|py| { self.to_object(py) .call_method1(py, "lookup_foreign_revision_id", (foreign_revid,))? .extract(py) }) .map_err(Into::into) } fn lock_read(&self) -> Result { Python::attach(|py| { Ok(Lock::from( self.to_object(py) .call_method0(py, intern!(py, "lock_read"))?, )) }) } fn lock_write(&self) -> Result { Python::attach(|py| { Ok(Lock::from( self.to_object(py) .call_method0(py, intern!(py, "lock_write"))?, )) }) } fn has_revision(&self, revision_id: &RevisionId) -> Result { Python::attach(|py| { self.to_object(py) .call_method1(py, "has_revision", (revision_id.clone(),))? .extract(py) .map_err(Into::into) }) } fn all_revision_ids(&self) -> Result, crate::error::Error> { Python::attach(|py| { self.to_object(py) .call_method0(py, "all_revision_ids")? .extract(py) .map_err(Into::into) }) } fn is_shared(&self) -> Result { Python::attach(|py| { self.to_object(py) .call_method0(py, "is_shared")? .extract(py) .map_err(Into::into) }) } fn get_signature_text(&self, revision_id: &RevisionId) -> Result { Python::attach(|py| { self.to_object(py) .call_method1(py, "get_signature_text", (revision_id.clone(),))? .extract(py) .map_err(Into::into) }) } fn has_signature_for_revision_id( &self, revision_id: &RevisionId, ) -> Result { Python::attach(|py| { self.to_object(py) .call_method1(py, "has_signature_for_revision_id", (revision_id.clone(),))? 
.extract(py) .map_err(Into::into) }) } fn pack( &self, hint: Option<&[RevisionId]>, clean_obsolete_packs: bool, ) -> Result<(), crate::error::Error> { Python::attach(|py| { let hint_py = hint.map(|h| h.to_vec()); self.to_object(py) .call_method1(py, "pack", (hint_py, clean_obsolete_packs))?; Ok(()) }) } fn start_write_group(&self) -> Result<(), crate::error::Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "start_write_group")?; Ok(()) }) } fn commit_write_group(&self) -> Result<(), crate::error::Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "commit_write_group")?; Ok(()) }) } fn abort_write_group(&self) -> Result<(), crate::error::Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "abort_write_group")?; Ok(()) }) } fn is_in_write_group(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "is_in_write_group") .and_then(|r| r.extract(py)) .unwrap_or(false) }) } fn get_parent_map( &self, revision_ids: &[RevisionId], ) -> Result>, crate::error::Error> { Python::attach(|py| { let result = self.to_object(py) .call_method1(py, "get_parent_map", (revision_ids.to_vec(),))?; let dict = result .cast_bound::(py) .expect("get_parent_map should return a dict"); let mut map = HashMap::new(); for (key, value) in dict.iter() { let rev_id: RevisionId = key.extract()?; let parents: Vec = value.extract()?; map.insert(rev_id, parents); } Ok(map) }) } fn missing_revision_ids( &self, other: &dyn Repository, revision_id: Option<&RevisionId>, ) -> Result, crate::error::Error> { Python::attach(|py| { // Try to get the Python object from the other repository let other_py = if let Some(py_repo) = other.as_any().downcast_ref::() { py_repo.to_object(py) } else { return Err(PyErr::new::( "Repository must be a PyRepository", ) .into()); }; self.to_object(py) .call_method1(py, "missing_revision_ids", (other_py, revision_id.cloned()))? .extract(py) .map_err(Into::into) }) } fn find_branches(&self) -> Result, crate::error::Error> { Python::attach(|py| { let result = self.to_object(py).call_method0(py, "find_branches")?; // find_branches returns a generator, so we need to convert it to a list let list_module = py.import("builtins")?; let list_result = list_module.call_method1("list", (result,))?; let list = list_result .cast::() .expect("list() should return a list"); let mut branches = Vec::new(); for item in list.iter() { branches.push(GenericBranch::from(item)); } Ok(branches) }) } fn get_physical_lock_status(&self) -> Result { Python::attach(|py| { let result = self .to_object(py) .call_method0(py, "get_physical_lock_status")?; if result.is_none(py) { return Ok(LockStatus { is_locked: false, lock_holder: None, }); } // The result is typically a tuple (is_locked, lock_info) if let Ok(tuple) = result.cast_bound::(py) { if tuple.len() >= 2 { let is_locked = tuple.get_item(0)?.extract::()?; let lock_info = tuple.get_item(1)?; let lock_holder = if lock_info.is_none() { None } else if let Ok(info_dict) = lock_info.cast::() { info_dict .get_item("user")? 
.and_then(|u| u.extract::().ok()) } else { lock_info.extract::().ok() }; return Ok(LockStatus { is_locked, lock_holder, }); } } // Fallback: try to extract as bool let is_locked = result.extract::(py)?; Ok(LockStatus { is_locked, lock_holder: None, }) }) } fn add_fallback_repository( &self, repository: &dyn Repository, ) -> Result<(), crate::error::Error> { Python::attach(|py| { // Try to get the Python object from the repository let repo_py = if let Some(py_repo) = repository.as_any().downcast_ref::() { py_repo.to_object(py) } else { return Err(PyErr::new::( "Repository must be a PyRepository", ) .into()); }; self.to_object(py) .call_method1(py, "add_fallback_repository", (repo_py,))?; Ok(()) }) } fn get_ancestry( &self, revision_ids: &[RevisionId], topo_sorted: bool, ) -> Result, crate::error::Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "get_ancestry", (revision_ids.to_vec(), topo_sorted))? .extract(py) .map_err(Into::into) }) } fn gather_stats( &self, committers: Option, log: Option, ) -> Result { Python::attach(|py| { let kwargs = PyDict::new(py); if let Some(c) = committers { kwargs.set_item("committers", c)?; } if let Some(l) = log { kwargs.set_item("log", l)?; } let result = self .to_object(py) .call_method(py, "gather_stats", (), Some(&kwargs))?; let stats_dict = result .cast_bound::(py) .expect("gather_stats should return a dict"); let revision_count = stats_dict .get_item("revisions")? .and_then(|v| v.extract::().ok()) .unwrap_or(0); let file_count = stats_dict .get_item("files")? .and_then(|v| v.extract::().ok()) .unwrap_or(0); let committers = if let Some(committers_dict) = stats_dict.get_item("committers")? { if !committers_dict.is_none() { let dict = committers_dict .cast::() .expect("committers should be a dict"); let mut map = HashMap::new(); for (key, value) in dict.iter() { let name: String = key.extract()?; let count: u32 = value.extract()?; map.insert(name, count); } Some(map) } else { None } } else { None }; Ok(RepositoryStats { revision_count, file_count, committers, }) }) } fn get_file_graph(&self) -> Result { Python::attach(|py| { Ok(Graph::from( self.to_object(py).call_method0(py, "get_file_graph")?, )) }) } } /// Open a repository at the specified location. /// /// # Arguments /// /// * `base` - The location to open the repository at /// /// # Returns /// /// A GenericRepository object, or an error if the operation fails /// /// # Examples /// /// ```no_run /// use breezyshim::repository::open; /// let repo = open("https://code.launchpad.net/brz").unwrap(); /// ``` pub fn open(base: impl AsLocation) -> Result { Python::attach(|py| { let o = py .import("breezy.repository")? .getattr("Repository")? 
.call_method1("open", (base.as_location(),))?; Ok(GenericRepository::new(o.into())) }) } #[cfg(test)] mod repository_tests { use super::{GenericRepository, Repository}; use crate::controldir::ControlDirFormat; use crate::foreign::VcsType; use crate::revisionid::RevisionId; use crate::tree::MutableTree; use crate::workingtree::WorkingTree; use std::path::Path; #[test] fn test_simple() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let _repo = crate::repository::open(td.path()).unwrap(); } #[test] fn test_clone() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let _repo2 = repo.clone(); } #[test] fn test_repository_format() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let format = repo.format(); let _supports_chks = format.supports_chks(); } #[test] fn test_repository_format_clone() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let format = repo.format(); let _format2 = format.clone(); } #[test] fn test_vcs_type() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let vcs_type = repo.vcs_type(); assert!(matches!(vcs_type, VcsType::Bazaar | VcsType::Git)); } #[test] fn test_user_url() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let _url = repo.get_user_url(); } #[test] fn test_transports() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let _user_transport = repo.user_transport(); let _control_transport = repo.control_transport(); } #[test] fn test_controldir() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let _controldir = repo.controldir(); } #[test] fn test_get_graph() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let _graph = repo.get_graph(); } #[test] fn test_revision_tree() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Try to get revision tree for null revision let null_revid = RevisionId::null(); let _tree = 
repo.revision_tree(&null_revid).unwrap(); } #[test] fn test_iter_revisions() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Test with empty list let revisions = vec![]; let mut iter = repo.iter_revisions(revisions); assert!(iter.next().is_none()); } #[test] fn test_lock_operations() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Test read lock let read_lock = repo.lock_read(); assert!(read_lock.is_ok()); // Test write lock let write_lock = repo.lock_write(); assert!(!write_lock.is_ok()); } #[test] fn test_has_revision() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Test with null revision let null_revid = RevisionId::null(); let has_null = repo.has_revision(&null_revid).unwrap(); assert!(has_null); // Test with non-existent revision let fake_revid = RevisionId::from("fake-revision-id".as_bytes()); let has_fake = repo.has_revision(&fake_revid).unwrap(); assert!(!has_fake); } #[test] fn test_all_revision_ids() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let revision_ids = repo.all_revision_ids().unwrap(); // New repository should have no revisions assert_eq!(revision_ids.len(), 0); } #[test] fn test_is_shared() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let _is_shared = repo.is_shared().unwrap(); // Just test that the method works } #[test] fn test_write_group_operations() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Test initial state assert!(!repo.is_in_write_group()); // Acquire write lock first let _lock = repo.lock_write().unwrap(); // Start a write group repo.start_write_group().unwrap(); assert!(repo.is_in_write_group()); // Abort the write group repo.abort_write_group().unwrap(); assert!(!repo.is_in_write_group()); } #[test] fn test_get_parent_map() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Test with empty list let parent_map = repo.get_parent_map(&[]).unwrap(); assert!(parent_map.is_empty()); // Test with null revision let null_revid = RevisionId::null(); let parent_map = repo.get_parent_map(&[null_revid]).unwrap(); assert_eq!(parent_map.len(), 1); } #[test] fn test_find_branches() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = 
crate::repository::open(td.path()).unwrap(); let branches = repo.find_branches().unwrap(); // A new standalone workingtree should have at least one branch assert!(!branches.is_empty()); } #[test] fn test_get_physical_lock_status() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let status = repo.get_physical_lock_status().unwrap(); // Repository should not be locked initially assert!(!status.is_locked); assert!(status.lock_holder.is_none()); } #[test] fn test_gather_stats() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let stats = repo.gather_stats(None, None).unwrap(); // New repository should have 0 revisions assert_eq!(stats.revision_count, 0); } #[test] fn test_get_file_graph() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); let _graph = repo.get_file_graph().unwrap(); // Just test that the method works } #[test] fn test_pack() { let td = tempfile::tempdir().unwrap(); let _dir = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Test pack without hints repo.pack(None, false).unwrap(); // Test pack with empty hints repo.pack(Some(&[]), true).unwrap(); } #[test] fn test_commit_with_revision_properties() { let td = tempfile::tempdir().unwrap(); let wt = crate::controldir::create_standalone_workingtree( td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Create a file to commit let test_file = td.path().join("test.txt"); std::fs::write(&test_file, "test content").unwrap(); // Use add() with relative path instead of smart_add to avoid path issues wt.add(&[Path::new("test.txt")]).unwrap(); // Test CommitBuilder with revision properties let test_key = "test-property"; let test_value = "test-value-data"; let test_key2 = "deb-pristine-delta-foo.tar.gz"; let test_value2 = "binary-delta-data-here"; let revision_id = wt .build_commit() .message("Test commit with properties") .committer("Test User ") .set_revprop(test_key, test_value) .unwrap() .set_revprop(test_key2, test_value2) .unwrap() .commit() .unwrap(); // Retrieve the revision and check properties let revision = repo.get_revision(&revision_id).unwrap(); let properties = revision.get_properties(); // Check that our properties are present with correct values assert!( properties.contains_key(test_key), "Property '{}' not found", test_key ); assert!( properties.contains_key(test_key2), "Property '{}' not found", test_key2 ); // Verify the values match what we set let retrieved_value = properties.get(test_key).unwrap(); let retrieved_value2 = properties.get(test_key2).unwrap(); assert_eq!( retrieved_value, test_value, "Property '{}' value mismatch", test_key ); assert_eq!( retrieved_value2, test_value2, "Property '{}' value mismatch", test_key2 ); } #[test] fn test_revision_properties_empty() { let td = tempfile::tempdir().unwrap(); let wt = crate::controldir::create_standalone_workingtree( 
td.path(), &ControlDirFormat::default(), ) .unwrap(); let repo: GenericRepository = crate::repository::open(td.path()).unwrap(); // Create a file to commit let test_file = td.path().join("test.txt"); std::fs::write(&test_file, "test content").unwrap(); // Use add() with relative path instead of smart_add to avoid path issues wt.add(&[Path::new("test.txt")]).unwrap(); // Create a commit without revision properties let revision_id = wt .build_commit() .message("Test commit without properties") .committer("Test User ") .commit() .unwrap(); // Retrieve the revision and check properties let revision = repo.get_revision(&revision_id).unwrap(); let properties = revision.get_properties(); // Breezy automatically adds a "branch-nick" property // Just check that it exists and no other custom properties are present assert!( properties.contains_key("branch-nick"), "Expected branch-nick property" ); assert!(!properties.contains_key("test-property")); assert!(!properties.contains_key("deb-pristine-delta-foo.tar.gz")); } } breezyshim-0.7.5/src/revisionid.rs000064400000000000000000000124311046102023000153270ustar 00000000000000//! Revision ID type and related functions. use pyo3::prelude::*; use serde::{Deserialize, Deserializer, Serialize, Serializer}; #[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] /// Represents a unique identifier for a revision in a version control system. /// /// RevisionId is typically a string in UTF-8 encoding, but is stored as bytes /// to efficiently handle all possible revision formats across different VCS systems. pub struct RevisionId(Vec); impl std::fmt::Debug for RevisionId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let s = String::from_utf8_lossy(&self.0); write!(f, "{}", s) } } impl RevisionId { /// Get the raw bytes of the revision ID. /// /// # Returns /// /// A slice of the underlying bytes pub fn as_bytes(&self) -> &[u8] { &self.0 } /// Check if this revision ID is the null revision. /// /// # Returns /// /// `true` if this is the null revision, `false` otherwise pub fn is_null(&self) -> bool { self.0 == NULL_REVISION } /// Check if this revision ID is a reserved revision. /// /// Reserved revision IDs start with a colon character. /// /// # Returns /// /// `true` if this is a reserved revision, `false` otherwise pub fn is_reserved(&self) -> bool { self.0.starts_with(b":") } /// Create a new null revision ID. /// /// # Returns /// /// A new RevisionId representing the null revision pub fn null() -> Self { Self(NULL_REVISION.to_vec()) } /// Get the revision ID as a UTF-8 string. 
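/// A minimal usage sketch (doc example; assumes `RevisionId` is reachable via the
/// `revisionid` module defined in this file):
///
/// ```ignore
/// use breezyshim::revisionid::RevisionId;
///
/// // The null revision is the well-known "null:" marker.
/// let null = RevisionId::null();
/// assert!(null.is_null());
/// assert_eq!(null.as_str(), "null:");
///
/// // Any byte string can be wrapped; reserved ids start with ':'.
/// let rev = RevisionId::from(b"some-revision-id".to_vec());
/// assert!(!rev.is_reserved());
/// assert_eq!(rev.as_bytes(), b"some-revision-id");
/// ```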
/// /// # Returns /// /// The revision ID as a string slice /// /// # Panics /// /// Panics if the revision ID is not valid UTF-8 pub fn as_str(&self) -> &str { std::str::from_utf8(&self.0).unwrap() } } #[cfg(feature = "sqlx")] use sqlx::{postgres::PgTypeInfo, Postgres}; #[cfg(feature = "sqlx")] impl sqlx::Type for RevisionId { fn type_info() -> PgTypeInfo { >::type_info() } } #[cfg(feature = "sqlx")] impl sqlx::Encode<'_, Postgres> for RevisionId { fn encode_by_ref( &self, buf: &mut sqlx::postgres::PgArgumentBuffer, ) -> Result> { sqlx::Encode::::encode_by_ref(&self.as_str(), buf) } } #[cfg(feature = "sqlx")] impl sqlx::Decode<'_, Postgres> for RevisionId { fn decode( value: sqlx::postgres::PgValueRef<'_>, ) -> Result> { let s: &str = sqlx::Decode::::decode(value)?; Ok(RevisionId::from(s.as_bytes())) } } impl From> for RevisionId { fn from(value: Vec) -> Self { Self(value) } } impl From<&[u8]> for RevisionId { fn from(value: &[u8]) -> Self { Self(value.to_vec()) } } impl Serialize for RevisionId { fn serialize(&self, serializer: S) -> Result where S: Serializer, { serializer.serialize_str(&String::from_utf8_lossy(&self.0)) } } impl<'de> Deserialize<'de> for RevisionId { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { String::deserialize(deserializer).map(|s| Self(s.into_bytes())) } } impl<'a, 'py> FromPyObject<'a, 'py> for RevisionId { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let bytes = ob.extract::>()?; Ok(Self(bytes)) } } impl<'py> IntoPyObject<'py> for RevisionId { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(pyo3::types::PyBytes::new(py, &self.0).into_any()) } } // IntoPy is replaced by IntoPyObject in PyO3 0.25 // The IntoPyObject implementation above handles the conversion impl std::fmt::Display for RevisionId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let s = String::from_utf8_lossy(&self.0); write!(f, "{}", s) } } /// Constant representing the "current" revision identifier. /// /// This is used to refer to the current revision in working trees. pub const CURRENT_REVISION: &[u8] = b"current:"; /// Constant representing the "null" revision identifier. /// /// The null revision is used to represent the absence of a revision, /// such as the parent of the first commit in a repository. pub const NULL_REVISION: &[u8] = b"null:"; #[cfg(test)] mod tests { use super::*; #[test] fn test_revision_id() { let id = RevisionId::null(); assert!(id.is_null()); assert!(!id.is_reserved()); } #[test] fn test_revision_id_from_vec() { let id = RevisionId::from(b"test".to_vec()); assert!(!id.is_null()); assert!(!id.is_reserved()); } #[test] fn test_reserved_revision_id() { let id = RevisionId::from(b":test".to_vec()); assert!(!id.is_null()); assert!(id.is_reserved()); } #[test] fn test_as_bytes() { let id = RevisionId::from(b"test".to_vec()); assert_eq!(id.as_bytes(), b"test"); } } breezyshim-0.7.5/src/status.rs000064400000000000000000000021341046102023000144760ustar 00000000000000//! Status reporting functions. use crate::workingtree::PyWorkingTree; use pyo3::prelude::*; /// Display the status of a working tree. /// /// This function prints the status of the working tree to stdout, /// showing which files have been modified, added, or removed. 
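/// A minimal sketch, mirroring the test at the bottom of this file (the `"2a"`
/// format string and `tempfile` usage come from that test):
///
/// ```ignore
/// use breezyshim::controldir::create_standalone_workingtree;
/// use breezyshim::status::show_tree_status;
///
/// let dir = tempfile::tempdir().unwrap();
/// let wt = create_standalone_workingtree(dir.path(), "2a").unwrap();
/// // Prints the status of the (empty) tree to stdout.
/// show_tree_status(&wt).unwrap();
/// ```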
/// /// # Arguments /// /// * `wt` - The working tree to show the status for /// /// # Returns /// /// `Ok(())` on success, or an error if the operation fails pub fn show_tree_status(wt: &dyn PyWorkingTree) -> crate::Result<()> { Python::attach(|py| { let m = py.import("breezy.status")?; let f = m.getattr("show_tree_status")?; f.call1((&wt.to_object(py),))?; Ok(()) }) } #[cfg(test)] mod tests { use super::*; use crate::controldir::create_standalone_workingtree; #[test] fn test_show_tree_status() { let tmp_dir = tempfile::tempdir().unwrap(); let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap(); // This should not panic and should work with an empty tree let result = show_tree_status(&wt); assert!(result.is_ok()); } } breezyshim-0.7.5/src/subversion.rs000064400000000000000000000043671046102023000153640ustar 00000000000000//! Subversion repository prober. //! //! This module provides a prober for Subversion repositories, but no actual //! implementation is provided. use pyo3::exceptions::PyModuleNotFoundError; use pyo3::prelude::*; /// Prober for Subversion repositories. /// /// This struct can detect Subversion repositories but requires the Breezy /// Subversion plugin to be installed. pub struct SvnRepositoryProber(Py); impl SvnRepositoryProber { /// Create a new SvnRepositoryProber instance. /// /// # Returns /// /// Some(SvnRepositoryProber) if the Subversion plugin is installed, /// None otherwise. pub fn new() -> Option { Python::attach(|py| { let m = match py.import("breezy.plugins.svn") { Ok(m) => m, Err(e) => { if e.is_instance_of::(py) { return None; } else { e.print_and_set_sys_last_vars(py); panic!("Failed to import breezy.plugins.svn"); } } }; let prober = m .getattr("SvnRepositoryProber") .expect("Failed to get SvnRepositoryProber"); Some(Self(prober.unbind())) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for SvnRepositoryProber { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Self(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for SvnRepositoryProber { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl std::fmt::Debug for SvnRepositoryProber { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("SvnRepositoryProber({:?})", self.0)) } } impl crate::controldir::PyProber for SvnRepositoryProber { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_new() { let _ = SvnRepositoryProber::new(); } } breezyshim-0.7.5/src/tags.rs000064400000000000000000000062131046102023000141130ustar 00000000000000//! Revision tags use crate::error::Error; use crate::revisionid::RevisionId; use pyo3::intern; use pyo3::prelude::*; use std::collections::{HashMap, HashSet}; /// Represents a collection of revision tags. /// /// Tags allow associating human-readable names with specific revision IDs. /// This struct provides methods to manage and query these tags. pub struct Tags(Py); impl From> for Tags { fn from(obj: Py) -> Self { Tags(obj) } } impl Tags { /// Get a mapping from revision IDs to sets of tags. 
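/// A minimal sketch (assumes a `Tags` collection already obtained from a branch;
/// the way it is obtained is not shown here):
///
/// ```ignore
/// // Map every revision to the set of tag names pointing at it.
/// let reverse = tags.get_reverse_tag_dict().unwrap();
/// for (revid, names) in &reverse {
///     println!("{} -> {:?}", revid, names);
/// }
/// ```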
/// /// # Returns /// /// A HashMap mapping each revision ID to a set of tag names that point to it, /// or an error if the operation fails pub fn get_reverse_tag_dict( &self, ) -> Result>, crate::error::Error> { Python::attach(|py| self.0.call_method0(py, "get_reverse_tag_dict")?.extract(py)) .map_err(Into::into) } /// Get a mapping from tag names to revision IDs. /// /// # Returns /// /// A HashMap mapping each tag name to the revision ID it points to, /// or an error if the operation fails pub fn get_tag_dict(&self) -> Result, crate::error::Error> { Python::attach(|py| { self.0 .call_method0(py, intern!(py, "get_tag_dict"))? .extract(py) }) .map_err(Into::into) } /// Look up a revision ID by tag name. /// /// # Arguments /// /// * `tag` - The tag name to look up /// /// # Returns /// /// The revision ID the tag points to, or an error if the tag doesn't exist pub fn lookup_tag(&self, tag: &str) -> Result { Ok(Python::attach(|py| { self.0.call_method1(py, "lookup_tag", (tag,))?.extract(py) })?) } /// Check if a tag exists. /// /// # Arguments /// /// * `tag` - The tag name to check /// /// # Returns /// /// `true` if the tag exists, `false` otherwise pub fn has_tag(&self, tag: &str) -> bool { Python::attach(|py| { self.0 .call_method1(py, "has_tag", (tag,)) .unwrap() .extract(py) .unwrap() }) } /// Set a tag to point to a specific revision. /// /// # Arguments /// /// * `tag` - The tag name to set /// * `revision_id` - The revision ID the tag should point to /// /// # Returns /// /// `Ok(())` on success, or an error if the operation fails pub fn set_tag(&self, tag: &str, revision_id: &RevisionId) -> Result<(), Error> { Python::attach(|py| { self.0 .call_method1(py, "set_tag", (tag, revision_id.clone())) })?; Ok(()) } /// Delete a tag. /// /// # Arguments /// /// * `tag` - The tag name to delete /// /// # Returns /// /// `Ok(())` on success, or an error if the operation fails pub fn delete_tag(&self, tag: &str) -> Result<(), Error> { Python::attach(|py| self.0.call_method1(py, "delete_tag", (tag,)))?; Ok(()) } } breezyshim-0.7.5/src/testing.rs000064400000000000000000000132261046102023000146340ustar 00000000000000//! Test utilities for the Breezy Rust bindings. use pyo3::prelude::*; use std::collections::HashMap; use std::fs; use std::path::PathBuf; use tempfile::TempDir; /// Environment for running Breezy tests. /// /// This struct sets up a temporary environment for running Breezy tests, /// including temporary directories and environment variables, and cleans /// up after itself when dropped. pub struct TestEnv { /// The temporary directory that contains all test files. pub temp_dir: TempDir, /// The working directory where tests will run. pub working_dir: PathBuf, /// The home directory for the test environment. pub home_dir: PathBuf, /// The original working directory before the test environment was set up. pub old_cwd: Option, /// The original environment variables before the test environment was set up. pub old_env: HashMap>, } impl TestEnv { /// Create a new testing environment. /// /// This sets up a temporary directory structure with a working directory /// and home directory, and configures environment variables for Breezy. 
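/// A minimal sketch of using the environment in a test. It is serialised because
/// `TestEnv` changes the process-wide working directory and environment
/// (the `serial_test` crate is already used by this module's own tests):
///
/// ```ignore
/// #[test]
/// #[serial_test::serial]
/// fn my_test() {
///     // Everything below runs in a throwaway HOME and working directory.
///     let _env = breezyshim::testing::TestEnv::new();
///     // ... exercise code that reads BRZ_EMAIL / BRZ_HOME here ...
/// } // Drop restores the previous cwd and environment variables.
/// ```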
/// /// # Returns /// /// A new TestEnv instance pub fn new() -> Self { // Ensure Python and Breezy are initialized crate::init(); let temp_dir = TempDir::new().unwrap(); let working_dir = temp_dir.path().join("test"); fs::create_dir(&working_dir).unwrap(); let home_dir = temp_dir.path().join("home"); fs::create_dir(&home_dir).unwrap(); let mut old_env = HashMap::new(); let old_cwd = std::env::current_dir().ok(); old_env.insert("HOME".to_string(), std::env::var("HOME").ok()); old_env.insert("BRZ_EMAIL".to_string(), std::env::var("BRZ_EMAIL").ok()); old_env.insert("BRZ_HOME".to_string(), std::env::var("BRZ_HOME").ok()); let brz_email = "Joe Tester "; let breezy_home = home_dir.join(".config/breezy"); std::env::set_current_dir(&working_dir).unwrap(); std::env::set_var("HOME", &home_dir); std::env::set_var("BRZ_EMAIL", brz_email); std::env::set_var("BRZ_HOME", &breezy_home); pyo3::Python::attach(|py| { let os = py.import("os").unwrap(); os.call_method1("chdir", (working_dir.to_str().unwrap(),)) .unwrap(); let environ = os.getattr("environ").unwrap(); environ .set_item("HOME", home_dir.to_str().unwrap()) .unwrap(); environ.set_item("BRZ_EMAIL", brz_email).unwrap(); environ .set_item("BRZ_HOME", breezy_home.to_str().unwrap()) .unwrap(); }); fs::create_dir_all(&breezy_home).unwrap(); fs::write( breezy_home.join("breezy.conf"), r#" [DEFAULT] email = Joe Tester "#, ) .unwrap(); Self { temp_dir, home_dir, working_dir, old_cwd, old_env, } } } impl Drop for TestEnv { fn drop(&mut self) { for (key, value) in self.old_env.drain() { if let Some(value) = value.as_ref() { std::env::set_var(&key, value); } else { std::env::remove_var(&key); } Python::attach(|py| { let os = py.import("os").unwrap(); let environ = os.getattr("environ").unwrap(); if let Some(value) = value { environ.set_item(key, value).unwrap(); } else { environ.del_item(key).unwrap(); } }); } if let Some(ref old_cwd) = self.old_cwd { let _ = std::env::set_current_dir(old_cwd); } } } impl Default for TestEnv { fn default() -> Self { Self::new() } } #[cfg(test)] mod tests { use super::*; use serial_test::serial; #[test] #[serial] fn test_testenv() { let env = TestEnv::new(); assert_eq!(env.home_dir, env.temp_dir.path().join("home")); assert_eq!(env.working_dir, env.temp_dir.path().join("test")); // On Windows, canonicalize() can add \\?\ prefix and resolve short names differently // So we compare the canonicalized versions of both paths assert_eq!( std::env::current_dir().unwrap().canonicalize().unwrap(), env.working_dir.canonicalize().unwrap() ); assert_eq!( std::env::var("HOME").unwrap(), env.home_dir.to_str().unwrap() ); assert_eq!( std::env::var("BRZ_EMAIL").unwrap(), "Joe Tester " ); Python::attach(|py| { let os = py.import("os").unwrap(); // On Windows, canonicalize both paths to handle short/long names and \\?\ prefix let py_cwd = os .call_method0("getcwd") .unwrap() .extract::() .unwrap(); assert_eq!( py_cwd.canonicalize().unwrap(), env.working_dir.canonicalize().unwrap(), ); assert_eq!( os.call_method1("getenv", ("HOME",)) .unwrap() .extract::() .unwrap(), env.home_dir.to_str().unwrap() ); assert_eq!( os.call_method1("getenv", ("BRZ_EMAIL",)) .unwrap() .extract::() .unwrap(), "Joe Tester " ); }); } } breezyshim-0.7.5/src/transform.rs000064400000000000000000000246141046102023000151750ustar 00000000000000//! Tree transformation API. 
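//!
//! A minimal sketch of how a transform is typically inspected (assuming `tree`
//! is any type implementing the `Tree` trait, e.g. a working tree):
//!
//! ```ignore
//! // Build a preview transform, look at the pending changes, then release it.
//! let transform = tree.preview_transform().unwrap();
//! for _change in transform.iter_changes().unwrap() {
//!     // Each item describes one pending change.
//! }
//! transform.finalize().unwrap();
//! ```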
use crate::tree::{PathBuf, PyTree}; use pyo3::exceptions::PyValueError; use pyo3::prelude::*; use pyo3::types::PyTuple; use pyo3::types::PyTupleMethods; /// A tree transform is used to apply a set of changes to a tree. pub struct TreeTransform(Py); #[derive(Clone)] /// Represents a change to a file or directory in a tree transformation. pub struct TreeChange {} impl From> for TreeChange { fn from(_ob: Py) -> Self { TreeChange {} } } impl<'a, 'py> FromPyObject<'a, 'py> for TreeChange { type Error = PyErr; fn extract(_ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(TreeChange {}) } } /// Represents a conflict that occurs during a tree transformation. pub struct Conflict(Py); impl Clone for Conflict { fn clone(&self) -> Self { Python::attach(|py| Conflict(self.0.clone_ref(py))) } } impl Conflict { /// Get the file paths associated with this conflict. pub fn associated_filenames(&self) -> Result, crate::error::Error> { let mut v: Vec = vec![]; Python::attach(|py| { let ret = self.0.getattr(py, "associated_filenames")?; for item in ret.bind(py).try_iter()? { v.push(item?.extract()?); } Ok(v) }) } /// Get a human-readable description of this conflict. pub fn describe(&self) -> Result { Python::attach(|py| { let ret = self.0.call_method0(py, "describe")?; Ok(ret.extract(py)?) }) } /// Clean up any temporary files created by this conflict. pub fn cleanup(&self, tree: &T) -> Result<(), crate::error::Error> { Python::attach(|py| { self.0.call_method1(py, "cleanup", (tree.to_object(py),))?; Ok(()) }) } } /// A tree that shows what a tree would look like after applying a transform. pub struct PreviewTree(Py); impl<'py> IntoPyObject<'py> for PreviewTree { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl From> for PreviewTree { fn from(ob: Py) -> Self { PreviewTree(ob) } } impl TreeTransform { fn to_object(&self) -> &Py { &self.0 } /// Get the underlying Py. pub(crate) fn as_pyobject(&self) -> &Py { &self.0 } /// Apply the transform to the tree. pub fn finalize(&self) -> Result<(), crate::error::Error> { Python::attach(|py| { self.to_object().call_method0(py, "finalize")?; Ok(()) }) } /// Iterate through the changes in this transform. pub fn iter_changes( &self, ) -> Result>, crate::error::Error> { let mut v: Vec = vec![]; Python::attach(|py| { let ret = self.to_object().call_method0(py, "iter_changes")?; for item in ret.bind(py).try_iter()? { v.push(item?.extract()?); } Ok(Box::new(v.into_iter()) as Box>) }) } /// Get a list of conflicts that would occur when applying this transform. pub fn cooked_conflicts(&self) -> Result, crate::error::Error> { let mut v: Vec = vec![]; Python::attach(|py| { let ret = self.to_object().getattr(py, "cooked_conflicts")?; for item in ret.bind(py).try_iter()? { v.push(Conflict(item?.into())); } Ok(v) }) } /// Get a preview tree showing what would happen if this transform was applied. 
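    /// A minimal sketch (the preview tree reflects the transform without
    /// modifying the underlying tree):
    ///
    /// ```ignore
    /// let transform = tree.preview_transform().unwrap();
    /// // Inspect what the tree would look like after applying the transform.
    /// let _preview = transform.get_preview_tree().unwrap();
    /// transform.finalize().unwrap();
    /// ```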
pub fn get_preview_tree(&self) -> Result { Python::attach(|py| { let ret = self.to_object().getattr(py, "preview_tree")?; Ok(PreviewTree(ret)) }) } } impl From> for TreeTransform { fn from(ob: Py) -> Self { TreeTransform(ob) } } impl<'py> IntoPyObject<'py> for TreeTransform { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for TreeTransform { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(TreeTransform(ob.to_owned().unbind())) } } #[derive(PartialEq, Eq, Hash, Clone, Debug)] /// An identifier for a transformation operation. pub struct TransId(String); impl<'a, 'py> FromPyObject<'a, 'py> for TransId { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(TransId(ob.extract::()?)) } } impl<'py> IntoPyObject<'py> for TransId { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_pyobject(py)?.into_any()) } } #[derive(PartialEq, Eq, Hash, Clone, Debug)] /// Enum representing different types of conflicts that can occur during transformation. pub enum RawConflict { /// Conflict caused by trying to change executability of an unversioned file. UnversionedExecutability(TransId), /// Conflict caused by trying to set executability on a non-file. NonFileExecutability(TransId), /// Conflict caused by trying to overwrite an existing file with different content. Overwrite(TransId, String), /// Conflict caused by a directory loop in the parent structure. ParentLoop(TransId), /// Conflict caused by trying to version a file with an unversioned parent. UnversionedParent(TransId), /// Conflict caused by trying to version a file without contents. VersioningNoContents(TransId), /// Conflict caused by trying to version a file with an unsupported kind. VersioningBadKind(TransId), /// Conflict caused by trying to add the same file path twice. Duplicate(TransId, TransId, String), /// Conflict caused by a missing parent directory. MissingParent(TransId), /// Conflict caused by a parent that is not a directory. 
NonDirectoryParent(TransId), } impl<'py> IntoPyObject<'py> for RawConflict { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(match self { RawConflict::UnversionedExecutability(id) => { PyTuple::new(py, [("unversioned executability", id)]) .unwrap() .into_any() } RawConflict::NonFileExecutability(id) => { PyTuple::new(py, [("non-file executability", id)]) .unwrap() .into_any() } RawConflict::Overwrite(id, path) => PyTuple::new(py, [("overwrite", id, path)]) .unwrap() .into_any(), RawConflict::ParentLoop(id) => { PyTuple::new(py, [("parent loop", id)]).unwrap().into_any() } RawConflict::UnversionedParent(id) => PyTuple::new(py, [("unversioned parent", id)]) .unwrap() .into_any(), RawConflict::VersioningNoContents(id) => { PyTuple::new(py, [("versioning no contents", id)]) .unwrap() .into_any() } RawConflict::VersioningBadKind(id) => PyTuple::new(py, [("versioning bad kind", id)]) .unwrap() .into_any(), RawConflict::Duplicate(id1, id2, path) => { PyTuple::new(py, [("duplicate", id1, id2, path)]) .unwrap() .into_any() } RawConflict::MissingParent(id) => PyTuple::new(py, [("missing parent", id)]) .unwrap() .into_any(), RawConflict::NonDirectoryParent(id) => PyTuple::new(py, [("non-directory parent", id)]) .unwrap() .into_any(), }) } } impl<'a, 'py> FromPyObject<'a, 'py> for RawConflict { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let tuple = ob.extract::>()?; match tuple.get_item(0)?.extract::()?.as_str() { "unversioned executability" => Ok(Self::UnversionedExecutability(TransId( tuple.get_item(1)?.extract::()?, ))), "non-file executability" => Ok(Self::NonFileExecutability(TransId( tuple.get_item(1)?.extract::()?, ))), "overwrite" => Ok(Self::Overwrite( TransId(tuple.get_item(1)?.extract::()?), tuple.get_item(2)?.extract::()?, )), "parent loop" => Ok(Self::ParentLoop(TransId( tuple.get_item(1)?.extract::()?, ))), "unversioned parent" => Ok(Self::UnversionedParent(TransId( tuple.get_item(1)?.extract::()?, ))), "versioning no contents" => Ok(Self::VersioningNoContents(TransId( tuple.get_item(1)?.extract::()?, ))), "versioning bad kind" => Ok(Self::VersioningBadKind(TransId( tuple.get_item(1)?.extract::()?, ))), "duplicate" => Ok(Self::Duplicate( TransId(tuple.get_item(1)?.extract::()?), TransId(tuple.get_item(2)?.extract::()?), tuple.get_item(3)?.extract::()?, )), "missing parent" => Ok(Self::MissingParent(TransId( tuple.get_item(1)?.extract::()?, ))), "non-directory parent" => Ok(Self::NonDirectoryParent(TransId( tuple.get_item(1)?.extract::()?, ))), _ => Err(PyErr::new::(format!( "Unknown conflict type: {}", tuple.get_item(0)?.extract::()? ))), } } } breezyshim-0.7.5/src/transport.rs000064400000000000000000000162171046102023000152160ustar 00000000000000//! Transport module use crate::error::Error; use pyo3::prelude::*; use pyo3::types::PyDict; use std::path::{Path, PathBuf}; /// A transport represents a way to access content in a branch. pub struct Transport(Py); impl Transport { /// Create a new transport from a Python object. pub fn new(obj: Py) -> Self { Transport(obj) } /// Get the underlying Py. pub(crate) fn as_pyobject(&self) -> &Py { &self.0 } /// Get the base URL of this transport. pub fn base(&self) -> url::Url { pyo3::Python::attach(|py| { self.as_pyobject() .getattr(py, "base") .unwrap() .extract::(py) .unwrap() .parse() .unwrap() }) } /// Check if this is a local transport. 
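    /// A minimal sketch, mirroring the tests at the bottom of this file:
    ///
    /// ```ignore
    /// use breezyshim::transport::get_transport;
    ///
    /// let dir = tempfile::tempdir().unwrap();
    /// let url = url::Url::from_file_path(dir.path()).unwrap();
    /// let transport = get_transport(&url, None).unwrap();
    /// // file:// transports are local, so local paths can be resolved.
    /// assert!(transport.is_local());
    /// ```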
pub fn is_local(&self) -> bool { pyo3::import_exception!(breezy.errors, NotLocalUrl); pyo3::Python::attach(|py| { self.0 .call_method1(py, "local_abspath", (".",)) .map(|_| true) .or_else(|e| { if e.is_instance_of::(py) { Ok::<_, PyErr>(false) } else { panic!("Unexpected error: {:?}", e) } }) .unwrap() }) } /// Get the local absolute path for a path within this transport. pub fn local_abspath(&self, path: &Path) -> Result { pyo3::Python::attach(|py| { Ok(self .0 .call_method1(py, "local_abspath", (path.to_string_lossy().as_ref(),))? .extract::(py)?) }) } /// Check if a path exists in this transport. pub fn has(&self, path: &str) -> Result { pyo3::Python::attach(|py| { Ok(self .0 .call_method1(py, "has", (path,))? .extract::(py) .unwrap()) }) } /// Ensure the base directory exists. pub fn ensure_base(&self) -> Result<(), Error> { pyo3::Python::attach(|py| { self.0.call_method0(py, "ensure_base")?; Ok(()) }) } /// Create all the directories leading up to the final path component. pub fn create_prefix(&self) -> Result<(), Error> { pyo3::Python::attach(|py| { self.0.call_method0(py, "create_prefix")?; Ok(()) }) } /// Create a new transport with a different path. pub fn clone(&self, path: &str) -> Result { pyo3::Python::attach(|py| { let o = self.0.call_method1(py, "clone", (path,))?; Ok(Transport(o)) }) } } impl<'a, 'py> FromPyObject<'a, 'py> for Transport { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Transport(obj.to_owned().unbind())) } } impl<'py> IntoPyObject<'py> for Transport { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } /// Get a transport for a given URL. /// /// # Arguments /// * `url` - The URL to get a transport for /// * `possible_transports` - Optional list of transports to try reusing pub fn get_transport( url: &url::Url, possible_transports: Option<&mut Vec>, ) -> Result { pyo3::Python::attach(|py| { let urlutils = py.import("breezy.transport").unwrap(); let kwargs = PyDict::new(py); kwargs.set_item( "possible_transports", possible_transports.map(|t| { t.iter() .map(|t| t.0.clone_ref(py)) .collect::>>() }), )?; let o = urlutils.call_method("get_transport", (url.to_string(),), Some(&kwargs))?; Ok(Transport(o.unbind())) }) } #[cfg(test)] mod tests { use super::*; use std::path::Path; #[test] fn test_get_transport() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); // Test base URL let base = transport.base(); assert!(base.to_string().starts_with("file://")); } #[test] fn test_transport_is_local() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); assert!(transport.is_local()); } #[test] fn test_transport_local_abspath() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); let path = Path::new("test.txt"); let abspath = transport.local_abspath(path).unwrap(); assert!(abspath.is_absolute()); } #[test] fn test_transport_has() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); // Test for non-existent file let exists = transport.has("nonexistent.txt").unwrap(); assert!(!exists); } #[test] fn test_transport_ensure_base() { let td = 
tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); let result = transport.ensure_base(); assert!(result.is_ok()); } #[test] fn test_transport_create_prefix() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); let result = transport.create_prefix(); assert!(result.is_ok()); } #[test] fn test_transport_clone() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); let cloned = transport.clone("subdir").unwrap(); let cloned_base = cloned.base(); assert!(cloned_base.to_string().contains("subdir")); } #[test] fn test_transport_into_pyobject() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let transport = get_transport(&url, None).unwrap(); Python::attach(|py| { let _pyobj = transport.into_pyobject(py).unwrap(); }); } #[test] fn test_get_transport_with_possible_transports() { let td = tempfile::tempdir().unwrap(); let url = url::Url::from_file_path(td.path()).unwrap(); let mut possible_transports = vec![]; let transport = get_transport(&url, Some(&mut possible_transports)).unwrap(); let base = transport.base(); assert!(base.to_string().starts_with("file://")); } } breezyshim-0.7.5/src/tree.rs000064400000000000000000002010141046102023000141100ustar 00000000000000//! Trees use crate::error::Error; use crate::lock::Lock; use crate::revisionid::RevisionId; use pyo3::intern; use pyo3::prelude::*; /// Type alias for std::path::Path. pub type Path = std::path::Path; /// Type alias for std::path::PathBuf. pub type PathBuf = std::path::PathBuf; /// Result of walking directories in a tree. #[derive(Debug)] pub struct WalkdirResult { /// The path relative to the tree root. pub relpath: PathBuf, /// The absolute path. pub abspath: PathBuf, /// The kind of the entry. pub kind: Kind, /// The stat information for the entry. pub stat: Option, /// Whether the entry is versioned. pub versioned: bool, } /// Summary of path content. #[derive(Debug)] pub struct PathContentSummary { /// The kind of the content. pub kind: Kind, /// The size in bytes (for files). pub size: Option, /// Whether the file is executable. pub executable: Option, /// The SHA1 hash (for files). pub sha1: Option, /// The target (for symlinks). pub target: Option, } /// Search rule for path matching. #[derive(Debug)] pub struct SearchRule { /// The pattern to match. pub pattern: String, /// The type of rule. pub rule_type: SearchRuleType, } /// Type of search rule. #[derive(Debug)] pub enum SearchRuleType { /// Include the matched paths. Include, /// Exclude the matched paths. Exclude, } /// Represents a conflict in a tree. #[derive(Debug)] pub struct Conflict { /// The path involved in the conflict. pub path: PathBuf, /// The type of conflict. pub conflict_type: String, /// Additional information about the conflict. pub message: Option, } impl<'a, 'py> FromPyObject<'a, 'py> for Conflict { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let path: String = ob.getattr("path")?.extract()?; let conflict_type: String = ob.getattr("typestring")?.extract()?; let message: Option = ob.getattr("message").ok().and_then(|m| m.extract().ok()); Ok(Conflict { path: PathBuf::from(path), conflict_type, message, }) } } /// Represents a tree reference. 
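/// A minimal construction sketch (a reference to a nested tree pinned at a
/// specific revision; the field values here are purely illustrative):
///
/// ```ignore
/// use std::path::PathBuf;
///
/// let reference = TreeReference {
///     path: PathBuf::from("nested"),
///     kind: Kind::TreeReference,
///     reference_revision: Some(RevisionId::null()),
/// };
/// ```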
#[derive(Debug)] pub struct TreeReference { /// The path where the reference should be added. pub path: PathBuf, /// The kind of reference. pub kind: Kind, /// The reference revision. pub reference_revision: Option, } /// Represents a change in the inventory. #[derive(Debug)] pub struct InventoryDelta { /// The old path (None if new). pub old_path: Option, /// The new path (None if deleted). pub new_path: Option, /// The file ID. pub file_id: String, /// The entry details. pub entry: Option, } #[derive(Debug, PartialEq, Clone, Eq)] /// Kind of object in a tree. pub enum Kind { /// Regular file. File, /// Directory. Directory, /// Symbolic link. Symlink, /// Reference to another tree. TreeReference, } impl Kind { /// Get a marker string for this kind of tree object. pub fn marker(&self) -> &'static str { match self { Kind::File => "", Kind::Directory => "/", Kind::Symlink => "@", Kind::TreeReference => "+", } } } impl std::fmt::Display for Kind { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Kind::File => write!(f, "file"), Kind::Directory => write!(f, "directory"), Kind::Symlink => write!(f, "symlink"), Kind::TreeReference => write!(f, "tree-reference"), } } } impl std::str::FromStr for Kind { type Err = String; fn from_str(s: &str) -> Result { match s { "file" => Ok(Kind::File), "directory" => Ok(Kind::Directory), "symlink" => Ok(Kind::Symlink), "tree-reference" => Ok(Kind::TreeReference), n => Err(format!("Invalid kind: {}", n)), } } } impl<'py> pyo3::IntoPyObject<'py> for Kind { type Target = pyo3::PyAny; type Output = pyo3::Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: pyo3::Python<'py>) -> Result { let s = match self { Kind::File => "file", Kind::Directory => "directory", Kind::Symlink => "symlink", Kind::TreeReference => "tree-reference", }; Ok(pyo3::types::PyString::new(py, s).into_any()) } } impl<'a, 'py> pyo3::FromPyObject<'a, 'py> for Kind { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, pyo3::PyAny>) -> PyResult { let s: String = ob.extract()?; match s.as_str() { "file" => Ok(Kind::File), "directory" => Ok(Kind::Directory), "symlink" => Ok(Kind::Symlink), "tree-reference" => Ok(Kind::TreeReference), _ => Err(pyo3::exceptions::PyValueError::new_err(format!( "Invalid kind: {}", s ))), } } } /// A tree entry, representing different types of objects in a tree. #[derive(Debug)] pub enum TreeEntry { /// A regular file entry. File { /// Whether the file is executable. executable: bool, /// The kind of file. kind: Kind, /// The revision ID that introduced this file, if known. revision: Option, /// The size of the file in bytes. size: u64, }, /// A directory entry. Directory { /// The revision ID that introduced this directory, if known. revision: Option, }, /// A symbolic link entry. Symlink { /// The revision ID that introduced this symlink, if known. revision: Option, /// The target path of the symbolic link. symlink_target: String, }, /// A reference to another tree. TreeReference { /// The revision ID that introduced this reference, if known. revision: Option, /// The revision ID this reference points to. 
reference_revision: RevisionId, }, } impl<'a, 'py> FromPyObject<'a, 'py> for TreeEntry { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let kind: String = ob.getattr("kind")?.extract()?; match kind.as_str() { "file" => { let executable: bool = ob.getattr("executable")?.extract()?; let kind: Kind = ob.getattr("kind")?.extract()?; let size: u64 = ob.getattr("size")?.extract()?; let revision: Option = ob.getattr("revision")?.extract()?; Ok(TreeEntry::File { executable, kind, size, revision, }) } "directory" => { let revision: Option = ob.getattr("revision")?.extract()?; Ok(TreeEntry::Directory { revision }) } "symlink" => { let revision: Option = ob.getattr("revision")?.extract()?; let symlink_target: String = ob.getattr("symlink_target")?.extract()?; Ok(TreeEntry::Symlink { revision, symlink_target, }) } "tree-reference" => { let revision: Option = ob.getattr("revision")?.extract()?; let reference_revision: RevisionId = ob.getattr("reference_revision")?.extract()?; Ok(TreeEntry::TreeReference { revision, reference_revision, }) } kind => panic!("Invalid kind: {}", kind), } } } impl<'py> IntoPyObject<'py> for TreeEntry { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { let dict = pyo3::types::PyDict::new(py); match self { TreeEntry::File { executable, kind: _, revision, size, } => { dict.set_item("kind", "file").unwrap(); dict.set_item("executable", executable).unwrap(); dict.set_item("size", size).unwrap(); dict.set_item("revision", revision).unwrap(); } TreeEntry::Directory { revision } => { dict.set_item("kind", "directory").unwrap(); dict.set_item("revision", revision).unwrap(); } TreeEntry::Symlink { revision, symlink_target, } => { dict.set_item("kind", "symlink").unwrap(); dict.set_item("revision", revision).unwrap(); dict.set_item("symlink_target", symlink_target).unwrap(); } TreeEntry::TreeReference { revision, reference_revision, } => { dict.set_item("kind", "tree-reference").unwrap(); dict.set_item("revision", revision).unwrap(); dict.set_item("reference_revision", reference_revision) .unwrap(); } } Ok(dict.into_any()) } } /// The core tree interface that provides access to content and metadata. /// /// A tree represents a structured collection of files that can be /// read, modified, and compared, depending on the implementation. pub trait Tree { /// Get a dictionary of tags and their revision IDs. fn get_tag_dict(&self) -> Result, Error>; /// Get a file from the tree as a readable stream. fn get_file(&self, path: &Path) -> Result, Error>; /// Get the contents of a file from the tree as a byte vector. fn get_file_text(&self, path: &Path) -> Result, Error>; /// Get the contents of a file as a vector of lines (byte vectors). fn get_file_lines(&self, path: &Path) -> Result>, Error>; /// Lock the tree for read operations. fn lock_read(&self) -> Result; /// Check if a file exists in the tree at the specified path. fn has_filename(&self, path: &Path) -> bool; /// Get the target of a symbolic link. fn get_symlink_target(&self, path: &Path) -> Result; /// Get the IDs of the parent revisions of this tree. fn get_parent_ids(&self) -> Result, Error>; /// Check if a path is ignored by version control. fn is_ignored(&self, path: &Path) -> Option; /// Get the kind of object at the specified path (file, directory, symlink, etc.). fn kind(&self, path: &Path) -> Result; /// Check if a path is under version control. 
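    /// A minimal sketch (assuming some concrete `Tree`, e.g. a working tree or a
    /// `GenericTree`):
    ///
    /// ```ignore
    /// use std::path::Path;
    ///
    /// // Paths are interpreted relative to the tree root.
    /// if tree.is_versioned(Path::new("src/lib.rs")) {
    ///     let kind = tree.kind(Path::new("src/lib.rs")).unwrap();
    ///     println!("tracked as {}", kind);
    /// }
    /// ```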
fn is_versioned(&self, path: &Path) -> bool; /// Iterate through the changes between this tree and another tree. /// /// # Arguments /// * `other` - The other tree to compare against /// * `specific_files` - Optional list of specific files to check /// * `want_unversioned` - Whether to include unversioned files /// * `require_versioned` - Whether to require files to be versioned fn iter_changes( &self, other: &dyn PyTree, specific_files: Option<&[&Path]>, want_unversioned: Option, require_versioned: Option, ) -> Result>>, Error>; /// Check if this tree supports versioned directories. fn has_versioned_directories(&self) -> bool; /// Get a preview of transformations that would be applied to this tree. fn preview_transform(&self) -> Result; /// List files in the tree, optionally recursively. /// /// # Arguments /// * `include_root` - Whether to include the root directory /// * `from_dir` - Starting directory (if not the root) /// * `recursive` - Whether to recurse into subdirectories /// * `recurse_nested` - Whether to recurse into nested trees fn list_files( &self, include_root: Option, from_dir: Option<&Path>, recursive: Option, recurse_nested: Option, ) -> Result>>, Error>; /// Iterate through entries in a directory. /// /// # Arguments /// * `path` - Path to the directory to list fn iter_child_entries( &self, path: &std::path::Path, ) -> Result>>, Error>; /// Get the size of a file in bytes. fn get_file_size(&self, path: &Path) -> Result; /// Get the SHA1 hash of a file's contents. fn get_file_sha1( &self, path: &Path, stat_value: Option<&std::fs::Metadata>, ) -> Result; /// Get the modification time of a file. fn get_file_mtime(&self, path: &Path) -> Result; /// Check if a file is executable. fn is_executable(&self, path: &Path) -> Result; /// Get the stored kind of a file (as opposed to the actual kind on disk). fn stored_kind(&self, path: &Path) -> Result; /// Check if the tree supports content filtering. fn supports_content_filtering(&self) -> bool; /// Check if the tree supports file IDs. fn supports_file_ids(&self) -> bool; /// Check if the tree supports rename tracking. fn supports_rename_tracking(&self) -> bool; /// Check if the tree supports symbolic links. fn supports_symlinks(&self) -> bool; /// Check if the tree supports tree references. fn supports_tree_reference(&self) -> bool; /// Get unversioned files in the tree. fn unknowns(&self) -> Result, Error>; /// Get all versioned paths in the tree. fn all_versioned_paths( &self, ) -> Result>>, Error>; /// Get conflicts in the tree. fn conflicts(&self) -> Result, Error>; /// Get extra (unversioned) files in the tree. fn extras(&self) -> Result, Error>; /// Filter out versioned files from a list of paths. fn filter_unversioned_files(&self, paths: &[&Path]) -> Result, Error>; /// Walk directories in the tree. fn walkdirs( &self, prefix: Option<&Path>, ) -> Result>>, Error>; /// Check if a file kind is versionable. fn versionable_kind(&self, kind: &Kind) -> bool; /// Get file content summary for a path. fn path_content_summary(&self, path: &Path) -> Result; /// Iterate through file bytes. fn iter_files_bytes( &self, paths: &[&Path], ) -> Result), Error>>>, Error>; /// Iterate through entries by directory. fn iter_entries_by_dir( &self, specific_files: Option<&[&Path]>, ) -> Result>>, Error>; /// Get file verifier information. fn get_file_verifier( &self, path: &Path, stat_value: Option<&std::fs::Metadata>, ) -> Result<(String, Vec), Error>; /// Get the reference revision for a tree reference. 
fn get_reference_revision(&self, path: &Path) -> Result; /// Create an archive of the tree. fn archive( &self, format: &str, name: &str, root: Option<&str>, subdir: Option<&Path>, force_mtime: Option, recurse_nested: bool, ) -> Result, Error>>>, Error>; /// Annotate a file with revision information. fn annotate_iter( &self, path: &Path, default_revision: Option<&RevisionId>, ) -> Result), Error>>>, Error>; /// Check if a path is a special path (e.g., control directory). fn is_special_path(&self, path: &Path) -> bool; /// Iterate through search rules. fn iter_search_rules( &self, paths: &[&Path], ) -> Result>>, Error>; } /// Trait for Python tree objects that can be converted to and from Python objects. /// /// This trait is implemented by all tree types that wrap Python objects. pub trait PyTree: Tree + std::any::Any { /// Get the underlying Python object for this tree. fn to_object(&self, py: Python) -> Py; } impl dyn PyTree { /// Get a reference to self as a Tree trait object. pub fn as_tree(&self) -> &dyn Tree { self } } impl Tree for T { fn get_tag_dict(&self) -> Result, Error> { Python::attach(|py| { let branch = self.to_object(py).getattr(py, "branch")?; let tags = branch.getattr(py, "tags")?; let tag_dict = tags.call_method0(py, intern!(py, "get_tag_dict"))?; tag_dict.extract(py) }) .map_err(|e: PyErr| -> Error { e.into() }) } fn get_file(&self, path: &Path) -> Result, Error> { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let f = self .to_object(py) .call_method1(py, "get_file", (path_str,))?; let f = pyo3_filelike::PyBinaryFile::from(f); Ok(Box::new(f) as Box) }) } fn get_file_text(&self, path: &Path) -> Result, Error> { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let text = self .to_object(py) .call_method1(py, "get_file_text", (path_str,))?; text.extract(py).map_err(Into::into) }) } fn get_file_lines(&self, path: &Path) -> Result>, Error> { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let lines = self .to_object(py) .call_method1(py, "get_file_lines", (path_str,))?; lines.extract(py).map_err(Into::into) }) } fn lock_read(&self) -> Result { Python::attach(|py| { let lock = self .to_object(py) .call_method0(py, intern!(py, "lock_read"))?; Ok(Lock::from(lock)) }) } fn has_filename(&self, path: &Path) -> bool { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, intern!(py, "has_filename"), (path_str,)) .and_then(|result| result.extract(py)) .unwrap_or(false) }) } fn get_symlink_target(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let target = self .to_object(py) .call_method1(py, "get_symlink_target", (path_str,))?; target.extract(py).map_err(Into::into) }) } fn get_parent_ids(&self) -> Result, Error> { Python::attach(|py| { Ok(self .to_object(py) .call_method0(py, intern!(py, "get_parent_ids")) .unwrap() .extract(py)?) 
}) } fn is_ignored(&self, path: &Path) -> Option { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, "is_ignored", (path_str,)) .unwrap() .extract(py) .unwrap() }) } fn kind(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, "kind", (path_str,)) .unwrap() .extract(py) .map_err(Into::into) }) } fn is_versioned(&self, path: &Path) -> bool { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, "is_versioned", (path_str,)) .unwrap() .extract(py) .unwrap() }) } fn iter_changes( &self, other: &dyn PyTree, specific_files: Option<&[&Path]>, want_unversioned: Option, require_versioned: Option, ) -> Result>>, Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(specific_files) = specific_files { kwargs.set_item( "specific_files", specific_files .iter() .map(|p| p.to_string_lossy().to_string()) .collect::>(), )?; } if let Some(want_unversioned) = want_unversioned { kwargs.set_item("want_unversioned", want_unversioned)?; } if let Some(require_versioned) = require_versioned { kwargs.set_item("require_versioned", require_versioned)?; } struct TreeChangeIter(pyo3::Py); impl Iterator for TreeChangeIter { type Item = Result; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } Ok(Box::new(TreeChangeIter(self.to_object(py).call_method( py, "iter_changes", (other.to_object(py),), Some(&kwargs), )?)) as Box>>) }) } fn has_versioned_directories(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "has_versioned_directories") .unwrap() .extract(py) .unwrap() }) } fn preview_transform(&self) -> Result { Python::attach(|py| { let transform = self.to_object(py).call_method0(py, "preview_transform")?; Ok(crate::transform::TreeTransform::from(transform)) }) } fn list_files( &self, include_root: Option, from_dir: Option<&Path>, recursive: Option, recurse_nested: Option, ) -> Result>>, Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(include_root) = include_root { kwargs.set_item("include_root", include_root)?; } if let Some(from_dir) = from_dir { kwargs.set_item("from_dir", from_dir.to_string_lossy().to_string())?; } if let Some(recursive) = recursive { kwargs.set_item("recursive", recursive)?; } if let Some(recurse_nested) = recurse_nested { kwargs.set_item("recurse_nested", recurse_nested)?; } struct ListFilesIter(pyo3::Py); impl Iterator for ListFilesIter { type Item = Result<(PathBuf, bool, Kind, TreeEntry), Error>; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } Ok(Box::new(ListFilesIter(self.to_object(py).call_method( py, "list_files", (), Some(&kwargs), )?)) as Box< dyn Iterator>, >) }) .map_err(|e: PyErr| -> Error { e.into() }) } fn iter_child_entries( &self, path: &std::path::Path, ) -> Result>>, Error> { Python::attach(|py| { struct IterChildEntriesIter(pyo3::Py); impl Iterator for IterChildEntriesIter { 
type Item = Result<(PathBuf, Kind, TreeEntry), Error>; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } let path_str = path.to_string_lossy().to_string(); Ok( Box::new(IterChildEntriesIter(self.to_object(py).call_method1( py, "iter_child_entries", (path_str,), )?)) as Box>>, ) }) } fn get_file_size(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let size = self .to_object(py) .call_method1(py, "get_file_size", (path_str,))?; size.extract(py).map_err(Into::into) }) } fn get_file_sha1( &self, path: &Path, _stat_value: Option<&std::fs::Metadata>, ) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let sha1 = self .to_object(py) .call_method1(py, "get_file_sha1", (path_str,))?; sha1.extract(py).map_err(Into::into) }) } fn get_file_mtime(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let mtime = self .to_object(py) .call_method1(py, "get_file_mtime", (path_str,))?; mtime.extract(py).map_err(Into::into) }) } fn is_executable(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let result = self .to_object(py) .call_method1(py, "is_executable", (path_str,))?; result.extract(py).map_err(Into::into) }) } fn stored_kind(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, "stored_kind", (path_str,))? .extract(py) .map_err(Into::into) }) } fn supports_content_filtering(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "supports_content_filtering") .unwrap() .extract(py) .unwrap() }) } fn supports_file_ids(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "supports_file_ids") .unwrap() .extract(py) .unwrap() }) } fn supports_rename_tracking(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "supports_rename_tracking") .unwrap() .extract(py) .unwrap() }) } fn supports_symlinks(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "supports_symlinks") .unwrap() .extract(py) .unwrap() }) } fn supports_tree_reference(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "supports_tree_reference") .unwrap() .extract(py) .unwrap() }) } fn unknowns(&self) -> Result, Error> { Python::attach(|py| { let unknowns = self.to_object(py).call_method0(py, "unknowns")?; unknowns.extract(py).map_err(Into::into) }) } fn all_versioned_paths( &self, ) -> Result>>, Error> { Python::attach(|py| { struct AllVersionedPathsIter(pyo3::Py); impl Iterator for AllVersionedPathsIter { type Item = Result; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } Ok(Box::new(AllVersionedPathsIter( self.to_object(py).call_method0(py, "all_versioned_paths")?, )) as Box>>) }) } fn conflicts(&self) -> Result, Error> { Python::attach(|py| { let conflicts = self.to_object(py).call_method0(py, "conflicts")?; 
conflicts.extract(py).map_err(Into::into) }) } fn extras(&self) -> Result, Error> { Python::attach(|py| { let extras = self.to_object(py).call_method0(py, "extras")?; extras.extract(py).map_err(Into::into) }) } fn filter_unversioned_files(&self, paths: &[&Path]) -> Result, Error> { Python::attach(|py| { let path_strings: Vec = paths .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); let result = self.to_object(py) .call_method1(py, "filter_unversioned_files", (path_strings,))?; result.extract(py).map_err(Into::into) }) } fn walkdirs( &self, prefix: Option<&Path>, ) -> Result>>, Error> { Python::attach(|py| { struct WalkdirsIter(pyo3::Py); impl Iterator for WalkdirsIter { type Item = Result; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { let tuple = match next .extract::<(String, String, String, Option>, bool)>(py) { Ok(t) => t, Err(e) => return Some(Err(e.into())), }; Some(Ok(WalkdirResult { relpath: PathBuf::from(tuple.0), abspath: PathBuf::from(tuple.1), kind: tuple.2.parse().unwrap(), stat: None, // TODO: convert Python stat to Rust metadata versioned: tuple.4, })) } }) } } let prefix_str = prefix.map(|p| p.to_string_lossy().to_string()); Ok(Box::new(WalkdirsIter(self.to_object(py).call_method1( py, "walkdirs", (prefix_str,), )?)) as Box>>) }) } fn versionable_kind(&self, kind: &Kind) -> bool { Python::attach(|py| { self.to_object(py) .call_method1(py, "versionable_kind", (kind.clone(),)) .unwrap() .extract(py) .unwrap() }) } fn path_content_summary(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let summary = self.to_object(py) .call_method1(py, "path_content_summary", (path_str,))?; let summary_bound = summary.bind(py); let kind: String = summary_bound.get_item("kind")?.extract()?; let size: Option = summary_bound .get_item("size") .ok() .map(|v| v.extract().expect("size should be u64")); let executable: Option = summary_bound .get_item("executable") .ok() .map(|v| v.extract().expect("executable should be bool")); let sha1: Option = summary_bound .get_item("sha1") .ok() .map(|v| v.extract().expect("sha1 should be string")); let target: Option = summary_bound .get_item("target") .ok() .map(|v| v.extract().expect("target should be string")); Ok(PathContentSummary { kind: kind.parse().unwrap(), size, executable, sha1, target, }) }) } fn iter_files_bytes( &self, paths: &[&Path], ) -> Result), Error>>>, Error> { Python::attach(|py| { struct IterFilesBytesIter(pyo3::Py); impl Iterator for IterFilesBytesIter { type Item = Result<(PathBuf, Vec), Error>; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } let path_strings: Vec = paths .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); Ok(Box::new(IterFilesBytesIter(self.to_object(py).call_method1( py, "iter_files_bytes", (path_strings,), )?)) as Box< dyn Iterator), Error>>, >) }) } fn iter_entries_by_dir( &self, specific_files: Option<&[&Path]>, ) -> Result>>, Error> { Python::attach(|py| { struct IterEntriesByDirIter(pyo3::Py); impl Iterator for IterEntriesByDirIter { type Item = Result<(PathBuf, 
TreeEntry), Error>; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } let kwargs = pyo3::types::PyDict::new(py); if let Some(specific_files) = specific_files { let path_strings: Vec = specific_files .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); kwargs.set_item("specific_files", path_strings)?; } Ok( Box::new(IterEntriesByDirIter(self.to_object(py).call_method( py, "iter_entries_by_dir", (), Some(&kwargs), )?)) as Box>>, ) }) } fn get_file_verifier( &self, path: &Path, _stat_value: Option<&std::fs::Metadata>, ) -> Result<(String, Vec), Error> { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let result = self .to_object(py) .call_method1(py, "get_file_verifier", (path_str,))?; result.extract(py).map_err(Into::into) }) } fn get_reference_revision(&self, path: &Path) -> Result { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); let rev = self .to_object(py) .call_method1(py, "get_reference_revision", (path_str,))?; rev.extract(py).map_err(Into::into) }) } fn archive( &self, format: &str, name: &str, root: Option<&str>, subdir: Option<&Path>, force_mtime: Option, recurse_nested: bool, ) -> Result, Error>>>, Error> { Python::attach(|py| { struct ArchiveIter(pyo3::Py); impl Iterator for ArchiveIter { type Item = Result, Error>; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } let kwargs = pyo3::types::PyDict::new(py); kwargs.set_item("format", format)?; kwargs.set_item("name", name)?; if let Some(root) = root { kwargs.set_item("root", root)?; } if let Some(subdir) = subdir { kwargs.set_item("subdir", subdir.to_string_lossy().to_string())?; } if let Some(force_mtime) = force_mtime { kwargs.set_item("force_mtime", force_mtime)?; } kwargs.set_item("recurse_nested", recurse_nested)?; Ok(Box::new(ArchiveIter(self.to_object(py).call_method( py, "archive", (), Some(&kwargs), )?)) as Box, Error>>>) }) } fn annotate_iter( &self, path: &Path, default_revision: Option<&RevisionId>, ) -> Result), Error>>>, Error> { Python::attach(|py| { struct AnnotateIter(pyo3::Py); impl Iterator for AnnotateIter { type Item = Result<(RevisionId, Vec), Error>; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { Some(next.extract(py).map_err(Into::into)) } }) } } let path_str = path.to_string_lossy().to_string(); let kwargs = pyo3::types::PyDict::new(py); if let Some(default_revision) = default_revision { kwargs.set_item( "default_revision", default_revision.clone().into_pyobject(py).unwrap(), )?; } Ok(Box::new(AnnotateIter(self.to_object(py).call_method( py, "annotate_iter", (path_str,), Some(&kwargs), )?)) as Box< dyn Iterator), Error>>, >) }) } fn is_special_path(&self, path: &Path) -> bool { Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, "is_special_path", (path_str,)) .unwrap() 
.extract(py) .unwrap() }) } fn iter_search_rules( &self, paths: &[&Path], ) -> Result>>, Error> { Python::attach(|py| { struct IterSearchRulesIter(pyo3::Py); impl Iterator for IterSearchRulesIter { type Item = Result; fn next(&mut self) -> Option { Python::attach(|py| { let next = match self.0.call_method0(py, intern!(py, "__next__")) { Ok(v) => v, Err(e) => { if e.is_instance_of::(py) { return None; } return Some(Err(e.into())); } }; if next.is_none(py) { None } else { let tuple = match next.extract::<(String, String)>(py) { Ok(t) => t, Err(e) => return Some(Err(e.into())), }; let rule_type = match tuple.1.as_str() { "include" => SearchRuleType::Include, "exclude" => SearchRuleType::Exclude, _ => { return Some(Err(Error::Other(PyErr::new::< pyo3::exceptions::PyValueError, _, >( "Unknown search rule type" )))) } }; Some(Ok(SearchRule { pattern: tuple.0, rule_type, })) } }) } } let path_strings: Vec = paths .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); Ok( Box::new(IterSearchRulesIter(self.to_object(py).call_method1( py, "iter_search_rules", (path_strings,), )?)) as Box>>, ) }) } } /// A generic tree implementation that wraps any Python tree object. pub struct GenericTree(Py); impl<'py> IntoPyObject<'py> for GenericTree { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl From> for GenericTree { fn from(obj: Py) -> Self { GenericTree(obj) } } impl PyTree for GenericTree { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } /// Trait for trees that support modification operations. pub trait MutableTree: Tree { /// Add specified files to version control. fn add(&self, files: &[&Path]) -> Result<(), Error>; /// Lock the tree for write operations. fn lock_write(&self) -> Result; /// Write bytes to a file in the tree without atomic guarantees. fn put_file_bytes_non_atomic(&self, path: &Path, data: &[u8]) -> Result<(), Error>; /// Check if the tree has any uncommitted changes. fn has_changes(&self) -> std::result::Result; /// Create a directory in the tree. fn mkdir(&self, path: &Path) -> Result<(), Error>; /// Remove specified files from version control and from the filesystem. fn remove(&self, files: &[&std::path::Path]) -> Result<(), Error>; /// Add a tree reference. fn add_reference(&self, reference: &TreeReference) -> Result<(), Error>; /// Copy a file or directory to a new location. fn copy_one(&self, from_path: &Path, to_path: &Path) -> Result<(), Error>; /// Get the last revision ID. fn last_revision(&self) -> Result; /// Lock the tree for write operations. fn lock_tree_write(&self) -> Result; /// Set the parent IDs for this tree. fn set_parent_ids(&self, parent_ids: &[RevisionId]) -> Result<(), Error>; /// Set the parent trees for this tree. fn set_parent_trees(&self, parent_trees: &[(RevisionId, RevisionTree)]) -> Result<(), Error>; /// Apply a delta to the tree. fn apply_inventory_delta(&self, delta: Vec) -> Result<(), Error>; /// Commit changes in the tree. fn commit( &self, message: &str, committer: Option<&str>, timestamp: Option, allow_pointless: Option, specific_files: Option<&[&Path]>, ) -> Result; } /// A tree that can be modified. pub trait PyMutableTree: PyTree + MutableTree {} impl dyn PyMutableTree { /// Get a reference to self as a MutableTree trait object. 
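// Illustrative sketch (an addition, not part of the upstream source): a minimal
// MutableTree workflow mirroring the `test_remove` test in this module — create a
// standalone working tree, add a file, and commit it. Paths handed to `add`,
// `mkdir`, `remove` and friends must be relative to the tree root; the crate's
// own tests wrap this pattern in `crate::testing::TestEnv` for isolation.
#[allow(dead_code)]
fn example_mutable_tree_workflow() {
    use crate::controldir::{create_standalone_workingtree, ControlDirFormat};
    // Assumes the current directory is a scratch location.
    let wt = create_standalone_workingtree(
        std::path::Path::new("."),
        &ControlDirFormat::default(),
    )
    .unwrap();
    std::fs::write("README", b"hello\n").unwrap();
    // `add` asserts that every path is relative.
    wt.add(&[std::path::Path::new("README")]).unwrap();
    wt.build_commit()
        .message("Add README")
        .reporter(&crate::commit::NullCommitReporter::new())
        .commit()
        .unwrap();
}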
pub fn as_mutable_tree(&self) -> &dyn MutableTree { self } } impl MutableTree for T { fn add(&self, files: &[&Path]) -> Result<(), Error> { for f in files { assert!(f.is_relative()); } Python::attach(|py| -> Result<(), PyErr> { let path_strings: Vec = files .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); self.to_object(py) .call_method1(py, "add", (path_strings,))?; Ok(()) }) .map_err(Into::into) } fn lock_write(&self) -> Result { Python::attach(|py| { let lock = self .to_object(py) .call_method0(py, intern!(py, "lock_write"))?; Ok(Lock::from(lock)) }) } fn put_file_bytes_non_atomic(&self, path: &Path, data: &[u8]) -> Result<(), Error> { assert!(path.is_relative()); Python::attach(|py| { let path_str = path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, "put_file_bytes_non_atomic", (path_str, data))?; Ok(()) }) } fn has_changes(&self) -> std::result::Result { Python::attach(|py| { self.to_object(py) .call_method0(py, "has_changes")? .extract::(py) .map_err(Into::into) }) } fn mkdir(&self, path: &Path) -> Result<(), Error> { assert!(path.is_relative()); Python::attach(|py| -> Result<(), PyErr> { let path_str = path.to_string_lossy().to_string(); self.to_object(py).call_method1(py, "mkdir", (path_str,))?; Ok(()) }) .map_err(Into::into) } fn remove(&self, files: &[&std::path::Path]) -> Result<(), Error> { for f in files { assert!(f.is_relative()); } Python::attach(|py| -> Result<(), PyErr> { let path_strings: Vec = files .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); self.to_object(py) .call_method1(py, "remove", (path_strings,))?; Ok(()) }) .map_err(Into::into) } fn add_reference(&self, reference: &TreeReference) -> Result<(), Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); kwargs.set_item("path", reference.path.to_string_lossy().to_string())?; kwargs.set_item("kind", reference.kind.clone())?; if let Some(ref rev) = reference.reference_revision { kwargs.set_item("reference_revision", rev.clone().into_pyobject(py).unwrap())?; } self.to_object(py) .call_method(py, "add_reference", (), Some(&kwargs))?; Ok(()) }) } fn copy_one(&self, from_path: &Path, to_path: &Path) -> Result<(), Error> { assert!(from_path.is_relative()); assert!(to_path.is_relative()); Python::attach(|py| { let from_str = from_path.to_string_lossy().to_string(); let to_str = to_path.to_string_lossy().to_string(); self.to_object(py) .call_method1(py, "copy_one", (from_str, to_str))?; Ok(()) }) } fn last_revision(&self) -> Result { Python::attach(|py| { let last_revision = self .to_object(py) .call_method0(py, intern!(py, "last_revision"))?; Ok(RevisionId::from(last_revision.extract::>(py)?)) }) } fn lock_tree_write(&self) -> Result { Python::attach(|py| { let lock = self.to_object(py).call_method0(py, "lock_tree_write")?; Ok(Lock::from(lock)) }) } fn set_parent_ids(&self, parent_ids: &[RevisionId]) -> Result<(), Error> { Python::attach(|py| { let parent_ids_py: Vec> = parent_ids .iter() .map(|id| id.clone().into_pyobject(py).unwrap().unbind()) .collect(); self.to_object(py) .call_method1(py, "set_parent_ids", (parent_ids_py,))?; Ok(()) }) } fn set_parent_trees(&self, parent_trees: &[(RevisionId, RevisionTree)]) -> Result<(), Error> { Python::attach(|py| { let parent_trees_py: Vec<(Py, Py)> = parent_trees .iter() .map(|(id, tree)| { ( id.clone().into_pyobject(py).unwrap().unbind(), tree.to_object(py), ) }) .collect(); self.to_object(py) .call_method1(py, "set_parent_trees", (parent_trees_py,))?; Ok(()) }) } fn apply_inventory_delta(&self, delta: Vec) -> 
Result<(), Error> { Python::attach(|py| { let delta_py: Vec> = delta .into_iter() .map(|d| { let tuple = pyo3::types::PyTuple::new( py, vec![ d.old_path .map(|p| p.to_string_lossy().to_string()) .into_pyobject(py) .unwrap() .into_any(), d.new_path .map(|p| p.to_string_lossy().to_string()) .into_pyobject(py) .unwrap() .into_any(), d.file_id.into_pyobject(py).unwrap().into_any(), d.entry.into_pyobject(py).unwrap().into_any(), ], ) .unwrap(); tuple.into_any().unbind() }) .collect(); self.to_object(py) .call_method1(py, "apply_inventory_delta", (delta_py,))?; Ok(()) }) } fn commit( &self, message: &str, committer: Option<&str>, timestamp: Option, allow_pointless: Option, specific_files: Option<&[&Path]>, ) -> Result { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(committer) = committer { kwargs.set_item("committer", committer)?; } if let Some(timestamp) = timestamp { kwargs.set_item("timestamp", timestamp)?; } if let Some(allow_pointless) = allow_pointless { kwargs.set_item("allow_pointless", allow_pointless)?; } if let Some(specific_files) = specific_files { let file_paths: Vec = specific_files .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); kwargs.set_item("specific_files", file_paths)?; } let result = self .to_object(py) .call_method(py, "commit", (message,), Some(&kwargs))?; result.extract(py).map_err(Into::into) }) } } /// A read-only tree at a specific revision. pub struct RevisionTree(pub Py); impl<'py> IntoPyObject<'py> for RevisionTree { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl PyTree for RevisionTree { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl Clone for RevisionTree { fn clone(&self) -> Self { Python::attach(|py| RevisionTree(self.0.clone_ref(py))) } } impl RevisionTree { /// Get the repository this revision tree belongs to. pub fn repository(&self) -> crate::repository::GenericRepository { Python::attach(|py| { let repository = self.to_object(py).getattr(py, "_repository").unwrap(); crate::repository::GenericRepository::new(repository) }) } /// Get the revision ID of this tree. pub fn get_revision_id(&self) -> RevisionId { Python::attach(|py| { self.to_object(py) .call_method0(py, "get_revision_id") .unwrap() .extract(py) .unwrap() }) } /// Get the parent revision IDs of this tree. pub fn get_parent_ids(&self) -> Vec { Python::attach(|py| { self.to_object(py) .call_method0(py, intern!(py, "get_parent_ids")) .unwrap() .extract(py) .unwrap() }) } } #[derive(Debug, PartialEq, Eq, Clone)] /// Represents a change to a file in a tree. pub struct TreeChange { /// The path of the file, as (old_path, new_path). pub path: (Option, Option), /// Whether the content of the file changed. pub changed_content: bool, /// Whether the file is versioned, as (old_versioned, new_versioned). pub versioned: (Option, Option), /// The name of the file, as (old_name, new_name). pub name: (Option, Option), /// The kind of the file, as (old_kind, new_kind). pub kind: (Option, Option), /// Whether the file is executable, as (old_executable, new_executable). pub executable: (Option, Option), /// Whether the file was copied rather than just changed/renamed. 
pub copied: bool, } impl<'py> IntoPyObject<'py> for TreeChange { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { let dict = pyo3::types::PyDict::new(py); dict.set_item( "path", ( self.path .0 .as_ref() .map(|p| p.to_string_lossy().to_string()), self.path .1 .as_ref() .map(|p| p.to_string_lossy().to_string()), ), ) .unwrap(); dict.set_item("changed_content", self.changed_content) .unwrap(); dict.set_item("versioned", self.versioned).unwrap(); dict.set_item("name", &self.name).unwrap(); dict.set_item("kind", self.kind.clone()).unwrap(); dict.set_item("executable", self.executable).unwrap(); dict.set_item("copied", self.copied).unwrap(); Ok(dict.into_any()) } } impl<'a, 'py> FromPyObject<'a, 'py> for TreeChange { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { fn from_bool(o: &Bound) -> PyResult { if let Ok(b) = o.extract::() { Ok(b != 0) } else { o.extract::() } } fn from_opt_bool_tuple(o: &Bound) -> PyResult<(Option, Option)> { let tuple = o.extract::<(Option>, Option>)>()?; Ok(( tuple.0.map(|o| from_bool(&o.as_borrowed())).transpose()?, tuple.1.map(|o| from_bool(&o.as_borrowed())).transpose()?, )) } let path = obj.getattr("path")?; let changed_content = from_bool(&obj.getattr("changed_content")?)?; let versioned = from_opt_bool_tuple(&obj.getattr("versioned")?)?; let name = obj.getattr("name")?; let kind = obj.getattr("kind")?; let executable = from_opt_bool_tuple(&obj.getattr("executable")?)?; let copied = obj.getattr("copied")?; Ok(TreeChange { path: path.extract()?, changed_content, versioned, name: name.extract()?, kind: kind.extract()?, executable, copied: copied.extract()?, }) } } /// An in-memory tree implementation. pub struct MemoryTree(pub Py); impl<'py> IntoPyObject<'py> for MemoryTree { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl From<&B> for MemoryTree { fn from(branch: &B) -> Self { Python::attach(|py| { MemoryTree( branch .to_object(py) .call_method0(py, "create_memorytree") .unwrap() .extract(py) .unwrap(), ) }) } } impl PyTree for MemoryTree { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl PyMutableTree for MemoryTree {} pub use crate::workingtree::WorkingTree; #[cfg(test)] mod tests { use super::*; use crate::controldir::{create_standalone_workingtree, ControlDirFormat}; use serial_test::serial; #[test] #[serial] fn test_remove() { let env = crate::testing::TestEnv::new(); let wt = create_standalone_workingtree(std::path::Path::new("."), &ControlDirFormat::default()) .unwrap(); let path = std::path::Path::new("foo"); std::fs::write(&path, b"").unwrap(); wt.add(&[(std::path::Path::new("foo"))]).unwrap(); wt.build_commit() .message("Initial commit") .reporter(&crate::commit::NullCommitReporter::new()) .commit() .unwrap(); assert!(wt.has_filename(&path)); wt.remove(&[Path::new("foo")]).unwrap(); assert!(!wt.is_versioned(&path)); std::mem::drop(env); } } breezyshim-0.7.5/src/ui.rs000064400000000000000000000072401046102023000135730ustar 00000000000000//! UI Factory use pyo3::prelude::*; /// Python UI factory trait. pub trait PyUIFactory: std::any::Any + std::fmt::Debug { /// Get the underlying Python object for this UI factory. fn to_object(&self, py: Python) -> Py; } /// UI factory trait. 
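// Illustrative sketch (an addition, not part of the upstream source): silencing
// Breezy's progress output around an operation. `with_silent_ui_factory` saves the
// global factory, installs a `SilentUIFactory`, runs the closure and restores the
// previous factory; the manual variant below does the same with the primitives
// defined in this module.
#[cfg(test)]
mod usage_sketch_tests {
    use super::*;

    #[test]
    fn silence_ui_around_an_operation() {
        // Closure-based form: any Breezy UI output inside the closure is suppressed.
        let answer = with_silent_ui_factory(|| 1 + 1);
        assert_eq!(answer, 2);

        // Manual form: save the current factory, install a silent one, then restore.
        let previous = get_ui_factory();
        install_ui_factory(&SilentUIFactory::new());
        // ... work that would otherwise print progress goes here ...
        install_ui_factory(previous.as_ref());
    }
}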
pub trait UIFactory: std::fmt::Debug {} impl UIFactory for T {} /// UI factory that does not output anything. pub struct SilentUIFactory(Py); impl SilentUIFactory { /// Create a new silent UI factory. pub fn new() -> Self { Python::attach(|py| { SilentUIFactory( py.import("breezy.ui") .unwrap() .getattr("SilentUIFactory") .unwrap() .call0() .unwrap() .unbind(), ) }) } } impl Default for SilentUIFactory { fn default() -> Self { Self::new() } } /// Generic wrapper for a Python UI factory. pub struct GenericUIFactory(Py); impl<'py> IntoPyObject<'py> for GenericUIFactory { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericUIFactory { type Error = PyErr; fn extract(obj: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericUIFactory(obj.to_owned().unbind())) } } impl GenericUIFactory { /// Create a new generic UI factory from a Python object. pub fn new(obj: Py) -> Self { Self(obj) } } impl PyUIFactory for GenericUIFactory { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl std::fmt::Debug for GenericUIFactory { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("GenericUIFactory({:?})", self.0)) } } impl<'py> IntoPyObject<'py> for SilentUIFactory { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl PyUIFactory for SilentUIFactory { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl std::fmt::Debug for SilentUIFactory { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_fmt(format_args!("SilentUIFactory({:?})", self.0)) } } /// Install a UI factory globally. pub fn install_ui_factory(factory: &dyn PyUIFactory) { Python::attach(|py| { let m = py.import("breezy.ui").unwrap(); m.setattr("ui_factory", factory.to_object(py)).unwrap(); }); } /// Get the current global UI factory. pub fn get_ui_factory() -> Box { Box::new(GenericUIFactory::new(Python::attach(|py| { let m = py.import("breezy.ui").unwrap(); m.getattr("ui_factory").unwrap().unbind() }))) as Box } /// Run a function with a silent UI factory temporarily installed. pub fn with_silent_ui_factory(f: impl FnOnce() -> R) -> R { let old_factory = get_ui_factory(); let new_factory = SilentUIFactory::new(); install_ui_factory(&new_factory); let r = f(); install_ui_factory(old_factory.as_ref()); r } #[cfg(test)] mod tests { use super::*; #[test] fn test_create_silent_factory() { let _ = SilentUIFactory::new(); } #[test] fn test_run_with_silent_factory() { with_silent_ui_factory(|| { crate::version::version(); }); } } breezyshim-0.7.5/src/urlutils.rs000064400000000000000000000116271046102023000150450ustar 00000000000000//! URL manipulation utilities. use pyo3::prelude::*; /// Join segment parameters to a URL. /// /// This function adds the specified parameters to a URL as segment parameters. /// /// # Parameters /// /// * `url` - The URL to add parameters to. /// * `parameters` - The parameters to add to the URL. /// /// # Returns /// /// A new URL with the specified parameters added. 
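// Illustrative sketch (an addition, not part of the upstream source): round-tripping
// Breezy-style segment parameters with the two helpers in this module, following the
// same pattern as the tests at the bottom of this file. The host, path and the
// `branch=trunk` parameter are arbitrary example values.
#[cfg(test)]
mod segment_parameter_sketch {
    use super::*;

    #[test]
    fn round_trip_segment_parameters() {
        let base = url::Url::parse("http://example.com/repo").unwrap();

        // Attach a `,branch=trunk` segment parameter to the final path segment.
        let mut params = std::collections::HashMap::new();
        params.insert("branch".to_string(), "trunk".to_string());
        let with_params = join_segment_parameters(&base, params.clone());
        assert_eq!(
            with_params,
            url::Url::parse("http://example.com/repo,branch=trunk").unwrap()
        );

        // Splitting recovers the plain URL and the parameter map.
        let (plain, recovered) = split_segment_parameters(&with_params);
        assert_eq!(plain, base);
        assert_eq!(recovered, params);
    }
}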
pub fn join_segment_parameters( url: &url::Url, parameters: std::collections::HashMap, ) -> url::Url { pyo3::Python::attach(|py| { let urlutils = py.import("breezy.urlutils").unwrap(); urlutils .call_method1("join_segment_parameters", (url.to_string(), parameters)) .unwrap() .extract::() .map(|s| url::Url::parse(s.as_str()).unwrap()) .unwrap() }) } /// Split segment parameters from a URL. /// /// This function extracts segment parameters from a URL. /// /// # Parameters /// /// * `url` - The URL to extract parameters from. /// /// # Returns /// /// A tuple containing the URL without parameters and a map of the parameters. pub fn split_segment_parameters( url: &url::Url, ) -> (url::Url, std::collections::HashMap) { pyo3::Python::attach(|py| { let urlutils = py.import("breezy.urlutils").unwrap(); urlutils .call_method1("split_segment_parameters", (url.to_string(),)) .unwrap() .extract::<(String, std::collections::HashMap)>() .map(|(s, m)| (url::Url::parse(s.as_str()).unwrap(), m)) .unwrap() }) } fn char_is_safe(c: char) -> bool { c.is_ascii_alphanumeric() || c == '-' || c == '_' || c == '.' || c == '~' } /// Escape a byte slice for use in a URL. /// /// This function escapes bytes for use in a URL, preserving characters that /// are considered safe. /// /// # Parameters /// /// * `relpath` - The byte slice to escape. /// * `safe` - Additional characters to consider safe (not to escape). /// /// # Returns /// /// The escaped string. pub fn escape(relpath: &[u8], safe: Option<&str>) -> String { let mut result = String::new(); let safe = safe.unwrap_or("/~").as_bytes(); for b in relpath { if char_is_safe(char::from(*b)) || safe.contains(b) { result.push(char::from(*b)); } else { result.push_str(&format!("%{:02X}", *b)); } } result } /// Escape a UTF-8 string for use in a URL. /// /// This is a convenience function that converts the string to bytes and calls `escape`. /// /// # Parameters /// /// * `relpath` - The string to escape. /// * `safe` - Additional characters to consider safe (not to escape). /// /// # Returns /// /// The escaped string. pub fn escape_utf8(relpath: &str, safe: Option<&str>) -> String { escape(relpath.as_bytes(), safe) } /// Unescape a URL-encoded UTF-8 string. /// /// This function decodes percent-encoded characters in a string. /// /// # Parameters /// /// * `url` - The URL-encoded string to decode. /// /// # Returns /// /// The decoded string. 
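// Illustrative sketch (an addition, not part of the upstream source): the escaping
// helpers in this module percent-encode every byte that is not alphanumeric, '-',
// '_', '.', '~' or listed in `safe` (which defaults to "/~"); `unescape_utf8`
// reverses the percent-encoding.
#[cfg(test)]
mod escape_sketch {
    use super::*;

    #[test]
    fn escape_and_unescape() {
        // Spaces are never safe, so they are percent-encoded.
        assert_eq!(escape_utf8("a b", None), "a%20b");
        // '/' is in the default safe set, but not in an explicitly empty one.
        assert_eq!(escape(b"a/b", None), "a/b");
        assert_eq!(escape(b"a/b", Some("")), "a%2Fb");
        // Decoding undoes the encoding.
        assert_eq!(unescape_utf8("a%20b"), "a b");
    }
}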
pub fn unescape_utf8(url: &str) -> String { use percent_encoding::percent_decode_str; percent_decode_str(url) .decode_utf8() .map(|s| s.to_string()) .unwrap_or_else(|_| url.to_string()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_escape() { assert_eq!(escape(b"blah", None), "blah"); assert_eq!(escape(b"blah", Some("")), "blah"); assert_eq!(escape(b"blah", Some("/~")), "blah"); assert_eq!(escape(b"la/bla", None), "la/bla"); assert_eq!(escape(b"la/bla", Some("")), "la%2Fbla"); assert_eq!(escape_utf8("la/bla", Some("/")), "la/bla"); } #[test] fn test_unescape() { assert_eq!(unescape_utf8("blah"), "blah"); assert_eq!(unescape_utf8("la%2Fbla"), "la/bla"); } #[test] fn test_split_segment_parameters() { let url = url::Url::parse("http://example.com/blah,a=1,b=2").unwrap(); let (result_url, result_parameters) = split_segment_parameters(&url); assert_eq!( result_url, url::Url::parse("http://example.com/blah").unwrap() ); let mut expected_parameters = std::collections::HashMap::new(); expected_parameters.insert("a".to_string(), "1".to_string()); expected_parameters.insert("b".to_string(), "2".to_string()); assert_eq!(result_parameters, expected_parameters); } #[test] fn test_join_segment_parameters() { let url = url::Url::parse("http://example.com/blah").unwrap(); let mut parameters = std::collections::HashMap::new(); parameters.insert("a".to_string(), "1".to_string()); parameters.insert("b".to_string(), "2".to_string()); let result = join_segment_parameters(&url, parameters); assert_eq!( result, url::Url::parse("http://example.com/blah,a=1,b=2").unwrap() ); } } breezyshim-0.7.5/src/version.rs000064400000000000000000000060021046102023000146360ustar 00000000000000//! Version information for the Breezy library. use pyo3::prelude::*; /// The release level of a version. /// /// This enum represents the different stages of a software release. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum ReleaseLevel { /// Development version. Dev, /// Alpha version. Alpha, /// Beta version. Beta, /// Release candidate. Candidate, /// Final release. Final, } /// Version information. /// /// This struct represents a version number with major, minor, and micro components, /// a release level, and a serial number. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct Version { /// The major version number. major: u32, /// The minor version number. minor: u32, /// The micro (patch) version number. micro: u32, /// The release level. level: ReleaseLevel, /// The serial number within the release level. serial: u32, } impl std::fmt::Display for Version { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{}.{}.{}{}{}", self.major, self.minor, self.micro, match self.level { ReleaseLevel::Dev => "dev", ReleaseLevel::Alpha => "a", ReleaseLevel::Beta => "b", ReleaseLevel::Candidate => "rc", ReleaseLevel::Final => "", }, if self.serial > 0 { format!("{}", self.serial) } else { "".to_string() } ) } } /// Get the version of the Breezy library. /// /// # Returns /// /// The version of the Breezy library. 
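// Illustrative sketch (an addition, not part of the upstream source): how the
// Display impl above renders pre-release versions. Non-final release levels add a
// suffix ("dev", "a", "b", "rc"), and a serial of 0 is omitted.
#[cfg(test)]
mod display_sketch {
    use super::*;

    #[test]
    fn pre_release_versions_render_with_suffix() {
        let rc = Version {
            major: 3,
            minor: 4,
            micro: 0,
            level: ReleaseLevel::Candidate,
            serial: 1,
        };
        assert_eq!(rc.to_string(), "3.4.0rc1");

        let dev = Version {
            major: 3,
            minor: 4,
            micro: 0,
            level: ReleaseLevel::Dev,
            serial: 0,
        };
        // Serial 0 is suppressed, leaving just the "dev" marker.
        assert_eq!(dev.to_string(), "3.4.0dev");
    }
}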
pub fn version() -> Version { Python::attach(|py| { let m = py.import("breezy").unwrap(); let version_info = m.getattr("version_info").unwrap(); let major = version_info.get_item(0).unwrap().extract::().unwrap(); let minor = version_info.get_item(1).unwrap().extract::().unwrap(); let micro = version_info.get_item(2).unwrap().extract::().unwrap(); let level = match version_info .get_item(3) .unwrap() .extract::() .unwrap() .as_str() { "dev" => ReleaseLevel::Dev, "alpha" => ReleaseLevel::Alpha, "beta" => ReleaseLevel::Beta, "candidate" => ReleaseLevel::Candidate, "final" => ReleaseLevel::Final, _ => panic!("Invalid release level"), }; let serial = version_info.get_item(4).unwrap().extract::().unwrap(); Version { major, minor, micro, level, serial, } }) } #[test] fn test_version_serialize() { let v = Version { major: 1, minor: 2, micro: 3, level: ReleaseLevel::Final, serial: 0, }; assert_eq!(v.to_string(), "1.2.3"); } #[test] fn test_version() { version().to_string(); } breezyshim-0.7.5/src/versionedfiles/tests.rs000064400000000000000000000045021046102023000173370ustar 00000000000000#[cfg(test)] mod tests { use crate::graph::Key; use crate::init; use crate::versionedfiles::{AbsentContentFactory, FulltextContentFactory, RecordOrdering}; #[test] fn test_key_conversion() { let key = Key::from(vec!["file1".to_string(), "rev1".to_string()]); let vec: Vec = key.clone().into(); assert_eq!(vec, vec!["file1", "rev1"]); } #[test] fn test_fulltext_content_factory() { init(); let key = Key::from(vec!["file1".to_string()]); let factory = FulltextContentFactory::new( Some("abc123".to_string()), "fulltext".to_string(), key, Some(vec![]), ); assert_eq!(factory.sha1, Some("abc123".to_string())); assert_eq!(factory.storage_kind, "fulltext"); } #[test] fn test_absent_content_factory() { let key = Key::from(vec!["file1".to_string()]); let parents = vec![Key::from(vec!["parent1".to_string()])]; let factory = AbsentContentFactory::new(key, parents); assert_eq!(factory.parents.len(), 1); } #[test] fn test_weave_basic_operations() { init(); pyo3::Python::attach(|py| { // Import the weave module py.import("breezy.bzr.weave").ok(); let weave = crate::weave::Weave::new_empty(py).unwrap(); // Add initial version weave .add_lines("v1", vec![], vec!["line1\n", "line2\n"]) .unwrap(); // Get text back let text = weave.get_text("v1").unwrap(); assert_eq!(text, vec!["line1\n", "line2\n"]); // Add child version weave .add_lines("v2", vec!["v1"], vec!["line1\n", "line2\n", "line3\n"]) .unwrap(); // Check ancestry let ancestry = weave.get_ancestry(vec!["v2"]).unwrap(); assert!(ancestry.contains(&"v1".to_string())); assert!(ancestry.contains(&"v2".to_string())); // Check version count assert_eq!(weave.numversions().unwrap(), 2); }); } #[test] fn test_record_ordering() { let _unordered = RecordOrdering::Unordered; let _topological = RecordOrdering::Topological; let _grouped = RecordOrdering::GroupedByKey; } } breezyshim-0.7.5/src/versionedfiles.rs000064400000000000000000000417771046102023000162140ustar 00000000000000//! 
Versioned files API for storing file content history #![allow(missing_docs)] use crate::error::Error; use crate::graph::{Key, KnownGraph}; use pyo3::exceptions::PyValueError; use pyo3::prelude::*; use pyo3::types::{PyBytes, PyDict, PyIterator, PyList, PyTuple}; use std::collections::HashMap; #[derive(Debug, Clone)] pub struct FulltextContentFactory { pub sha1: Option, pub storage_kind: String, pub key: Key, pub parents: Option>, } impl FulltextContentFactory { pub fn new( sha1: Option, storage_kind: String, key: Key, parents: Option>, ) -> Self { Self { sha1, storage_kind, key, parents, } } } impl<'py> IntoPyObject<'py> for FulltextContentFactory { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let factory_mod = py.import("breezy.bzr.versionedfile")?; let factory_cls = factory_mod.getattr("FulltextContentFactory")?; let parents_py = if let Some(parents) = self.parents { let parent_tuples: Vec<_> = parents .into_iter() .map(|p| p.into_pyobject(py)) .collect::, _>>()?; Some(PyTuple::new(py, parent_tuples)?) } else { None }; let kwargs = PyDict::new(py); if let Some(sha1) = self.sha1 { kwargs.set_item("sha1", PyBytes::new(py, sha1.as_bytes()))?; } kwargs.set_item("storage_kind", self.storage_kind)?; kwargs.set_item("key", self.key.into_pyobject(py)?)?; if let Some(parents) = parents_py { kwargs.set_item("parents", parents)?; } factory_cls.call((), Some(&kwargs)) } } #[derive(Debug, Clone)] pub struct AbsentContentFactory { pub key: Key, pub parents: Vec, } impl AbsentContentFactory { pub fn new(key: Key, parents: Vec) -> Self { Self { key, parents } } } impl<'py> IntoPyObject<'py> for AbsentContentFactory { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { let factory_mod = py.import("breezy.bzr.versionedfile")?; let factory_cls = factory_mod.getattr("AbsentContentFactory")?; let parent_tuples: Vec<_> = self .parents .into_iter() .map(|p| p.into_pyobject(py)) .collect::, _>>()?; let parents_py = PyTuple::new(py, parent_tuples)?; factory_cls.call1((self.key.into_pyobject(py)?, parents_py)) } } pub trait VersionedFiles: Clone + Send + Sync { fn add_lines( &self, key: &Key, parents: &[Key], lines: Vec<&str>, parent_texts: Option>>, left_matching_blocks: Option>, nostore_sha: Option<&str>, random_id: bool, check_content: bool, ) -> Result<(String, usize), Error>; fn get_record_stream( &self, keys: Vec, ordering: RecordOrdering, include_delta_closure: bool, ) -> Result; fn get_sha1s(&self, keys: Vec) -> Result, Error>; fn insert_record_stream(&self, stream: RecordStream) -> Result<(), Error>; fn keys(&self) -> Result, Error>; fn make_mpdiffs(&self, keys: Vec) -> Result, Error>; fn get_parent_map(&self, keys: Vec) -> Result>, Error>; fn get_known_graph_ancestry(&self, keys: Vec) -> Result; fn get_record_stream_for_keys( &self, keys: Vec, ordering: RecordOrdering, ) -> Result { self.get_record_stream(keys, ordering, false) } fn has_key(&self, key: &Key) -> Result { let parent_map = self.get_parent_map(vec![key.clone()])?; Ok(parent_map.contains_key(key)) } } pub trait PyVersionedFiles: VersionedFiles { fn to_object(&self, py: Python) -> Py; } impl VersionedFiles for T { fn add_lines( &self, key: &Key, parents: &[Key], lines: Vec<&str>, parent_texts: Option>>, left_matching_blocks: Option>, nostore_sha: Option<&str>, random_id: bool, check_content: bool, ) -> Result<(String, usize), Error> { Python::attach(|py| { let parents_py: 
Vec<_> = parents .iter() .map(|p| p.clone().into_pyobject(py)) .collect::, _>>()?; let parents_tuple = PyTuple::new(py, parents_py)?; let lines_py = PyList::new(py, lines)?; let kwargs = PyDict::new(py); if let Some(parent_texts) = parent_texts { let parent_texts_dict = PyDict::new(py); for (k, v) in parent_texts { let lines_list = PyList::new(py, v)?; parent_texts_dict.set_item(k.into_pyobject(py)?, lines_list)?; } kwargs.set_item("parent_texts", parent_texts_dict)?; } if let Some(blocks) = left_matching_blocks { let blocks_list = PyList::new(py, blocks.iter().map(|(a, b, c)| (*a, *b, *c)))?; kwargs.set_item("left_matching_blocks", blocks_list)?; } if let Some(sha) = nostore_sha { kwargs.set_item("nostore_sha", PyBytes::new(py, sha.as_bytes()))?; } kwargs.set_item("random_id", random_id)?; kwargs.set_item("check_content", check_content)?; let result = self.to_object(py).call_method( py, "add_lines", (key.clone().into_pyobject(py)?, parents_tuple, lines_py), Some(&kwargs), )?; let tuple = result .cast_bound::(py) .map_err(|_| PyValueError::new_err("Expected tuple"))?; let item0 = tuple.get_item(0)?; let sha1_bytes = item0 .cast::() .map_err(|_| PyValueError::new_err("Expected bytes"))?; let sha1 = std::str::from_utf8(sha1_bytes.as_bytes()) .map_err(|_| PyValueError::new_err("Invalid UTF-8 in SHA1"))? .to_string(); let length = tuple.get_item(1)?.extract::()?; Ok((sha1, length)) }) } fn get_record_stream( &self, keys: Vec, ordering: RecordOrdering, include_delta_closure: bool, ) -> Result { Python::attach(|py| { let keys_py: Vec<_> = keys .into_iter() .map(|k| k.into_pyobject(py)) .collect::, _>>()?; let keys_list = PyList::new(py, keys_py)?; let ordering_str = match ordering { RecordOrdering::Unordered => "unordered", RecordOrdering::Topological => "topological", RecordOrdering::GroupedByKey => "groupcompress", }; let stream_obj = self.to_object(py).call_method1( py, "get_record_stream", (keys_list, ordering_str, include_delta_closure), )?; Ok(RecordStream(stream_obj)) }) } fn get_sha1s(&self, keys: Vec) -> Result, Error> { Python::attach(|py| { let keys_py: Vec<_> = keys .into_iter() .map(|k| k.into_pyobject(py)) .collect::, _>>()?; let keys_list = PyList::new(py, keys_py)?; let result_dict = self .to_object(py) .call_method1(py, "get_sha1s", (keys_list,))?; let dict = result_dict .cast_bound::(py) .map_err(|_| PyValueError::new_err("Expected dict"))?; let mut sha1s = HashMap::new(); for (key_py, sha_py) in dict { let key = key_py.extract::()?; let sha_bytes = sha_py .cast::() .map_err(|_| PyValueError::new_err("Expected bytes"))?; let sha = std::str::from_utf8(sha_bytes.as_bytes()) .map_err(|_| PyValueError::new_err("Invalid UTF-8 in SHA1"))? .to_string(); sha1s.insert(key, sha); } Ok(sha1s) }) } fn insert_record_stream(&self, stream: RecordStream) -> Result<(), Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "insert_record_stream", (stream.0,))?; Ok(()) }) } fn keys(&self) -> Result, Error> { Python::attach(|py| { let keys_iter = self.to_object(py).call_method0(py, "keys")?; let mut keys = Vec::new(); for key_py in keys_iter .cast_bound::(py) .map_err(|_| PyValueError::new_err("Expected iterator"))? 
{ let key = key_py?.extract::()?; keys.push(key); } Ok(keys) }) } fn make_mpdiffs(&self, keys: Vec) -> Result, Error> { Python::attach(|py| { let keys_py: Vec<_> = keys .into_iter() .map(|k| k.into_pyobject(py)) .collect::, _>>()?; let keys_list = PyList::new(py, keys_py)?; let result = self .to_object(py) .call_method1(py, "make_mpdiffs", (keys_list,))?; let mut diffs = Vec::new(); for diff_py in result .cast_bound::(py) .map_err(|_| PyValueError::new_err("Expected iterator"))? { let diff = diff_py?.extract::()?; diffs.push(diff); } Ok(diffs) }) } fn get_parent_map(&self, keys: Vec) -> Result>, Error> { Python::attach(|py| { let keys_py: Vec<_> = keys .into_iter() .map(|k| k.into_pyobject(py)) .collect::, _>>()?; let keys_list = PyList::new(py, keys_py)?; let result_dict = self.to_object(py) .call_method1(py, "get_parent_map", (keys_list,))?; let dict = result_dict .cast_bound::(py) .map_err(|_| PyValueError::new_err("Expected dict"))?; let mut parent_map = HashMap::new(); for (key_py, parents_py) in dict { let key = key_py.extract::()?; let mut parents = Vec::new(); for parent_py in parents_py .cast::() .map_err(|_| PyValueError::new_err("Expected tuple"))? { let parent = parent_py.extract::()?; parents.push(parent); } parent_map.insert(key, parents); } Ok(parent_map) }) } fn get_known_graph_ancestry(&self, keys: Vec) -> Result { Python::attach(|py| { let keys_py: Vec<_> = keys .into_iter() .map(|k| k.into_pyobject(py)) .collect::, _>>()?; let keys_list = PyList::new(py, keys_py)?; let graph_obj = self.to_object(py) .call_method1(py, "get_known_graph_ancestry", (keys_list,))?; Ok(KnownGraph::new(graph_obj)) }) } } #[derive(Clone, Copy, Debug)] pub enum RecordOrdering { Unordered, Topological, GroupedByKey, } pub struct RecordStream(Py); impl RecordStream { pub fn iter(&self) -> Result { Python::attach(|py| { let iter = self.0.call_method0(py, "__iter__")?; Ok(RecordStreamIterator(iter)) }) } } pub struct RecordStreamIterator(Py); impl Iterator for RecordStreamIterator { type Item = Result; fn next(&mut self) -> Option { Python::attach(|py| match self.0.call_method0(py, "__next__") { Ok(record_py) => Some(record_py.bind(py).extract::().map_err(Into::into)), Err(e) if e.is_instance_of::(py) => None, Err(e) => Some(Err(e.into())), }) } } #[derive(Debug)] pub struct Record { pub key: Key, pub storage_kind: String, pub sha1: Option, pub parents: Vec, } impl<'a, 'py> FromPyObject<'a, 'py> for Record { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let key = ob.getattr("key")?.extract::()?; let storage_kind = ob.getattr("storage_kind")?.extract::()?; let sha1 = if let Ok(sha_bytes) = ob.getattr("sha1") { if !sha_bytes.is_none() { let bytes = sha_bytes .cast::() .map_err(|_| PyValueError::new_err("Expected bytes"))?; Some( std::str::from_utf8(bytes.as_bytes()) .map_err(|_| PyValueError::new_err("Invalid UTF-8 in SHA1"))? 
.to_string(), ) } else { None } } else { None }; let parents = ob.getattr("parents")?.extract::>()?; Ok(Record { key, storage_kind, sha1, parents, }) } } #[derive(Debug)] pub struct MultiParentDiff { pub key: Key, pub parents: Vec, pub hunks: Vec, } impl<'a, 'py> FromPyObject<'a, 'py> for MultiParentDiff { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let tuple = ob .cast::() .map_err(|_| PyValueError::new_err("Expected tuple"))?; let key = tuple.get_item(0)?.extract::()?; let parents = tuple.get_item(1)?.extract::>()?; let hunks_py = tuple.get_item(2)?; let mut hunks = Vec::new(); for hunk_py in hunks_py .cast::() .map_err(|_| PyValueError::new_err("Expected list"))? { hunks.push(hunk_py.extract::()?); } Ok(MultiParentDiff { key, parents, hunks, }) } } #[derive(Debug)] pub enum DiffHunk { NewText(Vec), ParentText { parent: usize, start: usize, end: usize, }, } impl<'a, 'py> FromPyObject<'a, 'py> for DiffHunk { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { let tuple = ob .cast::() .map_err(|_| PyValueError::new_err("Expected tuple"))?; let hunk_type = tuple.get_item(0)?.extract::()?; match hunk_type.as_str() { "new" => { let lines = tuple.get_item(1)?.extract::>()?; Ok(DiffHunk::NewText(lines)) } "parent" => { let parent = tuple.get_item(1)?.extract::()?; let start = tuple.get_item(2)?.extract::()?; let end = tuple.get_item(3)?.extract::()?; Ok(DiffHunk::ParentText { parent, start, end }) } _ => Err(PyValueError::new_err(format!( "Unknown hunk type: {}", hunk_type ))), } } } pub struct GenericVersionedFiles(Py); impl GenericVersionedFiles { pub fn new(py_obj: Py) -> Self { Self(py_obj) } } impl Clone for GenericVersionedFiles { fn clone(&self) -> Self { Python::attach(|py| GenericVersionedFiles(self.0.clone_ref(py))) } } impl PyVersionedFiles for GenericVersionedFiles { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl<'py> IntoPyObject<'py> for GenericVersionedFiles { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for GenericVersionedFiles { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(GenericVersionedFiles(ob.to_owned().unbind())) } } #[cfg(test)] mod tests; breezyshim-0.7.5/src/weave.rs000064400000000000000000000165701046102023000142730ustar 00000000000000//! 
Weave versioned files implementation #![allow(missing_docs)] use crate::error::Error; use pyo3::prelude::*; pub struct WeaveFile(Py); impl WeaveFile { pub fn new(py_obj: Py) -> Self { Self(py_obj) } pub fn from_transport( py: Python, transport: &crate::transport::Transport, file_name: &str, mode: Option<&str>, create: bool, ) -> PyResult { let weave_mod = py.import("breezy.bzr.weave")?; let weave_cls = weave_mod.getattr("WeaveFile")?; let kwargs = pyo3::types::PyDict::new(py); if let Some(m) = mode { kwargs.set_item("mode", m)?; } kwargs.set_item("create", create)?; let obj = weave_cls.call( (file_name, transport.as_pyobject().clone_ref(py)), Some(&kwargs), )?; Ok(WeaveFile(obj.unbind())) } pub fn add_lines( &self, version_id: &str, parents: Vec<&str>, lines: Vec<&str>, ) -> Result<(), Error> { Python::attach(|py| { let parents_list = pyo3::types::PyList::new(py, parents)?; let lines_list = pyo3::types::PyList::new(py, lines)?; self.0 .call_method1(py, "add_lines", (version_id, parents_list, lines_list))?; Ok(()) }) } pub fn get_lines(&self, version_id: &str) -> Result, Error> { Python::attach(|py| { let result = self.0.call_method1(py, "get_lines", (version_id,))?; let lines_list = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected list"))?; let mut lines = Vec::new(); for line in lines_list { lines.push(line.extract::()?); } Ok(lines) }) } pub fn get_ancestry(&self, version_ids: Vec<&str>) -> Result, Error> { Python::attach(|py| { let ids_list = pyo3::types::PyList::new(py, version_ids)?; let result = self.0.call_method1(py, "get_ancestry", (ids_list,))?; let ancestry_list = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected list"))?; let mut ancestry = Vec::new(); for id in ancestry_list { ancestry.push(id.extract::()?); } Ok(ancestry) }) } pub fn get_parent_map( &self, version_ids: Option>, ) -> Result>, Error> { Python::attach(|py| { let ids_arg: Py = if let Some(ids) = version_ids { pyo3::types::PyList::new(py, ids)?.unbind().into() } else { py.None() }; let result = self.0.call_method1(py, "get_parent_map", (ids_arg,))?; let parent_dict = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected dict"))?; let mut parent_map = std::collections::HashMap::new(); for (key, value) in parent_dict { let version_id = key.extract::()?; let parents_list = value .cast::() .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected list"))?; let mut parents = Vec::new(); for parent in parents_list { parents.push(parent.extract::()?); } parent_map.insert(version_id, parents); } Ok(parent_map) }) } } impl Clone for WeaveFile { fn clone(&self) -> Self { Python::attach(|py| WeaveFile(self.0.clone_ref(py))) } } impl<'py> IntoPyObject<'py> for WeaveFile { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for WeaveFile { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(WeaveFile(ob.to_owned().unbind())) } } pub struct Weave(Py); impl Weave { pub fn new(py_obj: Py) -> Self { Self(py_obj) } pub fn new_empty(py: Python) -> PyResult { let weave_mod = py.import("breezy.bzr.weave")?; let weave_cls = weave_mod.getattr("Weave")?; let obj = weave_cls.call0()?; Ok(Weave(obj.unbind())) } pub fn add_lines(&self, name: &str, parents: Vec<&str>, text: Vec<&str>) -> Result<(), Error> { Python::attach(|py| { let parents_list = 
pyo3::types::PyList::new(py, parents)?; // Convert text to bytes as required by weave let text_bytes: Vec<_> = text .iter() .map(|line| pyo3::types::PyBytes::new(py, line.as_bytes())) .collect(); let text_list = pyo3::types::PyList::new(py, text_bytes)?; self.0 .call_method1(py, "add_lines", (name, parents_list, text_list))?; Ok(()) }) } pub fn get_text(&self, name: &str) -> Result, Error> { Python::attach(|py| { let result = self.0.call_method1(py, "get_text", (name,))?; let bytes_result = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected bytes"))?; let text = std::str::from_utf8(bytes_result.as_bytes()) .map_err(|_| pyo3::exceptions::PyValueError::new_err("Invalid UTF-8"))?; // Split into lines let lines: Vec = text.lines().map(|line| format!("{}\n", line)).collect(); Ok(lines) }) } pub fn get_ancestry(&self, names: Vec<&str>) -> Result, Error> { Python::attach(|py| { let names_list = pyo3::types::PyList::new(py, names)?; let result = self.0.call_method1(py, "get_ancestry", (names_list,))?; let ancestry_set = result .cast_bound::(py) .map_err(|_| pyo3::exceptions::PyTypeError::new_err("Expected set"))?; let mut ancestry = Vec::new(); for name in ancestry_set { ancestry.push(name.extract::()?); } Ok(ancestry) }) } pub fn numversions(&self) -> Result { Python::attach(|py| { let result = self.0.call_method0(py, "num_versions")?; Ok(result.extract::(py)?) }) } } impl Clone for Weave { fn clone(&self) -> Self { Python::attach(|py| Weave(self.0.clone_ref(py))) } } impl<'py> IntoPyObject<'py> for Weave { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } impl<'a, 'py> FromPyObject<'a, 'py> for Weave { type Error = PyErr; fn extract(ob: Borrowed<'a, 'py, PyAny>) -> PyResult { Ok(Weave(ob.to_owned().unbind())) } } breezyshim-0.7.5/src/workingtree.rs000064400000000000000000001276241046102023000155270ustar 00000000000000//! Working trees in version control systems. //! //! This module provides functionality for working with working trees, which are //! local directories containing the files of a branch that can be edited. use crate::branch::{Branch, GenericBranch, PyBranch}; use crate::controldir::{ControlDir, GenericControlDir}; use crate::error::Error; use crate::tree::{MutableTree, PyMutableTree, PyTree, RevisionTree}; use crate::RevisionId; use pyo3::prelude::*; use std::path::{Path, PathBuf}; /// Trait representing a working tree in a version control system. /// /// A working tree is a local directory containing the files of a branch that can /// be edited. This trait provides methods for interacting with working trees /// across various version control systems. pub trait WorkingTree: MutableTree { /// Get the base directory path of this working tree. /// /// # Returns /// /// The absolute path to the root directory of this working tree. fn basedir(&self) -> PathBuf; /// Get the control directory for this working tree. /// /// # Returns /// /// The control directory containing this working tree. fn controldir( &self, ) -> Box< dyn ControlDir< Branch = GenericBranch, Repository = crate::repository::GenericRepository, WorkingTree = GenericWorkingTree, >, >; /// Get the branch associated with this working tree. /// /// # Returns /// /// The branch that this working tree is tracking. fn branch(&self) -> GenericBranch; /// Get the user-visible URL for this working tree. /// /// # Returns /// /// The URL that can be used to access this working tree. 
fn get_user_url(&self) -> url::Url; /// Check if this working tree supports setting the last revision. /// /// # Returns /// /// `true` if the working tree supports setting the last revision, `false` otherwise. fn supports_setting_file_ids(&self) -> bool; /// Add specified files to version control and the working tree. /// /// # Parameters /// /// * `files` - The list of file paths to add. /// /// # Returns /// /// `Ok(())` on success, or an error if the files could not be added. fn smart_add(&self, files: &[&Path]) -> Result<(), Error>; /// Update the working tree to a specific revision. /// /// # Parameters /// /// * `revision_id` - The revision to update to, or None for the latest. /// /// # Returns /// /// `Ok(())` on success, or an error if the update failed. fn update(&self, revision_id: Option<&RevisionId>) -> Result<(), Error>; /// Revert changes in the working tree. /// /// # Parameters /// /// * `filenames` - Optional list of specific files to revert. /// /// # Returns /// /// `Ok(())` on success, or an error if the revert failed. fn revert(&self, filenames: Option<&[&Path]>) -> Result<(), Error>; /// Create a commit builder for this working tree. /// /// # Returns /// /// A new CommitBuilder instance for this working tree. fn build_commit(&self) -> CommitBuilder; /// Get the basis tree for this working tree. /// /// # Returns /// /// The basis tree that this working tree is based on. fn basis_tree(&self) -> Result; /// Check if a path is a control filename in this working tree. /// /// Control filenames are filenames that are used by the version control system /// for its own purposes, like .git or .bzr. /// /// # Parameters /// /// * `path` - The path to check. /// /// # Returns /// /// `true` if the path is a control filename, `false` otherwise. fn is_control_filename(&self, path: &Path) -> bool; /// Get a revision tree for a specific revision. /// /// # Parameters /// /// * `revision_id` - The ID of the revision to get the tree for. /// /// # Returns /// /// The revision tree, or an error if it could not be retrieved. fn revision_tree(&self, revision_id: &RevisionId) -> Result, Error>; /// Convert a path to an absolute path relative to the working tree. /// /// # Parameters /// /// * `path` - The path to convert. /// /// # Returns /// /// The absolute path, or an error if the conversion failed. fn abspath(&self, path: &Path) -> Result; /// Convert an absolute path to a path relative to the working tree. /// /// # Parameters /// /// * `path` - The absolute path to convert. /// /// # Returns /// /// The relative path, or an error if the conversion failed. fn relpath(&self, path: &Path) -> Result; /// Pull changes from another branch into this working tree. /// /// # Parameters /// /// * `source` - The branch to pull from. /// * `overwrite` - Whether to overwrite diverged changes. /// * `stop_revision` - The revision to stop pulling at. /// * `local` - Whether to only pull locally accessible revisions. /// /// # Returns /// /// `Ok(())` on success, or an error if the pull could not be completed. fn pull( &self, source: &dyn Branch, overwrite: Option, stop_revision: Option<&RevisionId>, local: Option, ) -> Result<(), Error>; /// Merge changes from another branch into this working tree. /// /// # Parameters /// /// * `source` - The branch to merge from. /// * `to_revision` - The revision to merge up to. /// /// # Returns /// /// `Ok(())` on success, or an error if the merge could not be completed. 
fn merge_from_branch( &self, source: &dyn Branch, to_revision: Option<&RevisionId>, ) -> Result<(), Error>; /// Convert a list of files to relative paths safely. /// /// This function takes a list of file paths and converts them to paths relative /// to the working tree, with various safety checks. /// /// # Parameters /// /// * `file_list` - The list of file paths to convert. /// * `canonicalize` - Whether to canonicalize the paths first. /// * `apply_view` - Whether to apply the view (if any) to the paths. /// /// # Returns /// /// A list of converted paths, or an error if the conversion failed. fn safe_relpath_files( &self, file_list: &[&Path], canonicalize: bool, apply_view: bool, ) -> Result, Error>; /// Add conflicts to the working tree. fn add_conflicts(&self, conflicts: &[crate::tree::Conflict]) -> Result<(), Error>; /// Add a parent tree. fn add_parent_tree( &self, parent_id: &RevisionId, parent_tree: &crate::tree::RevisionTree, ) -> Result<(), Error>; /// Add a parent tree ID. fn add_parent_tree_id(&self, parent_id: &RevisionId) -> Result<(), Error>; /// Add a pending merge. fn add_pending_merge(&self, revision_id: &RevisionId) -> Result<(), Error>; /// Auto-resolve conflicts. fn auto_resolve(&self) -> Result<(), Error>; /// Check the state of the working tree. fn check_state(&self) -> Result<(), Error>; /// Get the canonical path for a file. fn get_canonical_path(&self, path: &Path) -> Result; /// Get canonical paths for multiple files. fn get_canonical_paths(&self, paths: &[&Path]) -> Result, Error>; /// Get the configuration stack. fn get_config_stack(&self) -> Result, Error>; /// Get reference information. fn get_reference_info(&self, path: &Path) -> Result, Error>; /// Get the shelf manager. fn get_shelf_manager(&self) -> Result, Error>; /// Get ignored files. fn ignored_files(&self) -> Result, Error>; /// Check if the working tree is locked. fn is_locked(&self) -> bool; /// Get merge-modified files. fn merge_modified(&self) -> Result, Error>; /// Move files within the working tree. fn move_files(&self, from_paths: &[&Path], to_dir: &Path) -> Result<(), Error>; /// Set conflicts in the working tree. fn set_conflicts(&self, conflicts: &[crate::tree::Conflict]) -> Result<(), Error>; /// Set the last revision. fn set_last_revision(&self, revision_id: &RevisionId) -> Result<(), Error>; /// Set merge-modified files. fn set_merge_modified(&self, files: &[&Path]) -> Result<(), Error>; /// Set pending merges. fn set_pending_merges(&self, revision_ids: &[RevisionId]) -> Result<(), Error>; /// Set reference information. fn set_reference_info( &self, path: &Path, location: &str, file_id: Option<&str>, ) -> Result<(), Error>; /// Subsume a tree into this working tree. fn subsume(&self, other: &dyn PyWorkingTree) -> Result<(), Error>; /// Store uncommitted changes. fn store_uncommitted(&self) -> Result; /// Restore uncommitted changes. fn restore_uncommitted(&self) -> Result<(), Error>; /// Extract the working tree to a directory. fn extract(&self, dest: &Path, format: Option<&str>) -> Result<(), Error>; /// Clone the working tree. fn clone( &self, dest: &Path, revision_id: Option<&RevisionId>, ) -> Result; /// Get a control transport. fn control_transport(&self) -> Result; /// Get the control URL. fn control_url(&self) -> url::Url; /// Copy content into this working tree. fn copy_content_into( &self, source: &dyn PyTree, revision_id: Option<&RevisionId>, ) -> Result<(), Error>; /// Flush any pending changes. 
fn flush(&self) -> Result<(), Error>; /// Check if the working tree requires a rich root. fn requires_rich_root(&self) -> bool; /// Reset the state of the working tree. fn reset_state(&self, revision_ids: Option<&[RevisionId]>) -> Result<(), Error>; /// Reference a parent tree. fn reference_parent( &self, path: &Path, branch: &dyn Branch, revision_id: Option<&RevisionId>, ) -> Result<(), Error>; /// Check if the working tree supports merge-modified tracking. fn supports_merge_modified(&self) -> bool; /// Break the lock on the working tree. fn break_lock(&self) -> Result<(), Error>; /// Get the physical lock status. fn get_physical_lock_status(&self) -> Result; } /// Trait for working trees that wrap Python working tree objects. /// /// This trait is implemented by working tree types that wrap Python working tree objects. pub trait PyWorkingTree: PyMutableTree + WorkingTree {} impl dyn PyWorkingTree { /// Get a reference to self as a WorkingTree trait object. pub fn as_working_tree(&self) -> &dyn WorkingTree { self } } impl WorkingTree for T { fn basedir(&self) -> PathBuf { Python::attach(|py| { let path: String = self .to_object(py) .getattr(py, "basedir") .unwrap() .extract(py) .unwrap(); PathBuf::from(path) }) } fn controldir( &self, ) -> Box< dyn ControlDir< Branch = GenericBranch, Repository = crate::repository::GenericRepository, WorkingTree = GenericWorkingTree, >, > { Python::attach(|py| { let controldir = self.to_object(py).getattr(py, "controldir").unwrap(); Box::new(GenericControlDir::new(controldir)) as Box< dyn ControlDir< Branch = GenericBranch, Repository = crate::repository::GenericRepository, WorkingTree = GenericWorkingTree, >, > }) } fn branch(&self) -> GenericBranch { Python::attach(|py| GenericBranch::from(self.to_object(py).getattr(py, "branch").unwrap())) } fn get_user_url(&self) -> url::Url { Python::attach(|py| { let url: String = self .to_object(py) .getattr(py, "user_url") .unwrap() .extract(py) .unwrap(); url.parse().unwrap() }) } fn supports_setting_file_ids(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "supports_setting_file_ids") .unwrap() .extract(py) .unwrap() }) } fn smart_add(&self, files: &[&Path]) -> Result<(), Error> { Python::attach(|py| { let file_paths: Vec = files .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); self.to_object(py) .call_method1(py, "smart_add", (file_paths,))?; Ok(()) }) } fn update(&self, revision_id: Option<&RevisionId>) -> Result<(), Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "update", (revision_id.cloned(),))?; Ok(()) }) } fn revert(&self, filenames: Option<&[&Path]>) -> Result<(), Error> { Python::attach(|py| { let file_paths = filenames.map(|files| { files .iter() .map(|p| p.to_string_lossy().to_string()) .collect::>() }); self.to_object(py) .call_method1(py, "revert", (file_paths,))?; Ok(()) }) } fn build_commit(&self) -> CommitBuilder { Python::attach(|py| CommitBuilder::from(GenericWorkingTree(self.to_object(py)))) } fn basis_tree(&self) -> Result { Python::attach(|py| { let basis_tree = self.to_object(py).call_method0(py, "basis_tree")?; Ok(RevisionTree(basis_tree)) }) } fn is_control_filename(&self, path: &Path) -> bool { Python::attach(|py| { self.to_object(py) .call_method1( py, "is_control_filename", (path.to_string_lossy().as_ref(),), ) .unwrap() .extract(py) .unwrap() }) } /// Get a revision tree for a specific revision. 
fn revision_tree(&self, revision_id: &RevisionId) -> Result, Error> { Python::attach(|py| { let tree = self.to_object(py).call_method1( py, "revision_tree", (revision_id.clone().into_pyobject(py).unwrap(),), )?; Ok(Box::new(RevisionTree(tree))) }) } /// Convert a path to an absolute path relative to the working tree. fn abspath(&self, path: &Path) -> Result { Python::attach(|py| { Ok(self .to_object(py) .call_method1(py, "abspath", (path.to_string_lossy().as_ref(),))? .extract(py)?) }) } /// Convert an absolute path to a path relative to the working tree. fn relpath(&self, path: &Path) -> Result { Python::attach(|py| { Ok(self .to_object(py) .call_method1(py, "relpath", (path.to_string_lossy().as_ref(),))? .extract(py)?) }) } /// Pull changes from another branch into this working tree. fn pull( &self, source: &dyn Branch, overwrite: Option, stop_revision: Option<&RevisionId>, local: Option, ) -> Result<(), Error> { Python::attach(|py| { let kwargs = { let kwargs = pyo3::types::PyDict::new(py); if let Some(overwrite) = overwrite { kwargs.set_item("overwrite", overwrite).unwrap(); } if let Some(stop_revision) = stop_revision { kwargs .set_item( "stop_revision", stop_revision.clone().into_pyobject(py).unwrap(), ) .unwrap(); } if let Some(local) = local { kwargs.set_item("local", local).unwrap(); } kwargs }; // Try to cast to a concrete type that implements PyBranch let py_obj = if let Some(generic_branch) = source.as_any().downcast_ref::() { generic_branch.to_object(py) } else if let Some(py_branch) = source .as_any() .downcast_ref::() { py_branch.to_object(py) } else { return Err(Error::Other( PyErr::new::( "Branch must be a PyBranch implementation for pull operation", ), )); }; self.to_object(py) .call_method(py, "pull", (py_obj,), Some(&kwargs))?; Ok(()) }) } /// Merge changes from another branch into this working tree. fn merge_from_branch( &self, source: &dyn Branch, to_revision: Option<&RevisionId>, ) -> Result<(), Error> { Python::attach(|py| { let kwargs = { let kwargs = pyo3::types::PyDict::new(py); if let Some(to_revision) = to_revision { kwargs .set_item( "to_revision", to_revision.clone().into_pyobject(py).unwrap(), ) .unwrap(); } kwargs }; // Try to cast to a concrete type that implements PyBranch let py_obj = if let Some(generic_branch) = source.as_any().downcast_ref::() { generic_branch.to_object(py) } else if let Some(py_branch) = source .as_any() .downcast_ref::() { py_branch.to_object(py) } else { return Err(Error::Other(PyErr::new::( "Branch must be a PyBranch implementation for merge_from_branch operation" ))); }; self.to_object(py) .call_method(py, "merge_from_branch", (py_obj,), Some(&kwargs))?; Ok(()) }) } /// Convert a list of files to relative paths safely. fn safe_relpath_files( &self, file_list: &[&Path], canonicalize: bool, apply_view: bool, ) -> Result, Error> { Python::attach(|py| { let result = self.to_object(py).call_method1( py, "safe_relpath_files", ( file_list .iter() .map(|x| x.to_string_lossy().to_string()) .collect::>(), canonicalize, apply_view, ), )?; Ok(result.extract(py)?) 
}) } fn add_conflicts(&self, conflicts: &[crate::tree::Conflict]) -> Result<(), Error> { Python::attach(|py| { let conflicts_py: Vec> = conflicts .iter() .map(|c| { let dict = pyo3::types::PyDict::new(py); dict.set_item("path", c.path.to_string_lossy().to_string()) .unwrap(); dict.set_item("typestring", &c.conflict_type).unwrap(); if let Some(ref msg) = c.message { dict.set_item("message", msg).unwrap(); } dict.into_any().unbind() }) .collect(); self.to_object(py) .call_method1(py, "add_conflicts", (conflicts_py,))?; Ok(()) }) } fn add_parent_tree( &self, parent_id: &RevisionId, parent_tree: &crate::tree::RevisionTree, ) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method1( py, "add_parent_tree", ( parent_id.clone().into_pyobject(py).unwrap(), parent_tree.to_object(py), ), )?; Ok(()) }) } fn add_parent_tree_id(&self, parent_id: &RevisionId) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method1( py, "add_parent_tree_id", (parent_id.clone().into_pyobject(py).unwrap(),), )?; Ok(()) }) } fn add_pending_merge(&self, revision_id: &RevisionId) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method1( py, "add_pending_merge", (revision_id.clone().into_pyobject(py).unwrap(),), )?; Ok(()) }) } fn auto_resolve(&self) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "auto_resolve")?; Ok(()) }) } fn check_state(&self) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "check_state")?; Ok(()) }) } fn get_canonical_path(&self, path: &Path) -> Result { Python::attach(|py| { Ok(self .to_object(py) .call_method1(py, "get_canonical_path", (path.to_string_lossy().as_ref(),))? .extract(py)?) }) } fn get_canonical_paths(&self, paths: &[&Path]) -> Result, Error> { Python::attach(|py| { let path_strings: Vec = paths .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); Ok(self .to_object(py) .call_method1(py, "get_canonical_paths", (path_strings,))? .extract(py)?) }) } fn get_config_stack(&self) -> Result, Error> { Python::attach(|py| Ok(self.to_object(py).call_method0(py, "get_config_stack")?)) } fn get_reference_info(&self, path: &Path) -> Result, Error> { Python::attach(|py| { let result = self.to_object(py).call_method1( py, "get_reference_info", (path.to_string_lossy().as_ref(),), )?; if result.is_none(py) { Ok(None) } else { let tuple: (String, String) = result.extract(py)?; Ok(Some((tuple.0, PathBuf::from(tuple.1)))) } }) } fn get_shelf_manager(&self) -> Result, Error> { Python::attach(|py| Ok(self.to_object(py).call_method0(py, "get_shelf_manager")?)) } fn ignored_files(&self) -> Result, Error> { Python::attach(|py| { Ok(self .to_object(py) .call_method0(py, "ignored_files")? .extract(py)?) }) } fn is_locked(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "is_locked") .unwrap() .extract(py) .unwrap() }) } fn merge_modified(&self) -> Result, Error> { Python::attach(|py| { Ok(self .to_object(py) .call_method0(py, "merge_modified")? .extract(py)?) 
}) } fn move_files(&self, from_paths: &[&Path], to_dir: &Path) -> Result<(), Error> { Python::attach(|py| { let from_strings: Vec = from_paths .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); self.to_object(py).call_method1( py, "move", (from_strings, to_dir.to_string_lossy().as_ref()), )?; Ok(()) }) } fn set_conflicts(&self, conflicts: &[crate::tree::Conflict]) -> Result<(), Error> { Python::attach(|py| { let conflicts_py: Vec> = conflicts .iter() .map(|c| { let dict = pyo3::types::PyDict::new(py); dict.set_item("path", c.path.to_string_lossy().to_string()) .unwrap(); dict.set_item("typestring", &c.conflict_type).unwrap(); if let Some(ref msg) = c.message { dict.set_item("message", msg).unwrap(); } dict.into_any().unbind() }) .collect(); self.to_object(py) .call_method1(py, "set_conflicts", (conflicts_py,))?; Ok(()) }) } fn set_last_revision(&self, revision_id: &RevisionId) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method1( py, "set_last_revision", (revision_id.clone().into_pyobject(py).unwrap(),), )?; Ok(()) }) } fn set_merge_modified(&self, files: &[&Path]) -> Result<(), Error> { Python::attach(|py| { let file_strings: Vec = files .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); self.to_object(py) .call_method1(py, "set_merge_modified", (file_strings,))?; Ok(()) }) } fn set_pending_merges(&self, revision_ids: &[RevisionId]) -> Result<(), Error> { Python::attach(|py| { let revision_ids_py: Vec> = revision_ids .iter() .map(|id| id.clone().into_pyobject(py).unwrap().unbind()) .collect(); self.to_object(py) .call_method1(py, "set_pending_merges", (revision_ids_py,))?; Ok(()) }) } fn set_reference_info( &self, path: &Path, location: &str, file_id: Option<&str>, ) -> Result<(), Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(file_id) = file_id { kwargs.set_item("file_id", file_id)?; } self.to_object(py).call_method( py, "set_reference_info", (path.to_string_lossy().as_ref(), location), Some(&kwargs), )?; Ok(()) }) } fn subsume(&self, other: &dyn PyWorkingTree) -> Result<(), Error> { Python::attach(|py| { self.to_object(py) .call_method1(py, "subsume", (other.to_object(py),))?; Ok(()) }) } fn store_uncommitted(&self) -> Result { Python::attach(|py| { Ok(self .to_object(py) .call_method0(py, "store_uncommitted")? .extract(py)?) 
}) } fn restore_uncommitted(&self) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "restore_uncommitted")?; Ok(()) }) } fn extract(&self, dest: &Path, format: Option<&str>) -> Result<(), Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(format) = format { kwargs.set_item("format", format)?; } self.to_object(py).call_method( py, "extract", (dest.to_string_lossy().as_ref(),), Some(&kwargs), )?; Ok(()) }) } fn clone( &self, dest: &Path, revision_id: Option<&RevisionId>, ) -> Result { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(revision_id) = revision_id { kwargs.set_item( "revision_id", revision_id.clone().into_pyobject(py).unwrap(), )?; } let result = self.to_object(py).call_method( py, "clone", (dest.to_string_lossy().as_ref(),), Some(&kwargs), )?; Ok(GenericWorkingTree(result)) }) } fn control_transport(&self) -> Result { Python::attach(|py| { let transport = self.to_object(py).getattr(py, "control_transport")?; Ok(crate::transport::Transport::new(transport)) }) } fn control_url(&self) -> url::Url { Python::attach(|py| { let url: String = self .to_object(py) .getattr(py, "control_url") .unwrap() .extract(py) .unwrap(); url.parse().unwrap() }) } fn copy_content_into( &self, source: &dyn PyTree, revision_id: Option<&RevisionId>, ) -> Result<(), Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(revision_id) = revision_id { kwargs.set_item( "revision_id", revision_id.clone().into_pyobject(py).unwrap(), )?; } self.to_object(py).call_method( py, "copy_content_into", (source.to_object(py),), Some(&kwargs), )?; Ok(()) }) } fn flush(&self) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "flush")?; Ok(()) }) } fn requires_rich_root(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "requires_rich_root") .unwrap() .extract(py) .unwrap() }) } fn reset_state(&self, revision_ids: Option<&[RevisionId]>) -> Result<(), Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(revision_ids) = revision_ids { let revision_ids_py: Vec> = revision_ids .iter() .map(|id| id.clone().into_pyobject(py).unwrap().unbind()) .collect(); kwargs.set_item("revision_ids", revision_ids_py)?; } self.to_object(py) .call_method(py, "reset_state", (), Some(&kwargs))?; Ok(()) }) } fn reference_parent( &self, path: &Path, branch: &dyn Branch, revision_id: Option<&RevisionId>, ) -> Result<(), Error> { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); if let Some(revision_id) = revision_id { kwargs.set_item( "revision_id", revision_id.clone().into_pyobject(py).unwrap(), )?; } // Try to cast to a concrete type that implements PyBranch let py_obj = if let Some(generic_branch) = branch.as_any().downcast_ref::() { generic_branch.to_object(py) } else if let Some(py_branch) = branch .as_any() .downcast_ref::() { py_branch.to_object(py) } else { return Err(Error::Other(PyErr::new::( "Branch must be a PyBranch implementation for reference_parent operation" ))); }; self.to_object(py).call_method( py, "reference_parent", (path.to_string_lossy().as_ref(), py_obj), Some(&kwargs), )?; Ok(()) }) } fn supports_merge_modified(&self) -> bool { Python::attach(|py| { self.to_object(py) .call_method0(py, "supports_merge_modified") .unwrap() .extract(py) .unwrap() }) } fn break_lock(&self) -> Result<(), Error> { Python::attach(|py| { self.to_object(py).call_method0(py, "break_lock")?; Ok(()) }) } fn 
get_physical_lock_status(&self) -> Result { Python::attach(|py| { Ok(self .to_object(py) .call_method0(py, "get_physical_lock_status")? .extract(py)?) }) } } /// A working tree in a version control system. /// /// A working tree is a local directory containing the files of a branch that can /// be edited. This struct wraps a Python working tree object and provides access /// to its functionality. pub struct GenericWorkingTree(pub Py); impl crate::tree::PyTree for GenericWorkingTree { fn to_object(&self, py: Python) -> Py { self.0.clone_ref(py) } } impl crate::tree::PyMutableTree for GenericWorkingTree {} impl PyWorkingTree for GenericWorkingTree {} impl Clone for GenericWorkingTree { fn clone(&self) -> Self { Python::attach(|py| GenericWorkingTree(self.0.clone_ref(py))) } } impl<'py> IntoPyObject<'py> for GenericWorkingTree { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = std::convert::Infallible; fn into_pyobject(self, py: Python<'py>) -> Result { Ok(self.0.into_bound(py)) } } /// A builder for creating commits in a working tree. /// /// This struct provides a fluent interface for setting the parameters of a commit /// and then creating it. pub struct CommitBuilder(GenericWorkingTree, Py); impl From for CommitBuilder { /// Create a new CommitBuilder from a WorkingTree. /// /// # Parameters /// /// * `wt` - The working tree to create commits in. /// /// # Returns /// /// A new CommitBuilder instance. fn from(wt: GenericWorkingTree) -> Self { Python::attach(|py| { let kwargs = pyo3::types::PyDict::new(py); CommitBuilder(wt, kwargs.into()) }) } } impl CommitBuilder { /// Set the committer for this commit. /// /// # Parameters /// /// * `committer` - The committer's name and email. /// /// # Returns /// /// Self for method chaining. pub fn committer(self, committer: &str) -> Self { Python::attach(|py| { self.1.bind(py).set_item("committer", committer).unwrap(); }); self } /// Set the commit message. /// /// # Parameters /// /// * `message` - The commit message. /// /// # Returns /// /// Self for method chaining. pub fn message(self, message: &str) -> Self { Python::attach(|py| { self.1.bind(py).set_item("message", message).unwrap(); }); self } /// Specify which files to include in this commit. /// /// # Parameters /// /// * `specific_files` - The paths of files to include in this commit. /// /// # Returns /// /// Self for method chaining. pub fn specific_files(self, specific_files: &[&Path]) -> Self { let specific_files: Vec = specific_files .iter() .map(|x| x.to_string_lossy().to_string()) .collect(); Python::attach(|py| { self.1 .bind(py) .set_item("specific_files", specific_files) .unwrap(); }); self } /// Allow pointless commits. /// /// # Parameters /// /// * `allow_pointless` - Whether to allow commits that don't change any files. /// /// # Returns /// /// Self for method chaining. pub fn allow_pointless(self, allow_pointless: bool) -> Self { Python::attach(|py| { self.1 .bind(py) .set_item("allow_pointless", allow_pointless) .unwrap(); }); self } /// Set a reporter for this commit. /// /// # Parameters /// /// * `reporter` - The commit reporter to use. /// /// # Returns /// /// Self for method chaining. pub fn reporter(self, reporter: &dyn crate::commit::PyCommitReporter) -> Self { Python::attach(|py| { self.1 .bind(py) .set_item("reporter", reporter.to_object(py)) .unwrap(); }); self } /// Set the timestamp for this commit. /// /// # Parameters /// /// * `timestamp` - The timestamp for the commit. /// /// # Returns /// /// Self for method chaining. 
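    ///
    /// A minimal sketch of the fluent chain, assuming `breezyshim::workingtree`
    /// is the public module path; the epoch timestamp shown is an arbitrary
    /// example value.
    ///
    /// ```no_run
    /// use std::path::Path;
    /// use breezyshim::workingtree::{self, WorkingTree};
    ///
    /// let wt = workingtree::open(Path::new(".")).unwrap();
    /// let _rev_id = wt
    ///     .build_commit()
    ///     .message("Pin the commit time")
    ///     .committer("Example Committer <committer@example.com>")
    ///     .timestamp(1_700_000_000.0)
    ///     .allow_pointless(true)
    ///     .commit()
    ///     .unwrap();
    /// ```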
pub fn timestamp(self, timestamp: f64) -> Self { Python::attach(|py| { self.1.bind(py).set_item("timestamp", timestamp).unwrap(); }); self } /// Set a revision property for this commit. /// /// Revision properties are key-value pairs that can be attached to commits /// to store additional metadata beyond the standard commit fields. /// /// # Parameters /// /// * `key` - The property key (name). /// * `value` - The property value as a string. /// /// # Returns /// /// Self for method chaining, or an error if the operation failed. pub fn set_revprop(self, key: &str, value: &str) -> Result { Python::attach(|py| { // Get or create the revprops dictionary if self.1.bind(py).get_item("revprops")?.is_none() { let new_revprops = pyo3::types::PyDict::new(py); self.1.bind(py).set_item("revprops", new_revprops)?; } // Now get the revprops dictionary and set the property value let revprops = self.1.bind(py).get_item("revprops")?.ok_or_else(|| { Error::Other(pyo3::PyErr::new::( "revprops should exist after setting it", )) })?; let revprops_dict = revprops.cast::().map_err(|_| { Error::Other(pyo3::PyErr::new::( "revprops is not a dictionary", )) })?; revprops_dict.set_item(key, value)?; Ok(self) }) } /// Create the commit. /// /// # Returns /// /// The revision ID of the new commit, or an error if the commit could not be created. pub fn commit(self) -> Result { Python::attach(|py| { Ok(self .0 .to_object(py) .call_method(py, "commit", (), Some(self.1.bind(py)))? .extract(py) .unwrap()) }) } } impl GenericWorkingTree { /// Open a working tree at the specified path. /// /// This method is deprecated, use the module-level `open` function instead. /// /// # Parameters /// /// * `path` - The path to the working tree. /// /// # Returns /// /// The working tree, or an error if it could not be opened. #[deprecated = "Use ::open instead"] pub fn open(path: &Path) -> Result { open(path) } /// Open a working tree containing the specified path. /// /// This method is deprecated, use the module-level `open_containing` function instead. /// /// # Parameters /// /// * `path` - The path to look for a containing working tree. /// /// # Returns /// /// A tuple containing the working tree and the relative path, or an error /// if no containing working tree could be found. #[deprecated = "Use ::open_containing instead"] pub fn open_containing(path: &Path) -> Result<(GenericWorkingTree, PathBuf), Error> { open_containing(path) } /// Create a commit with the specified parameters. /// /// This method is deprecated, use the `build_commit` method instead. /// /// # Parameters /// /// * `message` - The commit message. /// * `allow_pointless` - Whether to allow commits that don't change any files. /// * `committer` - The committer's name and email. /// * `specific_files` - The paths of files to include in this commit. /// /// # Returns /// /// The revision ID of the new commit, or an error if the commit could not be created. 
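    ///
    /// Migration sketch (module path assumed as elsewhere in this crate): the
    /// deprecated positional call can be replaced by the equivalent builder chain.
    ///
    /// ```no_run
    /// use std::path::Path;
    /// use breezyshim::workingtree::{self, WorkingTree};
    ///
    /// let wt = workingtree::open(Path::new(".")).unwrap();
    /// // Instead of wt.commit("msg", Some("A <a@example.com>"), None, Some(true), None):
    /// let _rev_id = wt
    ///     .build_commit()
    ///     .message("msg")
    ///     .committer("A <a@example.com>")
    ///     .allow_pointless(true)
    ///     .commit()
    ///     .unwrap();
    /// ```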
#[deprecated = "Use build_commit instead"] pub fn commit( &self, message: &str, committer: Option<&str>, timestamp: Option, allow_pointless: Option, specific_files: Option<&[&Path]>, ) -> Result { let mut builder = self.build_commit().message(message); if let Some(specific_files) = specific_files { builder = builder.specific_files(specific_files); } if let Some(allow_pointless) = allow_pointless { builder = builder.allow_pointless(allow_pointless); } if let Some(committer) = committer { builder = builder.committer(committer); } if let Some(timestamp) = timestamp { builder = builder.timestamp(timestamp); } builder.commit() } } /// Open a working tree at the specified path. /// /// # Parameters /// /// * `path` - The path of the working tree to open. /// /// # Returns /// /// The working tree, or an error if it could not be opened. pub fn open(path: &Path) -> Result { Python::attach(|py| { let m = py.import("breezy.workingtree")?; let c = m.getattr("WorkingTree")?; let wt = c.call_method1("open", (path.to_string_lossy().to_string(),))?; Ok(GenericWorkingTree(wt.unbind())) }) } /// Open a working tree containing the specified path. /// /// This function searches for a working tree containing the specified path /// and returns both the working tree and the path relative to the working tree. /// /// # Parameters /// /// * `path` - The path to look for a containing working tree. /// /// # Returns /// /// A tuple containing the working tree and the relative path, or an error /// if no containing working tree could be found. pub fn open_containing(path: &Path) -> Result<(GenericWorkingTree, PathBuf), Error> { Python::attach(|py| { let m = py.import("breezy.workingtree")?; let c = m.getattr("WorkingTree")?; let (wt, p): (Bound, String) = c .call_method1("open_containing", (path.to_string_lossy(),))? .extract()?; Ok((GenericWorkingTree(wt.unbind()), PathBuf::from(p))) }) } /// Implementation of From> for GenericWorkingTree. impl From> for GenericWorkingTree { /// Create a new WorkingTree from a Python object. /// /// # Parameters /// /// * `obj` - The Python object representing a working tree. /// /// # Returns /// /// A new WorkingTree instance. fn from(obj: Py) -> Self { GenericWorkingTree(obj) } } breezyshim-0.7.5/src/workspace.rs000064400000000000000000000160071046102023000151550ustar 00000000000000//! Convenience functions for automated operations on a VCS tree #[cfg(feature = "dirty-tracker")] use crate::dirty_tracker::{DirtyTreeTracker, State as DirtyTrackerState}; use crate::error::Error; use crate::tree::PyTree; use crate::workingtree::PyWorkingTree; use pyo3::prelude::*; /// Reset a tree with a dirty tracker. /// /// This function resets a working tree to match a basis tree, but only if the /// dirty tracker indicates that the tree is dirty. If the tree is clean, the /// function does nothing. /// /// # Parameters /// /// * `local_tree` - The working tree to reset. /// * `basis_tree` - The basis tree to reset to, or None to use the working tree's basis tree. /// * `subpath` - The path within the tree to reset, or None to reset the entire tree. /// * `dirty_tracker` - The dirty tracker to use, or None to ignore dirty tracking. /// /// # Returns /// /// `Ok(())` on success, or an error if the tree could not be reset. 
#[cfg(feature = "dirty-tracker")] pub fn reset_tree_with_dirty_tracker( local_tree: &dyn PyWorkingTree, basis_tree: Option<&dyn PyTree>, subpath: Option<&std::path::Path>, dirty_tracker: Option<&mut DirtyTreeTracker>, ) -> Result<(), Error> { if let Some(dirty_tracker) = dirty_tracker { if dirty_tracker.state() == DirtyTrackerState::Clean { return Ok(()); } // TODO: Only reset those files that are dirty } reset_tree(local_tree, basis_tree, subpath) } /// Reset a tree to match a basis tree. /// /// This function resets a working tree to match a basis tree, discarding any /// uncommitted changes in the working tree. /// /// # Parameters /// /// * `local_tree` - The working tree to reset. /// * `basis_tree` - The basis tree to reset to, or None to use the working tree's basis tree. /// * `subpath` - The path within the tree to reset, or None to reset the entire tree. /// /// # Returns /// /// `Ok(())` on success, or an error if the tree could not be reset. pub fn reset_tree( local_tree: &dyn PyWorkingTree, basis_tree: Option<&dyn PyTree>, subpath: Option<&std::path::Path>, ) -> Result<(), Error> { // Lock the tree before resetting let lock = local_tree.lock_write()?; let result = Python::attach(|py| { let workspace_m = py.import("breezy.workspace")?; let reset_tree = workspace_m.getattr("reset_tree")?; let local_tree: Py = local_tree.to_object(py); let basis_tree: Option> = basis_tree.map(|o| o.to_object(py)); // Breezy uses forward slashes internally, even on Windows let subpath_str = subpath.map(|p| p.to_string_lossy().replace('\\', "/")); reset_tree.call1((local_tree, basis_tree, subpath_str))?; Ok(()) }); drop(lock); result } /// Check if a tree is clean. /// /// This function checks if a working tree is clean, meaning it has no uncommitted /// changes compared to a basis tree. /// /// # Parameters /// /// * `local_tree` - The working tree to check. /// * `basis_tree` - The basis tree to compare against. /// * `subpath` - The path within the tree to check. /// /// # Returns /// /// `Ok(())` if the tree is clean, or an error if the tree is dirty or the check failed. 
pub fn check_clean_tree(
    local_tree: &dyn PyWorkingTree,
    basis_tree: &dyn PyTree,
    subpath: &std::path::Path,
) -> Result<(), Error> {
    // Lock the tree before checking
    let lock = local_tree.lock_read()?;
    let result = Python::attach(|py| {
        let workspace_m = py.import("breezy.workspace")?;
        let check_clean_tree = workspace_m.getattr("check_clean_tree")?;
        let local_tree: Py<PyAny> = local_tree.to_object(py).clone_ref(py);
        let basis_tree: Py<PyAny> = basis_tree.to_object(py).clone_ref(py);
        // Breezy uses forward slashes internally, even on Windows
        let subpath_str = subpath.to_string_lossy().replace('\\', "/");
        check_clean_tree.call1((local_tree, basis_tree, subpath_str))?;
        Ok(())
    });
    drop(lock);
    result
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::controldir::create_standalone_workingtree;
    use crate::tree::MutableTree;
    use crate::workingtree::WorkingTree;
    use std::path::Path;

    #[test]
    fn test_reset_tree() {
        let tmp_dir = tempfile::tempdir().unwrap();
        let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap();
        let basis_tree = wt.basis_tree().unwrap();
        let result = reset_tree(&wt, Some(&basis_tree), None);
        assert!(result.is_ok());
    }

    #[test]
    fn test_reset_tree_no_basis() {
        let tmp_dir = tempfile::tempdir().unwrap();
        let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap();
        let result = reset_tree(&wt, None, None);
        assert!(result.is_ok());
    }

    #[test]
    #[serial_test::serial]
    #[cfg_attr(
        windows,
        ignore = "Breezy raises OSError errno 13 (ERROR_INVALID_DATA) on Windows when calling reset_tree with subpath - likely a Breezy bug"
    )]
    fn test_reset_tree_with_subpath() {
        let env = crate::testing::TestEnv::new();
        let wt = create_standalone_workingtree(&env.working_dir, "2a").unwrap();

        // Create a subdir in the working tree
        std::fs::create_dir("subdir").unwrap();
        std::fs::write("subdir/file.txt", "content").unwrap();
        wt.add(&[Path::new("subdir")]).unwrap();
        wt.add(&[Path::new("subdir/file.txt")]).unwrap();
        wt.build_commit().message("Add subdir").commit().unwrap();

        let basis_tree = wt.basis_tree().unwrap();
        let subpath = Path::new("subdir");
        let result = reset_tree(&wt, Some(&basis_tree), Some(subpath));
        if let Err(e) = &result {
            eprintln!("reset_tree failed with error: {:?}", e);
        }
        assert!(result.is_ok());
        std::mem::drop(env); // Ensure the test environment is cleaned up
    }

    #[test]
    fn test_check_clean_tree() {
        let tmp_dir = tempfile::tempdir().unwrap();
        let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap();

        // Add and commit some content first
        std::fs::write(tmp_dir.path().join("file.txt"), "content").unwrap();
        wt.add(&[Path::new("file.txt")]).unwrap();
        wt.build_commit()
            .message("Initial commit")
            .commit()
            .unwrap();

        let basis_tree = wt.basis_tree().unwrap();
        let subpath = Path::new("");
        let result = check_clean_tree(&wt, &basis_tree, subpath);
        assert!(result.is_ok());
    }

    #[cfg(feature = "dirty-tracker")]
    #[test]
    fn test_reset_tree_with_dirty_tracker() {
        use crate::dirty_tracker::DirtyTreeTracker;

        let tmp_dir = tempfile::tempdir().unwrap();
        let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap();
        let basis_tree = wt.basis_tree().unwrap();
        let mut dirty_tracker = DirtyTreeTracker::new(Clone::clone(&wt));
        let result =
            reset_tree_with_dirty_tracker(&wt, Some(&basis_tree), None, Some(&mut dirty_tracker));
        assert!(result.is_ok());
    }
}
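// Additional illustrative test, not part of the original suite. It assumes
// that `reset_tree` reverts an uncommitted modification to a tracked file, so
// that a subsequent `check_clean_tree` succeeds again.
#[cfg(test)]
mod usage_sketch_tests {
    use super::*;
    use crate::controldir::create_standalone_workingtree;
    use crate::tree::MutableTree;
    use crate::workingtree::WorkingTree;
    use std::path::Path;

    #[test]
    fn test_reset_then_check_clean() {
        let tmp_dir = tempfile::tempdir().unwrap();
        let wt = create_standalone_workingtree(tmp_dir.path(), "2a").unwrap();

        // Commit an initial version of a file.
        std::fs::write(tmp_dir.path().join("file.txt"), "original").unwrap();
        wt.add(&[Path::new("file.txt")]).unwrap();
        wt.build_commit()
            .message("Initial commit")
            .commit()
            .unwrap();

        // Introduce an uncommitted modification, then discard it.
        std::fs::write(tmp_dir.path().join("file.txt"), "modified").unwrap();
        let basis_tree = wt.basis_tree().unwrap();
        reset_tree(&wt, Some(&basis_tree), None).unwrap();

        // With the modification reverted, the tree should report as clean.
        assert!(check_clean_tree(&wt, &basis_tree, Path::new("")).is_ok());
    }
}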