diff --git a/Cargo.lock b/Cargo.lock index 9c1dd825..507169a4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,21 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "addr2line" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + [[package]] name = "aho-corasick" version = "1.1.4" @@ -72,9 +87,9 @@ dependencies = [ [[package]] name = "assert_cmd" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcbb6924530aa9e0432442af08bbcafdad182db80d2e560da42a6d442535bf85" +checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514" dependencies = [ "anstyle", "bstr", @@ -102,6 +117,30 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "backtrace" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-link", +] + +[[package]] +name = "backtrace-ext" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" +dependencies = [ + "backtrace", +] + [[package]] name = "bender" version = "0.29.1" @@ -116,10 +155,14 @@ dependencies = [ "futures", "glob", "indexmap", + "indicatif", "is-terminal", "itertools", + "miette", + "owo-colors", "pathdiff", "pretty_assertions", + "regex", 
"semver", "serde", "serde_json", @@ -128,6 +171,7 @@ dependencies = [ "tabwriter", "tempfile", "tera", + "thiserror", "tokio", "typed-arena", "walkdir", @@ -170,9 +214,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" [[package]] name = "bytes" @@ -182,9 +226,9 @@ checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "cc" -version = "1.2.47" +version = "1.2.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd405d82c84ff7f35739f175f67d8b9fb7687a0e84ccdc78bd3568839827cf07" +checksum = "cd4932aefd12402b36c60956a4fe0035421f544799057659ff86f923657aada3" dependencies = [ "find-msvc-tools", "shlex", @@ -253,9 +297,9 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.61" +version = "4.5.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39615915e2ece2550c0149addac32fb5bd312c657f43845bb9088cb9c8a7c992" +checksum = "430b4dc2b5e3861848de79627b2bedc9f3342c7da5173a14eaa5d0f8dc18ae5d" dependencies = [ "clap", ] @@ -274,9 +318,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" +checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" [[package]] name = "colorchoice" @@ -284,6 +328,19 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "console" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"03e45a4a8926227e4197636ba97a9fc9b00477e9f4bd711395687c5f0734bec4" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width 0.2.2", + "windows-sys 0.61.2", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -396,6 +453,12 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + [[package]] name = "equivalent" version = "1.0.2" @@ -420,9 +483,9 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "find-msvc-tools" -version = "0.1.5" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" +checksum = "f449e6c6c08c865631d4890cfacf252b3d396c9bcc83adb6623cdb02a8336c41" [[package]] name = "futures" @@ -525,9 +588,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" dependencies = [ "cfg-if", "libc", @@ -546,6 +609,12 @@ dependencies = [ "wasip2", ] +[[package]] +name = "gimli" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" + [[package]] name = "glob" version = "0.3.3" @@ -645,9 +714,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.12.1" +version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", "hashbrown", @@ -655,6 +724,19 @@ dependencies = [ "serde_core", ] +[[package]] +name = "indicatif" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88" +dependencies = [ + "console", + "portable-atomic", + "unicode-width 0.2.2", + "unit-prefix", + "web-time", +] + [[package]] name = "is-terminal" version = "0.4.17" @@ -666,6 +748,12 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + [[package]] name = "is_terminal_polyfill" version = "1.70.2" @@ -683,15 +771,15 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.15" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "js-sys" -version = "0.3.82" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" +checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" dependencies = [ "once_cell", "wasm-bindgen", @@ -705,9 +793,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.177" +version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" [[package]] name = 
"libm" @@ -717,9 +805,9 @@ checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libredox" -version = "0.1.10" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ "bitflags", "libc", @@ -742,9 +830,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.28" +version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "memchr" @@ -752,11 +840,50 @@ version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +[[package]] +name = "miette" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7" +dependencies = [ + "backtrace", + "backtrace-ext", + "cfg-if", + "miette-derive", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size", + "textwrap", + "unicode-width 0.1.14", +] + +[[package]] +name = "miette-derive" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + [[package]] name = "mio" -version = "1.1.0" +version = "1.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", "wasi", @@ -772,6 +899,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "object" +version = "0.37.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.21.3" @@ -790,6 +926,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "owo-colors" +version = "4.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" + [[package]] name = "parking_lot" version = "0.12.5" @@ -836,9 +978,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.4" +version = "2.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" +checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" dependencies = [ "memchr", "ucd-trie", @@ -846,9 +988,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.4" +version = "2.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f" +checksum = "68f9dbced329c441fa79d80472764b1a2c7e57123553b8519b36663a2fb234ed" dependencies = [ "pest", "pest_generator", @@ -856,9 +998,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.4" +version = "2.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625" +checksum = "3bb96d5051a78f44f43c8f712d8e810adb0ebf923fc9ed2655a7f66f63ba8ee5" dependencies = [ "pest", "pest_meta", @@ -869,9 +1011,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.8.4" +version = "2.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82" +checksum = "602113b5b5e8621770cfd490cfd90b9f84ab29bd2b0e49ad83eb6d186cef2365" dependencies = [ "pest", "sha2", @@ -927,6 +1069,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "portable-atomic" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" + [[package]] name = "ppv-lite86" version = "0.2.21" @@ -975,18 +1123,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.103" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" +checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.42" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" +checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" dependencies = [ "proc-macro2", ] @@ -1024,7 +1172,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.16", + "getrandom 0.2.17", ] [[package]] @@ -1042,7 +1190,7 @@ version = "0.5.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ - "getrandom 0.2.16", + "getrandom 0.2.17", "libredox", "thiserror", ] @@ -1076,11 +1224,17 @@ version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + [[package]] name = "rustix" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" dependencies = [ "bitflags", "errno", @@ -1097,9 +1251,9 @@ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" -version = "1.0.20" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" [[package]] name = "same-file" @@ -1158,15 +1312,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.145" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", "serde_core", + "zmij", ] [[package]] @@ -1201,10 +1355,11 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.7" +version = "1.4.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7664a098b8e616bdfcc2dc0e9ac44eb231eedf41db4e9fe95d8d32ec728dedad" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] @@ -1268,11 +1423,32 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "supports-color" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" +dependencies = [ + "is_ci", +] + +[[package]] +name = "supports-hyperlinks" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91" + +[[package]] +name = "supports-unicode" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" + [[package]] name = "syn" -version = "2.0.111" +version = "2.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" dependencies = [ "proc-macro2", "quote", @@ -1290,9 +1466,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.23.0" +version = "3.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" dependencies = [ "fastrand", "getrandom 0.3.4", @@ -1323,12 +1499,32 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "terminal_size" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" +dependencies = [ + "rustix", + "windows-sys 0.60.2", +] + [[package]] name = "termtree" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" +[[package]] +name = "textwrap" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" +dependencies = [ + "unicode-linebreak", + "unicode-width 0.2.2", +] + [[package]] name = "thiserror" version = "2.0.17" @@ -1351,9 +1547,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.48.0" +version = "1.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" dependencies = [ "bytes", "libc", @@ -1401,6 +1597,12 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + [[package]] name = "unicode-segmentation" version = "1.12.0" @@ -1419,6 +1621,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" +[[package]] +name = "unit-prefix" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81e544489bf3d8ef66c953931f56617f423cd4b5494be343d9b9d3dda037b9a3" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -1473,9 +1681,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.105" +version = 
"0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" dependencies = [ "cfg-if", "once_cell", @@ -1486,9 +1694,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.105" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1496,9 +1704,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.105" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" dependencies = [ "bumpalo", "proc-macro2", @@ -1509,13 +1717,23 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.105" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" +checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" dependencies = [ "unicode-ident", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "winapi-util" version = "0.1.11" @@ -1681,20 +1899,26 @@ checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "zerocopy" -version = "0.8.28" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "43fa6694ed34d6e57407afbccdeecfa268c470a7d2a5b0cf49ce9fcc345afb90" +checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.28" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c640b22cd9817fae95be82f0d2f90b11f7605f6c319d16705c459b27ac2cbc26" +checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" dependencies = [ "proc-macro2", "quote", "syn", ] + +[[package]] +name = "zmij" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac93432f5b761b22864c774aac244fa5c0fd877678a4c37ebf6cf42208f9c9ec" diff --git a/Cargo.toml b/Cargo.toml index d0037bef..1b86ab81 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -38,6 +38,11 @@ glob = "0.3" walkdir = "2" subst = "0.3" tera = "1.19" +miette = { version = "7.6.0", features = ["fancy"] } +thiserror = "2.0.17" +owo-colors = "4.2.3" +indicatif = "0.18.3" +regex = "1.12.2" [target.'cfg(windows)'.dependencies] dunce = "1.0.4" diff --git a/src/cli.rs b/src/cli.rs index 1e4079ee..8e07ee5f 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -4,6 +4,7 @@ //! Main command line tool implementation. use std; +use std::collections::HashSet; use std::path::{Path, PathBuf}; use std::process::Command as SysCommand; @@ -22,6 +23,7 @@ use tokio::runtime::Runtime; use crate::cmd; use crate::cmd::fusesoc::FusesocArgs; use crate::config::{Config, Manifest, Merge, PartialConfig, PrefixPaths, Validate}; +use crate::diagnostic::{Diagnostics, Warnings}; use crate::error::*; use crate::lockfile::*; use crate::sess::{Session, SessionArenas, SessionIo}; @@ -113,8 +115,9 @@ pub fn main() -> Result<()> { // Parse command line arguments. 
let cli = Cli::parse(); - let mut suppressed_warnings: IndexSet = + let mut suppressed_warnings: HashSet = cli.suppress.into_iter().map(|s| s.to_owned()).collect(); + // split suppress strings on commas and spaces suppressed_warnings = suppressed_warnings .into_iter() @@ -125,9 +128,9 @@ pub fn main() -> Result<()> { }) .collect(); - if suppressed_warnings.contains("all") || suppressed_warnings.contains("Wall") { - suppressed_warnings.extend((1..24).map(|i| format!("W{:02}", i))); - } + let warn_config_loaded = !suppressed_warnings.contains("W02"); + + Diagnostics::init(suppressed_warnings); #[cfg(debug_assertions)] if cli.debug { @@ -147,7 +150,7 @@ pub fn main() -> Result<()> { } let force_fetch = match cli.command { - Commands::Update(ref args) => cmd::update::setup(args, cli.local, &suppressed_warnings)?, + Commands::Update(ref args) => cmd::update::setup(args, cli.local)?, _ => false, }; @@ -165,14 +168,13 @@ pub fn main() -> Result<()> { // Parse the manifest file of the package. let manifest_path = root_dir.join("Bender.yml"); - let manifest = read_manifest(&manifest_path, &suppressed_warnings)?; + let manifest = read_manifest(&manifest_path)?; debugln!("main: {:#?}", manifest); // Gather and parse the tool configuration. 
let config = load_config( &root_dir, - matches!(cli.command, Commands::Update(_)) && !suppressed_warnings.contains("W02"), - &suppressed_warnings, + matches!(cli.command, Commands::Update(_)) && warn_config_loaded, )?; debugln!("main: {:#?}", config); @@ -189,7 +191,6 @@ pub fn main() -> Result<()> { cli.local, force_fetch, git_throttle, - suppressed_warnings, ); if let Commands::Clean(args) = cli.command { @@ -252,13 +253,7 @@ pub fn main() -> Result<()> { ) })?; if !meta.file_type().is_symlink() { - if !sess.suppress_warnings.contains("W01") { - warnln!( - "[W01] Skipping link to package {} at {:?} since there is something there", - pkg_name, - path - ); - } + Warnings::SkippingPackageLink(pkg_name.clone(), path.clone()).emit(); continue; } if path.read_link().map(|d| d != pkg_path).unwrap_or(true) { @@ -398,7 +393,7 @@ fn find_package_root(from: &Path) -> Result { } /// Read a package manifest from a file. -pub fn read_manifest(path: &Path, suppress_warnings: &IndexSet) -> Result { +pub fn read_manifest(path: &Path) -> Result { use crate::config::PartialManifest; use std::fs::File; debugln!("read_manifest: {:?}", path); @@ -409,16 +404,12 @@ pub fn read_manifest(path: &Path, suppress_warnings: &IndexSet) -> Resul partial .prefix_paths(path.parent().unwrap()) .map_err(|cause| Error::chain(format!("Error in manifest prefixing {:?}.", path), cause))? - .validate("", false, suppress_warnings) + .validate("", false) .map_err(|cause| Error::chain(format!("Error in manifest {:?}.", path), cause)) } /// Load a configuration by traversing a directory hierarchy upwards. -fn load_config( - from: &Path, - warn_config_loaded: bool, - suppress_warnings: &IndexSet, -) -> Result { +fn load_config(from: &Path, warn_config_loaded: bool) -> Result { #[cfg(unix)] use std::os::unix::fs::MetadataExt; @@ -491,7 +482,7 @@ fn load_config( // Validate the configuration. 
let mut out = out - .validate("", false, suppress_warnings) + .validate("", false) .map_err(|cause| Error::chain("Invalid configuration:", cause))?; out.overrides = out @@ -515,8 +506,11 @@ fn maybe_load_config(path: &Path, warn_config_loaded: bool) -> Result Result<()> { pub fn run_plain(sess: &Session, force: bool, update_list: &[String]) -> Result<()> { let rt = Runtime::new()?; let io = SessionIo::new(sess); + let start_time = std::time::Instant::now(); let _srcs = rt.block_on(io.sources(force, update_list))?; + let num_dependencies = io.sess.packages().iter().flatten().count(); + infoln!( + "{} {} dependencies {}", + "Checked out".dimmed(), + num_dependencies, + fmt_duration(start_time.elapsed()).dimmed() + ); Ok(()) } diff --git a/src/cmd/clone.rs b/src/cmd/clone.rs index 04ca26c1..45a232d2 100644 --- a/src/cmd/clone.rs +++ b/src/cmd/clone.rs @@ -12,6 +12,7 @@ use tokio::runtime::Runtime; use crate::config; use crate::config::{Locked, LockedSource}; +use crate::diagnostic::Warnings; use crate::error::*; use crate::sess::{DependencyRef, DependencySource, Session, SessionIo}; @@ -138,8 +139,8 @@ pub fn run(sess: &Session, path: &Path, args: &CloneArgs) -> Result<()> { { Err(Error::new("git fetch failed".to_string()))?; } - } else if !sess.suppress_warnings.contains("W14") { - warnln!("[W14] fetch not performed due to --local argument."); + } else { + Warnings::LocalNoFetch.emit(); } eprintln!( @@ -263,11 +264,7 @@ pub fn run(sess: &Session, path: &Path, args: &CloneArgs) -> Result<()> { ) })?; if !meta.file_type().is_symlink() { - warnln!( - "[W15] Skipping link to package {} at {:?} since there is something there", - pkg_name, - link_path - ); + Warnings::SkippingPackageLink(pkg_name.clone(), link_path.to_path_buf()).emit(); continue; } if link_path.read_link().map(|d| d != pkg_path).unwrap_or(true) { diff --git a/src/cmd/fusesoc.rs b/src/cmd/fusesoc.rs index 559f8594..634c70a3 100644 --- a/src/cmd/fusesoc.rs +++ b/src/cmd/fusesoc.rs @@ -18,6 +18,7 @@ use 
itertools::Itertools; use tokio::runtime::Runtime; use walkdir::{DirEntry, WalkDir}; +use crate::diagnostic::Warnings; use crate::error::*; use crate::sess::{Session, SessionIo}; use crate::src::{SourceFile, SourceGroup}; @@ -132,8 +133,8 @@ pub fn run_single(sess: &Session, args: &FusesocArgs) -> Result<()> { Error::chain(format!("Unable to write corefile for {:?}.", &name), cause) })?; - if fuse_depend_string.len() > 1 && !sess.suppress_warnings.contains("W16") { - warnln!("[W16] Depend strings may be wrong for the included dependencies!"); + if fuse_depend_string.len() > 1 { + Warnings::DependStringMaybeWrong.emit(); } Ok(()) diff --git a/src/cmd/parents.rs b/src/cmd/parents.rs index 2445082d..abfa133b 100644 --- a/src/cmd/parents.rs +++ b/src/cmd/parents.rs @@ -5,6 +5,7 @@ use std::io::Write; +use crate::diagnostic::Warnings; use clap::Args; use indexmap::IndexMap; use tabwriter::TabWriter; @@ -14,6 +15,7 @@ use crate::config::Dependency; use crate::error::*; use crate::sess::{DependencyConstraint, DependencySource}; use crate::sess::{Session, SessionIo}; +use crate::{fmt_path, fmt_pkg, fmt_version}; /// List packages calling this dependency #[derive(Args, Debug)] @@ -96,12 +98,21 @@ pub fn run(sess: &Session, args: &ParentsArgs) -> Result<()> { } ); - if sess.config.overrides.contains_key(dep) && !sess.suppress_warnings.contains("W18") { - warnln!( - "[W18] An override is configured for {} to {:?}", - dep, - sess.config.overrides[dep] - ) + if sess.config.overrides.contains_key(dep) { + Warnings::DepOverride { + pkg: dep.to_string(), + pkg_override: match sess.config.overrides[dep] { + Dependency::Version(ref v, _) => format!("version {}", fmt_version!(v)), + Dependency::Path(ref path, _) => format!("path {}", fmt_path!(path.display())), + Dependency::GitRevision(ref url, ref rev, _) => { + format!("git {} at revision {}", fmt_path!(url), fmt_version!(rev)) + } + Dependency::GitVersion(ref url, ref version, _) => { + format!("git {} with version {}", 
fmt_path!(url), fmt_pkg!(version)) + } + }, + } + .emit(); } Ok(()) @@ -150,9 +161,10 @@ pub fn get_parent_array( let dep_manifest = rt.block_on(io.dependency_manifest(pkg, false, &[]))?; // Filter out dependencies without a manifest if dep_manifest.is_none() { - if !sess.suppress_warnings.contains("W17") { - warnln!("[W17] {} is shown to include dependency, but manifest does not have this information.", pkg_name.to_string()); + Warnings::IncludeDepManifestMismatch { + pkg: pkg_name.to_string(), } + .emit(); continue; } let dep_manifest = dep_manifest.unwrap(); @@ -182,9 +194,11 @@ pub fn get_parent_array( ], ); } - } else if !sess.suppress_warnings.contains("W17") { - // Filter out dependencies with mismatching manifest - warnln!("[W17] {} is shown to include dependency, but manifest does not have this information.", pkg_name.to_string()); + } else { + Warnings::IncludeDepManifestMismatch { + pkg: pkg_name.to_string(), + } + .emit(); } } } diff --git a/src/cmd/script.rs b/src/cmd/script.rs index e58c00ae..b84ab371 100644 --- a/src/cmd/script.rs +++ b/src/cmd/script.rs @@ -283,7 +283,7 @@ pub fn run(sess: &Session, args: &ScriptArgs) -> Result<()> { let srcs = srcs .flatten() .into_iter() - .map(|f| f.validate("", false, &sess.suppress_warnings)) + .map(|f| f.validate("", false)) .collect::>>()?; let mut tera_context = Context::new(); diff --git a/src/cmd/snapshot.rs b/src/cmd/snapshot.rs index 8a294b47..ce47e105 100644 --- a/src/cmd/snapshot.rs +++ b/src/cmd/snapshot.rs @@ -12,6 +12,7 @@ use tokio::runtime::Runtime; use crate::cmd::clone::{get_path_subdeps, symlink_dir}; use crate::config::{Dependency, Locked, LockedSource}; +use crate::diagnostic::Warnings; use crate::error::*; use crate::sess::{DependencySource, Session, SessionIo}; @@ -57,11 +58,7 @@ pub fn run(sess: &Session, args: &SnapshotArgs) -> Result<()> { .is_empty() && !args.no_skip { - warnln!( - "Skipping dirty dependency {}\ - \t use `--no-skip` to still snapshot.", - name - ); + 
Warnings::SkippingDirtyDep { pkg: name.clone() }.emit(); continue; } @@ -255,11 +252,7 @@ pub fn run(sess: &Session, args: &SnapshotArgs) -> Result<()> { ) })?; if !meta.file_type().is_symlink() { - warnln!( - "[W15] Skipping link to package {} at {:?} since there is something there", - pkg_name, - link_path - ); + Warnings::SkippingPackageLink(pkg_name.clone(), link_path.to_path_buf()).emit(); continue; } if link_path.read_link().map(|d| d != pkg_path).unwrap_or(true) { diff --git a/src/cmd/sources.rs b/src/cmd/sources.rs index 91a94675..6c61615e 100644 --- a/src/cmd/sources.rs +++ b/src/cmd/sources.rs @@ -99,7 +99,7 @@ pub fn run(sess: &Session, args: &SourcesArgs) -> Result<()> { srcs = srcs.filter_packages(packages).unwrap_or_default(); } - srcs = srcs.validate("", false, &sess.suppress_warnings)?; + srcs = srcs.validate("", false)?; let result = { let stdout = std::io::stdout(); diff --git a/src/cmd/update.rs b/src/cmd/update.rs index 0b934d75..3d8999e1 100644 --- a/src/cmd/update.rs +++ b/src/cmd/update.rs @@ -12,6 +12,7 @@ use tabwriter::TabWriter; use crate::cmd; use crate::config::{Locked, LockedPackage}; +use crate::diagnostic::Warnings; use crate::error::*; use crate::lockfile::*; use crate::resolver::DependencyResolver; @@ -42,11 +43,9 @@ pub struct UpdateArgs { } /// Execute the `update` subcommand. -pub fn setup(args: &UpdateArgs, local: bool, suppress_warnings: &IndexSet) -> Result { - if local && args.fetch && !suppress_warnings.contains("W14") { - warnln!( - "[W14] As --local argument is set for bender command, no fetching will be performed." 
- ); +pub fn setup(args: &UpdateArgs, local: bool) -> Result { + if local && args.fetch { + Warnings::LocalNoFetch.emit(); } Ok(args.fetch) } diff --git a/src/cmd/vendor.rs b/src/cmd/vendor.rs index 5d271025..8dc91a3b 100644 --- a/src/cmd/vendor.rs +++ b/src/cmd/vendor.rs @@ -17,9 +17,11 @@ use tokio::runtime::Runtime; use crate::config; use crate::config::PrefixPaths; +use crate::diagnostic::Warnings; use crate::error::*; use crate::futures::TryFutureExt; use crate::git::Git; +use crate::progress::{GitProgressOps, ProgressHandler}; use crate::sess::{DependencySource, Session}; /// A patch linkage @@ -98,15 +100,19 @@ pub fn run(sess: &Session, args: &VendorArgs) -> Result<()> { let dep_path = match dep_src { DependencySource::Path(path) => path, DependencySource::Git(ref url) => { - let git = Git::new(tmp_path, &sess.config.git, sess.git_throttle.clone()); + let git = Git::new(tmp_path, &sess.config.git); rt.block_on(async { - stageln!("Cloning", "{} ({})", vendor_package.name, url); - git.clone().spawn_with(|c| c.arg("clone").arg(url).arg(".")) + let pb = ProgressHandler::new( + sess.multiprogress.clone(), + GitProgressOps::Clone, + vendor_package.name.as_str(), + ); + git.clone().spawn_with(|c| c.arg("clone").arg(url).arg("."), Some(sess.git_throttle.clone()), Some(pb)) .map_err(move |cause| { if url.contains("git@") { - warnln!("[W07] Please ensure your public ssh key is added to the git server."); + Warnings::SshKeyMaybeMissing.emit(); } - warnln!("[W07] Please ensure the url is correct and you have access to the repository."); + Warnings::UrlMaybeIncorrect.emit(); Error::chain( format!("Failed to initialize git database in {:?}.", tmp_path), cause, @@ -116,8 +122,13 @@ pub fn run(sess: &Session, args: &VendorArgs) -> Result<()> { config::Dependency::GitRevision(_, ref rev, _) => Ok(rev), _ => Err(Error::new("Please ensure your vendor reference is a commit hash to avoid upstream changes impacting your checkout")), }?; - git.clone().spawn_with(|c| 
c.arg("checkout").arg(rev_hash)).await?; - if *rev_hash != git.spawn_with(|c| c.arg("rev-parse").arg("--verify").arg(format!("{}^{{commit}}", rev_hash))).await?.trim_end_matches('\n') { + let pb = ProgressHandler::new( + sess.multiprogress.clone(), + GitProgressOps::Checkout, + vendor_package.name.as_str(), + ); + git.clone().spawn_with(|c| c.arg("checkout").arg(rev_hash), None ,Some(pb)).await?; + if *rev_hash != git.spawn_with(|c| c.arg("rev-parse").arg("--verify").arg(format!("{}^{{commit}}", rev_hash)), None, None).await?.trim_end_matches('\n') { Err(Error::new("Please ensure your vendor reference is a commit hash to avoid upstream changes impacting your checkout")) } else { Ok(()) @@ -183,7 +194,7 @@ pub fn run(sess: &Session, args: &VendorArgs) -> Result<()> { seen_paths.insert(patch_link.to_prefix.clone()); } - let git = Git::new(tmp_path, &sess.config.git, sess.git_throttle.clone()); + let git = Git::new(tmp_path, &sess.config.git); match &args.vendor_subcommand { VendorSubcommand::Diff { err_on_diff } => { @@ -269,25 +280,39 @@ pub fn run(sess: &Session, args: &VendorArgs) -> Result<()> { } // Generate patch - sorted_links.into_iter().try_for_each( |patch_link| { + sorted_links.into_iter().try_for_each(|patch_link| { match patch_link.patch_dir.clone() { Some(patch_dir) => { if *plain { - let get_diff = diff(&rt, - git.clone(), - vendor_package, - patch_link, - dep_path.clone()) - .map_err(|cause| Error::chain("Failed to get diff.", cause))?; + let get_diff = diff( + &rt, + git.clone(), + vendor_package, + patch_link, + dep_path.clone(), + ) + .map_err(|cause| Error::chain("Failed to get diff.", cause))?; gen_plain_patch(get_diff, patch_dir, false) } else { - gen_format_patch(&rt, sess, git.clone(), patch_link, vendor_package.target_dir.clone(), message.as_ref()) + gen_format_patch( + &rt, + sess, + git.clone(), + patch_link, + vendor_package.target_dir.clone(), + message.as_ref(), + ) } - }, + } None => { - warnln!("[W15] No patch directory specified for 
package {}, mapping {} => {}. Skipping patch generation.", vendor_package.name.clone(), patch_link.from_prefix.to_str().unwrap(), patch_link.to_prefix.to_str().unwrap()); + Warnings::NoPatchDir { + vendor_pkg: vendor_package.name.clone(), + from_prefix: patch_link.from_prefix.clone(), + to_prefix: patch_link.to_prefix.clone(), + } + .emit(); Ok(()) - }, + } } }) } @@ -336,7 +361,7 @@ pub fn init( if !PathBuf::from(extend_paths(std::slice::from_ref(&path), dep_path, true)?[0].clone()) .exists() { - warnln!("[W16] {} not found in upstream, continuing.", path); + Warnings::NotInUpstream { path }.emit(); } } @@ -366,10 +391,10 @@ pub fn init( ) })?; } else { - warnln!( - "[W16] {} not found in upstream, continuing.", - link_from.to_str().unwrap() - ); + Warnings::NotInUpstream { + path: link_from.to_str().unwrap().to_string(), + } + .emit(); } } }; @@ -412,34 +437,38 @@ pub fn apply_patches( Ok(()) }) .and_then(|_| { - git.clone().spawn_with(|c| { - let is_file = patch_link - .from_prefix - .clone() - .prefix_paths(git.path) - .unwrap() - .is_file(); - - let current_patch_target = if is_file { - patch_link.from_prefix.parent().unwrap().to_str().unwrap() - } else { - patch_link.from_prefix.as_path().to_str().unwrap() - }; - - c.arg("apply") - .arg("--directory") - .arg(current_patch_target) - .arg("-p1") - .arg(&patch); - - // limit to specific file for file links - if is_file { - let file_path = patch_link.from_prefix.to_str().unwrap(); - c.arg("--include").arg(file_path); - } + git.clone().spawn_with( + |c| { + let is_file = patch_link + .from_prefix + .clone() + .prefix_paths(git.path) + .unwrap() + .is_file(); - c - }) + let current_patch_target = if is_file { + patch_link.from_prefix.parent().unwrap().to_str().unwrap() + } else { + patch_link.from_prefix.as_path().to_str().unwrap() + }; + + c.arg("apply") + .arg("--directory") + .arg(current_patch_target) + .arg("-p1") + .arg(&patch); + + // limit to specific file for file links + if is_file { + let file_path = 
patch_link.from_prefix.to_str().unwrap(); + c.arg("--include").arg(file_path); + } + + c + }, + None, + None, + ) }) .await .map_err(move |cause| { @@ -509,15 +538,19 @@ pub fn diff( }; // Get diff rt.block_on(async { - git.spawn_with(|c| { - c.arg("diff").arg(format!( - "--relative={}", - patch_link - .from_prefix - .to_str() - .expect("Failed to convert from_prefix to string.") - )) - }) + git.spawn_with( + |c| { + c.arg("diff").arg(format!( + "--relative={}", + patch_link + .from_prefix + .to_str() + .expect("Failed to convert from_prefix to string.") + )) + }, + None, + None, + ) .await }) } @@ -605,7 +638,6 @@ pub fn gen_format_patch( to_path.parent().unwrap() }, &sess.config.git, - sess.git_throttle.clone(), ); // If the patch link maps a file, use the parent directory for the following git operations. @@ -652,7 +684,11 @@ pub fn gen_format_patch( // Get staged changes in dependency let get_diff_cached = rt - .block_on(async { git_parent.spawn_with(|c| c.args(&diff_args)).await }) + .block_on(async { + git_parent + .spawn_with(|c| c.args(&diff_args), None, None) + .await + }) .map_err(|cause| Error::chain("Failed to generate diff", cause))?; if !get_diff_cached.is_empty() { @@ -670,8 +706,8 @@ pub fn gen_format_patch( .arg(&from_path_relative) .arg("-p1") .arg(&diff_cached_path) - }) - .and_then(|_| git.clone().spawn_with(|c| c.arg("add").arg("--all"))) + }, None, None) + .and_then(|_| git.clone().spawn_with(|c| c.arg("add").arg("--all"), None, None)) .await }).map_err(|cause| Error::chain("Could not apply staged changes on top of patched upstream repository. 
Did you commit all previously patched modifications?", cause))?; @@ -720,18 +756,22 @@ pub fn gen_format_patch( // Generate format-patch rt.block_on(async { - git.spawn_with(|c| { - c.arg("format-patch") - .arg("-o") - .arg(patch_dir.to_str().unwrap()) - .arg("-1") - .arg(format!("--start-number={}", max_number + 1)) - .arg(format!( - "--relative={}", - from_path_relative.to_str().unwrap() - )) - .arg("HEAD") - }) + git.spawn_with( + |c| { + c.arg("format-patch") + .arg("-o") + .arg(patch_dir.to_str().unwrap()) + .arg("-1") + .arg(format!("--start-number={}", max_number + 1)) + .arg(format!( + "--relative={}", + from_path_relative.to_str().unwrap() + )) + .arg("HEAD") + }, + None, + None, + ) .await })?; } diff --git a/src/config.rs b/src/config.rs index 908885c9..05f1d98e 100644 --- a/src/config.rs +++ b/src/config.rs @@ -18,7 +18,7 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use glob::glob; -use indexmap::{IndexMap, IndexSet}; +use indexmap::IndexMap; use semver; use serde::de::{Deserialize, Deserializer}; use serde::ser::{Serialize, Serializer}; @@ -26,6 +26,7 @@ use serde_yaml_ng::Value; #[cfg(unix)] use subst; +use crate::diagnostic::{Diagnostics, Warnings}; use crate::error::*; use crate::target::TargetSpec; use crate::util::*; @@ -266,7 +267,6 @@ pub trait Validate { self, package_name: &str, pre_output: bool, - suppress_warnings: &IndexSet, ) -> std::result::Result; } @@ -282,15 +282,12 @@ where self, package_name: &str, pre_output: bool, - suppress_warnings: &IndexSet, ) -> std::result::Result { self.into_iter() - .map( - |(k, v)| match v.validate(package_name, pre_output, suppress_warnings) { - Ok(v) => Ok((k, v)), - Err(e) => Err((k, e)), - }, - ) + .map(|(k, v)| match v.validate(package_name, pre_output) { + Ok(v) => Ok((k, v)), + Err(e) => Err((k, e)), + }) .collect() } } @@ -305,15 +302,12 @@ where self, package_name: &str, pre_output: bool, - suppress_warnings: &IndexSet, ) -> std::result::Result { self.into_iter() - .map( - |v| match 
v.validate(package_name, pre_output, suppress_warnings) { - Ok(v) => Ok(v), - Err(e) => Err(e), - }, - ) + .map(|v| match v.validate(package_name, pre_output) { + Ok(v) => Ok(v), + Err(e) => Err(e), + }) .collect() } } @@ -329,9 +323,8 @@ where self, package_name: &str, pre_output: bool, - suppress_warnings: &IndexSet, ) -> std::result::Result { - self.0.validate(package_name, pre_output, suppress_warnings) + self.0.validate(package_name, pre_output) } } @@ -346,9 +339,8 @@ where self, package_name: &str, pre_output: bool, - suppress_warnings: &IndexSet, ) -> std::result::Result { - self.0.validate(package_name, pre_output, suppress_warnings) + self.0.validate(package_name, pre_output) } } @@ -406,10 +398,9 @@ impl PartialManifest { mut self, package_name: &str, pre_output: bool, - suppress_warnings: &IndexSet, ) -> Result { self.sources = Some(SeqOrStruct::new(PartialSources::new_empty())); - self.validate(package_name, pre_output, suppress_warnings) + self.validate(package_name, pre_output) } } @@ -443,24 +434,17 @@ impl PrefixPaths for PartialManifest { impl Validate for PartialManifest { type Output = Manifest; type Error = Error; - fn validate( - self, - _package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result { + fn validate(self, _package_name: &str, pre_output: bool) -> Result { let pkg = match self.package { Some(mut p) => { p.name = p.name.to_lowercase(); if !pre_output { p.extra.iter().for_each(|(k, _)| { - if !suppress_warnings.contains("W03") { - warnln!( - "[W03] Ignoring unknown field `{}` in manifest package for {}.", - k, - p.name - ); + Warnings::IgnoreUnknownField { + field: k.clone(), + pkg: p.name.clone(), } + .emit(); }); } p @@ -472,7 +456,7 @@ impl Validate for PartialManifest { .into_iter() .map(|(k, v)| (k.to_lowercase(), v)) .collect::>() - .validate(&pkg.name, pre_output, suppress_warnings) + .validate(&pkg.name, pre_output) .map_err(|(key, cause)| { Error::chain( format!("In dependency `{}` of package 
`{}`:", key, pkg.name), @@ -482,12 +466,9 @@ impl Validate for PartialManifest { None => IndexMap::new(), }; let srcs = match self.sources { - Some(s) => Some( - s.validate(&pkg.name, pre_output, suppress_warnings) - .map_err(|cause| { - Error::chain(format!("In source list of package `{}`:", pkg.name), cause) - })?, - ), + Some(s) => Some(s.validate(&pkg.name, pre_output).map_err(|cause| { + Error::chain(format!("In source list of package `{}`:", pkg.name), cause) + })?), None => None, }; let exp_inc_dirs = self.export_include_dirs.unwrap_or_default(); @@ -501,25 +482,23 @@ impl Validate for PartialManifest { let frozen = self.frozen.unwrap_or(false); let workspace = match self.workspace { Some(w) => w - .validate(&pkg.name, pre_output, suppress_warnings) + .validate(&pkg.name, pre_output) .map_err(|cause| Error::chain("In workspace configuration:", cause))?, None => Workspace::default(), }; let vendor_package = match self.vendor_package { Some(vend) => vend - .validate(&pkg.name, pre_output, suppress_warnings) + .validate(&pkg.name, pre_output) .map_err(|cause| Error::chain("Unable to parse vendor_package", cause))?, None => Vec::new(), }; if !pre_output { self.extra.iter().for_each(|(k, _)| { - if !suppress_warnings.contains("W03") { - warnln!( - "[W03] Ignoring unknown field `{}` in manifest for {}.", - k, - pkg.name - ); + Warnings::IgnoreUnknownField { + field: k.clone(), + pkg: pkg.name.clone(), } + .emit(); }); } Ok(Manifest { @@ -542,22 +521,18 @@ impl Validate for PartialManifest { .iter() .filter_map(|path| match env_path_from_string(path.to_string()) { Ok(parsed_path) => { - if !(suppress_warnings.contains("W24") - || pre_output - || parsed_path.exists() && parsed_path.is_dir()) - { - warnln!( - "[W24] Include directory {} doesn't exist.", - &parsed_path.display() - ); + if !(pre_output || parsed_path.exists() && parsed_path.is_dir()) { + Warnings::IncludeDirMissing(parsed_path.clone()).emit(); } + Some(Ok(parsed_path)) } Err(cause) => { - if 
suppress_warnings.contains("E30") { - if !suppress_warnings.contains("W30") { - warnln!("[W30] File not added, ignoring: {}", cause); + if Diagnostics::is_suppressed("E30") { + Warnings::IgnoredPath { + cause: cause.to_string(), } + .emit(); None } else { Some(Err(Error::chain("[E30]", cause))) @@ -629,12 +604,7 @@ impl PrefixPaths for PartialDependency { impl Validate for PartialDependency { type Output = Dependency; type Error = Error; - fn validate( - self, - package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result { + fn validate(self, package_name: &str, pre_output: bool) -> Result { let pass_targets = self .pass_targets .unwrap_or_default() @@ -657,13 +627,11 @@ impl Validate for PartialDependency { } if !pre_output { self.extra.iter().for_each(|(k, _)| { - if !suppress_warnings.contains("W03") { - warnln!( - "[W03] Ignoring unknown field `{}` in a dependency in manifest for {}.", - k, - package_name - ); + Warnings::IgnoreUnknownField { + field: k.clone(), + pkg: package_name.to_string(), } + .emit(); }); } if let Some(path) = self.path { @@ -780,12 +748,7 @@ impl From> for PartialSources { impl Validate for PartialSources { type Output = SourceFile; type Error = Error; - fn validate( - self, - package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result { + fn validate(self, package_name: &str, pre_output: bool) -> Result { match self { PartialSources { target: None, @@ -797,7 +760,7 @@ impl Validate for PartialSources { vhd: None, external_flists: None, extra: _, - } => PartialSourceFile::SvFile(sv).validate(package_name, pre_output, suppress_warnings), + } => PartialSourceFile::SvFile(sv).validate(package_name, pre_output), PartialSources { target: None, include_dirs: None, @@ -808,7 +771,7 @@ impl Validate for PartialSources { vhd: None, external_flists: None, extra: _, - } => PartialSourceFile::VerilogFile(v).validate(package_name, pre_output, suppress_warnings), + } => 
PartialSourceFile::VerilogFile(v).validate(package_name, pre_output), PartialSources { target: None, include_dirs: None, @@ -819,7 +782,7 @@ impl Validate for PartialSources { vhd: Some(vhd), external_flists: None, extra: _, - } => PartialSourceFile::VhdlFile(vhd).validate(package_name, pre_output, suppress_warnings), + } => PartialSourceFile::VhdlFile(vhd).validate(package_name, pre_output), PartialSources { target, include_dirs, @@ -838,10 +801,8 @@ impl Validate for PartialSources { .filter_map(|path| match env_path_from_string(path.to_string()) { Ok(p) => Some(Ok(p)), Err(cause) => { - if suppress_warnings.contains("E30") { - if !suppress_warnings.contains("W30") { - warnln!("[W30] File not added, ignoring: {}", cause); - } + if Diagnostics::is_suppressed("E30") { + Warnings::IgnoredPath {cause: cause.to_string()}.emit(); None } else { Some(Err(Error::chain("[E30]", cause))) @@ -980,10 +941,8 @@ impl Validate for PartialSources { _ => unreachable!(), }, Err(cause) => { - if suppress_warnings.contains("E30") { - if !suppress_warnings.contains("W30") { - warnln!("[W30] File not added, ignoring: {}", cause); - } + if Diagnostics::is_suppressed("E30") { + Warnings::IgnoredPath {cause: cause.to_string()}.emit(); None } else { Some(Err(Error::chain("[E30]", cause))) @@ -1005,7 +964,7 @@ impl Validate for PartialSources { | PartialSourceFile::VerilogFile(_) | PartialSourceFile::VhdlFile(_) => { // PartialSources .files item is pointing to PartialSourceFiles::file so do glob extension - pre_glob_file.glob_file(suppress_warnings) + pre_glob_file.glob_file() } _ => { // PartialSources .files item is pointing to PartialSourceFiles::group so pass on for recursion @@ -1025,10 +984,8 @@ impl Validate for PartialSources { .filter_map(|path| match env_path_from_string(path.to_string()) { Ok(p) => Some(Ok(p)), Err(cause) => { - if suppress_warnings.contains("E30") { - if !suppress_warnings.contains("W30") { - warnln!("[W30] File not added, ignoring: {}", cause); - } + if 
Diagnostics::is_suppressed("E30") { + Warnings::IgnoredPath {cause: cause.to_string()}.emit(); None } else { Some(Err(Error::chain("[E30]", cause))) @@ -1040,25 +997,20 @@ impl Validate for PartialSources { let defines = defines.unwrap_or_default(); let files: Result> = post_glob_files .into_iter() - .map(|f| f.validate(package_name, pre_output, suppress_warnings)) + .map(|f| f.validate(package_name, pre_output)) .collect(); let files: Vec = files?; let files: Vec = files.into_iter().collect(); - if files.is_empty() && !pre_output && !suppress_warnings.contains("W04") { - warnln!( - "[W04] No source files specified in a sourcegroup in manifest for {}.", - package_name - ); + if files.is_empty() && !pre_output { + Warnings::NoFilesInSourceGroup(package_name.to_string()).emit(); } if !pre_output { extra.iter().for_each(|(k, _)| { - if !suppress_warnings.contains("W03") { - warnln!( - "[W03] Ignoring unknown field `{}` in sources in manifest for {}.", - k, - package_name - ); + Warnings::IgnoreUnknownField { + field: k.clone(), + pkg: package_name.to_string(), } + .emit(); }); } Ok(SourceFile::Group(Box::new(Sources { @@ -1184,20 +1136,13 @@ impl<'de> Deserialize<'de> for PartialSourceFile { impl Validate for PartialSourceFile { type Output = SourceFile; type Error = Error; - fn validate( - self, - package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result { + fn validate(self, package_name: &str, pre_output: bool) -> Result { match self { PartialSourceFile::File(path) => Ok(SourceFile::File(PathBuf::from(path))), // PartialSourceFile::Group(srcs) => Ok(Some(SourceFile::Group(Box::new( // srcs.validate(package_name, pre_output, suppress_warnings)?, // )))), - PartialSourceFile::Group(srcs) => { - Ok(srcs.validate(package_name, pre_output, suppress_warnings)?) 
- } + PartialSourceFile::Group(srcs) => Ok(srcs.validate(package_name, pre_output)?), PartialSourceFile::SvFile(path) => Ok(SourceFile::SvFile(env_path_from_string(path)?)), PartialSourceFile::VerilogFile(path) => { Ok(SourceFile::VerilogFile(env_path_from_string(path)?)) @@ -1216,14 +1161,14 @@ pub trait GlobFile { /// The error type produced by validation. type Error; /// Validate self and convert to a full list of paths that exist - fn glob_file(self, suppress_warnings: &IndexSet) -> Result; + fn glob_file(self) -> Result; } impl GlobFile for PartialSourceFile { type Output = Vec; type Error = Error; - fn glob_file(self, suppress_warnings: &IndexSet) -> Result> { + fn glob_file(self) -> Result> { // let mut partial_source_files_vec: Vec = Vec::new(); // Only operate on files, not groups @@ -1259,8 +1204,8 @@ impl GlobFile for PartialSourceFile { }) }) .collect::>>()?; - if out.is_empty() && !suppress_warnings.contains("W05") { - warnln!("[W05] No files found for glob pattern {:?}", path); + if out.is_empty() { + Warnings::NoFilesForGlobalPattern { path: path.clone() }.emit(); } Ok(out) } else { @@ -1309,12 +1254,7 @@ impl PrefixPaths for PartialWorkspace { impl Validate for PartialWorkspace { type Output = Workspace; type Error = Error; - fn validate( - self, - package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result { + fn validate(self, package_name: &str, pre_output: bool) -> Result { let package_links: Result> = self .package_links .unwrap_or_default() @@ -1323,13 +1263,11 @@ impl Validate for PartialWorkspace { .collect(); if !pre_output { self.extra.iter().for_each(|(k, _)| { - if !suppress_warnings.contains("W03") { - warnln!( - "[W03] Ignoring unknown field `{}` in workspace configuration in manifest for {}.", - k, - package_name - ); + Warnings::IgnoreUnknownField { + field: k.clone(), + pkg: package_name.to_string(), } + .emit(); }); } Ok(Workspace { @@ -1493,12 +1431,7 @@ impl Merge for PartialConfig { impl Validate for 
PartialConfig { type Output = Config; type Error = Error; - fn validate( - self, - package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result { + fn validate(self, package_name: &str, pre_output: bool) -> Result { Ok(Config { database: match self.database { Some(db) => env_path_from_string(db)?, @@ -1510,7 +1443,7 @@ impl Validate for PartialConfig { }, overrides: match self.overrides { Some(d) => d - .validate(package_name, pre_output, suppress_warnings) + .validate(package_name, pre_output) .map_err(|(key, cause)| { Error::chain(format!("In override `{}`:", key), cause) })?, @@ -1518,7 +1451,7 @@ impl Validate for PartialConfig { }, plugins: match self.plugins { Some(d) => d - .validate(package_name, pre_output, suppress_warnings) + .validate(package_name, pre_output) .map_err(|(key, cause)| Error::chain(format!("In plugin `{}`:", key), cause))?, None => IndexMap::new(), }, @@ -1640,12 +1573,7 @@ impl PrefixPaths for PartialVendorPackage { impl Validate for PartialVendorPackage { type Output = VendorPackage; type Error = Error; - fn validate( - self, - package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result { + fn validate(self, package_name: &str, pre_output: bool) -> Result { Ok(VendorPackage { name: match self.name { Some(name) => name, @@ -1657,7 +1585,7 @@ impl Validate for PartialVendorPackage { }, upstream: match self.upstream { Some(upstream) => upstream - .validate(package_name, pre_output, suppress_warnings) + .validate(package_name, pre_output) .map_err(|cause| { Error::chain("Unable to parse external import upstream", cause) })?, diff --git a/src/diagnostic.rs b/src/diagnostic.rs new file mode 100644 index 00000000..8ea5b77a --- /dev/null +++ b/src/diagnostic.rs @@ -0,0 +1,528 @@ +// Copyright (c) 2025 ETH Zurich +// Tim Fischer + +use std::collections::HashSet; +use std::fmt; +use std::path::PathBuf; +use std::sync::{Mutex, OnceLock}; + +use indicatif::MultiProgress; +use miette::{Diagnostic, 
ReportHandler}; +use owo_colors::{OwoColorize, Style}; +use thiserror::Error; + +use crate::{fmt_field, fmt_path, fmt_pkg, fmt_version}; + +static GLOBAL_DIAGNOSTICS: OnceLock = OnceLock::new(); + +/// A diagnostics manager that handles warnings (and errors). +#[derive(Debug)] +pub struct Diagnostics { + /// A set of suppressed warnings. + suppressed: HashSet, + /// Whether all warnings are suppressed. + all_suppressed: bool, + /// A set of already emitted warnings. + /// Requires synchronization as warnings may be emitted from multiple threads. + emitted: Mutex>, + /// The active multi-progress bar (if any). + multiprogress: Mutex>, +} + +impl Diagnostics { + /// Create a new diagnostics manager. + pub fn init(suppressed: HashSet) { + // Set up miette with our custom renderer + miette::set_hook(Box::new(|_| Box::new(DiagnosticRenderer))).unwrap(); + let diag = Diagnostics { + all_suppressed: suppressed.contains("all") || suppressed.contains("Wall"), + suppressed, + emitted: Mutex::new(HashSet::new()), + multiprogress: Mutex::new(None), + }; + + GLOBAL_DIAGNOSTICS + .set(diag) + .expect("Diagnostics already initialized!"); + } + + pub fn set_multiprogress(multiprogress: Option) { + let diag = Diagnostics::get(); + let mut guard = diag.multiprogress.lock().unwrap(); + *guard = multiprogress; + } + + /// Get the global diagnostics manager. + fn get() -> &'static Diagnostics { + GLOBAL_DIAGNOSTICS + .get() + .expect("Diagnostics not initialized!") + } + + /// Check whether a warning/error code is suppressed. + pub fn is_suppressed(code: &str) -> bool { + let diag = Diagnostics::get(); + diag.all_suppressed || diag.suppressed.contains(code) + } +} + +impl Warnings { + /// Checks suppression, deduplicates, and emits the warning to stderr. 
+ pub fn emit(self) { + let diag = Diagnostics::get(); + + // Check whether the command is suppressed + if let Some(code) = self.code() { + if diag.all_suppressed || diag.suppressed.contains(&code.to_string()) { + return; + } + } + + // Check whether the warning was already emitted + let mut emitted = diag.emitted.lock().unwrap(); + if emitted.contains(&self) { + return; + } + emitted.insert(self.clone()); + drop(emitted); + + // Prepare the report + let report = miette::Report::new(self.clone()); + + // Print cleanly (using suspend if a bar exists) + let mp_guard = diag.multiprogress.lock().unwrap(); + + if let Some(mp) = &*mp_guard { + // If we have progress bars, hide them momentarily + mp.suspend(|| { + eprintln!("{:?}", report); + }); + } else { + // No multiprogress bar is active. + // Otherwise just print + eprintln!("{:?}", report); + } + } +} + +pub struct DiagnosticRenderer; + +impl ReportHandler for DiagnosticRenderer { + fn debug(&self, diagnostic: &dyn Diagnostic, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Determine severity and the resulting style + let (severity, style) = match diagnostic.severity().unwrap_or_default() { + miette::Severity::Error => ("error", Style::new().red().bold()), + miette::Severity::Warning => ("warning", Style::new().yellow().bold()), + miette::Severity::Advice => ("advice", Style::new().cyan().bold()), + }; + + // Write the severity prefix + write!(f, "{}", severity.style(style))?; + + // Write the code, if any + if let Some(code) = diagnostic.code() { + write!(f, "{}", format!("[{}]", code).style(style))?; + } + + // Write the main diagnostic message + write!(f, ": {}", diagnostic)?; + + // We collect all footer lines into a vector. 
+ let mut annotations: Vec = Vec::new(); + + // First, we write the help message(s) if any + if let Some(help) = diagnostic.help() { + let help_str = help.to_string(); + for line in help_str.lines() { + annotations.push(format!( + "{} {}", + "help:".bold(), + line.replace("\x1b[0m", "\x1b[0m\x1b[2m").dimmed() + )); + } + } + + // Prepare tree characters + let branch = " ├─›"; + let corner = " ╰─›"; + + // Iterate over the annotations and print them + for (i, note) in annotations.iter().enumerate() { + // The last item gets the corner, everyone else gets a branch + let is_last = i == annotations.len() - 1; + let prefix = if is_last { corner } else { branch }; + write!(f, "\n{} {}", prefix.dimmed(), note)?; + } + + Ok(()) + } +} + +// Note(fischeti): The enum variants should preferably use struct style +// variants for better readability, but this is not possible due to a current +// issue in `miette` that causes `unused` warnings when the help message does not +// use all fields of a struct variant. This is new since Rust 1.92.0, and a fix +// is pending in `miette`. See also: +// Issue: https://github.com/zkat/miette/issues/458 +// PR: https://github.com/zkat/miette/pull/459 +// The workaround for the moment is to use tuple style variants +// for variants where the help message does not use all fields. 
+#[derive(Error, Diagnostic, Hash, Eq, PartialEq, Debug, Clone)] +#[diagnostic(severity(Warning))] +pub enum Warnings { + #[error( + "Skipping link to package {} at {} since there is something there", + fmt_pkg!(.0), + fmt_path!(.1.display()) + )] + #[diagnostic( + code(W01), + help("Check the existing file or directory that is preventing the link.") + )] + SkippingPackageLink(String, PathBuf), + + #[error("Using config at {} for overrides.", fmt_path!(path.display()))] + #[diagnostic(code(W02))] + UsingConfigForOverride { path: PathBuf }, + + #[error("Ignoring unknown field {} in package {}.", fmt_field!(field), fmt_pkg!(pkg))] + #[diagnostic( + code(W03), + help("Check for typos in {} or remove it from the {} manifest.", fmt_field!(field), fmt_pkg!(pkg)) + )] + IgnoreUnknownField { field: String, pkg: String }, + + #[error("Source group in package {} contains no source files.", fmt_pkg!(.0))] + #[diagnostic( + code(W04), + help("Add source files to the source group or remove it from the manifest.") + )] + NoFilesInSourceGroup(String), + + #[error("No files matched the global pattern {}.", fmt_path!(path))] + #[diagnostic(code(W05))] + NoFilesForGlobalPattern { path: String }, + + // TODO(fischeti): Why are there two W06 variants? + #[error("Dependency {} in checkout_dir {} is not a git repository. Setting as path dependency.", fmt_pkg!(.0), fmt_path!(.1.display()))] + #[diagnostic( + code(W06), + help("Use `bender clone` to work on git dependencies.\nRun `bender update --ignore-checkout-dir` to overwrite this at your own risk.") + )] + NotAGitDependency(String, PathBuf), + + // TODO(fischeti): Why are there two W06 variants? + #[error("Dependency {} in checkout_dir {} is not in a clean state. 
Setting as path dependency.", fmt_pkg!(.0), fmt_path!(.1.display()))] + #[diagnostic(code(W06), help("Use `bender clone` to work on git dependencies.\nRun `bender update --ignore-checkout-dir` to overwrite this at your own risk."))] + DirtyGitDependency(String, PathBuf), + + // TODO(fischeti): This is part of an error, not a warning. Should be converted to an Error. + #[error("SSH key might be missing.")] + #[diagnostic( + code(W07), + help("Please ensure your public ssh key is added to the git server.") + )] + SshKeyMaybeMissing, + + // TODO(fischeti): This is part of an error, not a warning. Should be converted to an Error. + #[error("SSH key might be missing.")] + #[diagnostic( + code(W07), + help("Please ensure the url is correct and you have access to the repository.") + )] + UrlMaybeIncorrect, + + // TODO(fischeti): This is part of an error, not a warning. Should be converted to an Error. + #[error("Revision {} not found in repository {}.", fmt_version!(.0), fmt_pkg!(.1))] + #[diagnostic( + code(W08), + help("Check that the revision exists in the remote repository or run `bender update`.") + )] + RevisionNotFound(String, String), + + #[error("Path dependency {} inside git dependency {} detected. 
This is currently not fully suppored and your milage may vary.", fmt_pkg!(pkg), fmt_pkg!(top_pkg))] + #[diagnostic(code(W09))] + PathDepInGitDep { pkg: String, top_pkg: String }, + + #[error("There may be issues in the path for {}.", fmt_pkg!(.0))] + #[diagnostic( + code(W10), + help("Please check that {} is correct and accessible.", fmt_path!(.1.display())) + )] + MaybePathIssues(String, PathBuf), + + #[error("Dependency package name {} does not match the package name {} in its manifest.", fmt_pkg!(.0), fmt_pkg!(.1))] + #[diagnostic( + code(W11), + help("Check that the dependency name in your root manifest matches the name in the {} manifest.", fmt_pkg!(.0)) + )] + DepPkgNameNotMatching(String, String), + + #[error("Manifest for package {} not found at {}.", fmt_pkg!(pkg), fmt_path!(src))] + #[diagnostic(code(W12))] + ManifestNotFound { pkg: String, src: String }, + + #[error("Name issue with package {}. `export_include_dirs` cannot be handled.", fmt_pkg!(.0))] + #[diagnostic( + code(W13), + help("Could be related to name missmatch, check `bender update`.") + )] + ExportDirNameIssue(String), + + #[error("If `--local` is used, no fetching will be performed.")] + #[diagnostic(code(W14))] + LocalNoFetch, + + #[error("No patch directory found for package {} when trying to apply patches from {} to {}. Skipping patch generation.", fmt_pkg!(vendor_pkg), fmt_path!(from_prefix.display()), fmt_path!(to_prefix.display()))] + #[diagnostic(code(W15))] + NoPatchDir { + vendor_pkg: String, + from_prefix: PathBuf, + to_prefix: PathBuf, + }, + + #[error("Dependency string for the included dependencies might be wrong.")] + #[diagnostic(code(W16))] + DependStringMaybeWrong, + + // TODO(fischeti): Why are there two W16 variants? 
+ #[error("{} not found in upstream, continuing.", fmt_path!(path))] + #[diagnostic(code(W16))] + NotInUpstream { path: String }, + + #[error("Package {} is shown to include dependency, but manifest does not have this information.", fmt_pkg!(pkg))] + #[diagnostic(code(W17))] + IncludeDepManifestMismatch { pkg: String }, + + #[error("An override is specified for dependency {} to {}.", fmt_pkg!(pkg), fmt_pkg!(pkg_override))] + #[diagnostic(code(W18))] + DepOverride { pkg: String, pkg_override: String }, + + #[error("Workspace checkout directory set and has uncommitted changes, not updating {} at {}.", fmt_pkg!(.0), fmt_path!(.1.display()))] + #[diagnostic( + code(W19), + help("Run `bender checkout --force` to overwrite the dependency at your own risk.") + )] + CheckoutDirDirty(String, PathBuf), + + #[error("Workspace checkout directory set and remote url doesn't match, not updating {} at {}.", fmt_pkg!(.0), fmt_path!(.1.display()))] + #[diagnostic( + code(W19), + help("Run `bender checkout --force` to overwrite the dependency at your own risk.") + )] + CheckoutDirUrlMismatch(String, PathBuf), + + // TODO(fischeti): Should this be an error instead of a warning? 
+ #[error("Ignoring error for {} at {}: {}", fmt_pkg!(.0), fmt_path!(.1), .2)] + #[diagnostic(code(W20))] + IgnoringError(String, String, String), + + #[error("No revision found in lock file for git dependency {}.", fmt_pkg!(pkg))] + #[diagnostic(code(W21))] + NoRevisionInLockFile { pkg: String }, + + #[error("Dependency {} has source path {} which does not exist.", fmt_pkg!(.0), fmt_path!(.1.display()))] + #[diagnostic(code(W22), help("Please check that the path exists and is correct."))] + DepSourcePathMissing(String, PathBuf), + + #[error("Locked revision {} for dependency {} not found in available revisions, allowing update.", fmt_version!(rev), fmt_pkg!(pkg))] + #[diagnostic(code(W23))] + LockedRevisionNotFound { pkg: String, rev: String }, + + #[error("Include directory {} doesn't exist.", fmt_path!(.0.display()))] + #[diagnostic( + code(W24), + help("Please check that the include directory exists and is correct.") + )] + IncludeDirMissing(PathBuf), + + #[error("Skipping dirty dependency {}", fmt_pkg!(pkg))] + #[diagnostic(help("Use `--no-skip` to still snapshot {}.", fmt_pkg!(pkg)))] + SkippingDirtyDep { pkg: String }, + + #[error("File not added, ignoring: {cause}")] + #[diagnostic(code(W30))] + IgnoredPath { cause: String }, + + #[error("File {} doesn't exist.", fmt_path!(path.display()))] + #[diagnostic(code(W31))] + FileMissing { path: PathBuf }, + + #[error("Path {} for dependency {} does not exist.", fmt_path!(path.display()), fmt_pkg!(pkg))] + #[diagnostic(code(W32))] + DepPathMissing { pkg: String, path: PathBuf }, +} + +#[cfg(test)] +mod tests { + use super::*; + use std::sync::Once; + + static TEST_INIT: Once = Once::new(); + + /// Helper to initialize diagnostics once for the entire test run. + fn setup_diagnostics() { + TEST_INIT.call_once(|| { + // We use an empty set for the global init in tests + // or a specific set if needed. 
+ Diagnostics::init(HashSet::from(["W02".to_string()])); + }); + } + + #[test] + fn test_is_suppressed() { + setup_diagnostics(); + assert!(Diagnostics::is_suppressed("W02")); + assert!(!Diagnostics::is_suppressed("W01")); + } + + #[test] + fn test_suppression_works() { + setup_diagnostics(); // Assumes this suppresses W02 + let diag = Diagnostics::get(); + + let warn = Warnings::UsingConfigForOverride { + path: PathBuf::from("/example/path"), + }; + + // Clear state + diag.emitted.lock().unwrap().clear(); + + // Call emit (The Gatekeeper) + warn.clone().emit(); + + let emitted = diag.emitted.lock().unwrap(); + assert!(!emitted.contains(&warn)); + } + + #[test] + fn test_all_suppressed() { + // Since we can't re-init the GLOBAL_DIAGNOSTICS with different values + // in the same process, we test the logic via a local instance. + let diag = Diagnostics { + suppressed: HashSet::new(), + all_suppressed: true, + emitted: Mutex::new(HashSet::new()), + multiprogress: Mutex::new(None), + }; + + // Manual check of the logic inside emit() + let warn = Warnings::LocalNoFetch; + let code = warn.code().unwrap().to_string(); + assert!(diag.all_suppressed || diag.suppressed.contains(&code)); + } + + #[test] + fn test_deduplication_logic() { + setup_diagnostics(); + let diag = Diagnostics::get(); + let warn1 = Warnings::NoRevisionInLockFile { + pkg: "example_pkg".into(), + }; + let warn2 = Warnings::NoRevisionInLockFile { + pkg: "other_pkg".into(), + }; + + // Clear state + diag.emitted.lock().unwrap().clear(); + + // Emit first warning + warn1.clone().emit(); + { + let emitted = diag.emitted.lock().unwrap(); + assert!(emitted.contains(&warn1)); + assert_eq!(emitted.len(), 1); + } + + // Emit second warning (different data) + warn2.clone().emit(); + { + let emitted = diag.emitted.lock().unwrap(); + assert!(emitted.contains(&warn2)); + assert_eq!(emitted.len(), 2); + } + + // Emit first warning again + warn1.clone().emit(); + { + let emitted = diag.emitted.lock().unwrap(); + // The 
length should STILL be 2, because warn1 was already there + assert_eq!(emitted.len(), 2); + } + } + + #[test] + fn test_contains_code() { + let warn = Warnings::LocalNoFetch; + let code = warn.code().unwrap().to_string(); + assert_eq!(code, "W14".to_string()); + } + + #[test] + fn test_contains_no_code() { + let warn = Warnings::SkippingDirtyDep { + pkg: "example_pkg".to_string(), + }; + let code = warn.code(); + assert!(code.is_none()); + } + + #[test] + fn test_contains_help() { + let warn = Warnings::SkippingPackageLink( + "example_pkg".to_string(), + PathBuf::from("/example/path"), + ); + let help = warn.help().unwrap().to_string(); + assert!(help.contains("Check the existing file or directory")); + } + + #[test] + fn test_contains_no_help() { + let warn = Warnings::NoRevisionInLockFile { + pkg: "example_pkg".to_string(), + }; + let help = warn.help(); + assert!(help.is_none()); + } + + #[test] + fn test_stderr_contains_code() { + setup_diagnostics(); + let warn = Warnings::LocalNoFetch; + let code = warn.code().unwrap().to_string(); + let report = format!("{:?}", miette::Report::new(warn)); + assert!(report.contains(&code)); + } + + #[test] + fn test_stderr_contains_help() { + setup_diagnostics(); + let warn = Warnings::SkippingPackageLink( + "example_pkg".to_string(), + PathBuf::from("/example/path"), + ); + let report = format!("{:?}", miette::Report::new(warn)); + assert!(report.contains("Check the existing file or directory")); + } + + #[test] + fn test_stderr_contains_no_help() { + setup_diagnostics(); + let warn = Warnings::NoRevisionInLockFile { + pkg: "example_pkg".to_string(), + }; + let report = format!("{:?}", miette::Report::new(warn)); + assert!(!report.contains("help:")); + } + + #[test] + fn test_stderr_contains_two_help() { + setup_diagnostics(); + let warn = + Warnings::NotAGitDependency("example_dep".to_string(), PathBuf::from("/example/path")); + let report = format!("{:?}", miette::Report::new(warn)); + let help_count = 
report.matches("help:").count(); + assert_eq!(help_count, 2); + } +} diff --git a/src/error.rs b/src/error.rs index 778d56ca..0980c1c5 100644 --- a/src/error.rs +++ b/src/error.rs @@ -5,12 +5,12 @@ use std; use std::fmt; -#[allow(deprecated)] -use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT}; +use std::sync::atomic::AtomicBool; use std::sync::Arc; -#[allow(deprecated)] -pub static ENABLE_DEBUG: AtomicBool = ATOMIC_BOOL_INIT; +use owo_colors::{OwoColorize, Style}; + +pub static ENABLE_DEBUG: AtomicBool = AtomicBool::new(false); /// Print an error. #[macro_export] @@ -18,16 +18,10 @@ macro_rules! errorln { ($($arg:tt)*) => { diagnostic!($crate::error::Severity::Error; $($arg)*); } } -/// Print a warning. -#[macro_export] -macro_rules! warnln { - ($($arg:tt)*) => { diagnostic!($crate::error::Severity::Warning; $($arg)*) } -} - /// Print an informational note. #[macro_export] -macro_rules! noteln { - ($($arg:tt)*) => { diagnostic!($crate::error::Severity::Note; $($arg)*); } +macro_rules! infoln { + ($($arg:tt)*) => { diagnostic!($crate::error::Severity::Info; $($arg)*); } } /// Print debug information. Omitted in release builds. @@ -41,6 +35,12 @@ macro_rules! debugln { } } +/// Format and print stage progress. +#[macro_export] +macro_rules! stageln { + ($stage_name:expr, $($arg:tt)*) => { diagnostic!($crate::error::Severity::Stage($stage_name); $($arg)*); } +} + /// Print debug information. Omitted in release builds. #[macro_export] #[cfg(not(debug_assertions))] @@ -61,20 +61,20 @@ macro_rules! 
diagnostic { #[derive(PartialEq, Eq)] pub enum Severity { Debug, - Note, - Warning, + Info, Error, + Stage(&'static str), } impl fmt::Display for Severity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let (color, prefix) = match *self { - Severity::Error => ("\x1B[31;1m", "error"), - Severity::Warning => ("\x1B[33;1m", "warning"), - Severity::Note => ("\x1B[;1m", "note"), - Severity::Debug => ("\x1B[34;1m", "debug"), + let (severity, style) = match *self { + Severity::Error => ("Error:", Style::new().red().bold()), + Severity::Info => ("Info:", Style::new().white().bold()), + Severity::Debug => ("Debug:", Style::new().blue().bold()), + Severity::Stage(name) => (name, Style::new().green().bold()), }; - write!(f, "{}{}:\x1B[m", color, prefix) + write!(f, " {}", severity.style(style)) } } @@ -146,16 +146,3 @@ impl From for Error { Error::chain("Cannot startup runtime.".to_string(), err) } } - -/// Format and print stage progress. -#[macro_export] -macro_rules! stageln { - ($stage:expr, $($arg:tt)*) => { - $crate::error::println_stage($stage, &format!($($arg)*)) - } -} - -/// Print stage progress. -pub fn println_stage(stage: &str, message: &str) { - eprintln!("\x1B[32;1m{:>12}\x1B[0m {}", stage, message); -} diff --git a/src/git.rs b/src/git.rs index b11e5af0..30b6cba4 100644 --- a/src/git.rs +++ b/src/git.rs @@ -7,12 +7,16 @@ use std::ffi::OsStr; use std::path::Path; +use std::process::Stdio; use std::sync::Arc; use futures::TryFutureExt; +use tokio::io::AsyncReadExt; use tokio::process::Command; use tokio::sync::Semaphore; +use crate::progress::{monitor_stderr, ProgressHandler}; + use crate::error::*; /// A git repository. @@ -25,18 +29,12 @@ pub struct Git<'ctx> { pub path: &'ctx Path, /// The session within which commands will be executed. pub git: &'ctx String, - /// Reference to the throttle object. - pub throttle: Arc, } impl<'ctx> Git<'ctx> { /// Create a new git context. 
- pub fn new(path: &'ctx Path, git: &'ctx String, throttle: Arc) -> Git<'ctx> { - Git { - path, - git, - throttle, - } + pub fn new(path: &'ctx Path, git: &'ctx String) -> Git<'ctx> { + Git { path, git } } /// Create a new git command. @@ -61,56 +59,99 @@ impl<'ctx> Git<'ctx> { /// If `check` is false, the stdout will be returned regardless of the /// command's exit code. #[allow(clippy::format_push_string)] - pub async fn spawn(self, mut cmd: Command, check: bool) -> Result { - // acquire throttle - let permit = self.throttle.clone().acquire_owned().await.unwrap(); - let output = cmd.output().map_err(|cause| { + pub async fn spawn( + self, + mut cmd: Command, + check: bool, + throttle: Option>, + pb: Option, + ) -> Result { + // Acquire the throttle semaphore + // let permit = self.throttle.clone().acquire_owned().await.unwrap(); + let permit = match throttle { + Some(sem) => Some(sem.acquire_owned().await.unwrap()), + None => None, + }; + + // Configure pipes for streaming + cmd.stdout(Stdio::piped()); + cmd.stderr(Stdio::piped()); + + // Spawn the child process + let mut child = cmd.spawn().map_err(|cause| { if cause .to_string() .to_lowercase() .contains("too many open files") { - eprintln!( - "Please consider increasing your `ulimit -n`, e.g. by running `ulimit -n 4096`" - ); - eprintln!("This is a known issue (#52)."); + eprintln!("Please consider increasing your `ulimit -n`..."); Error::chain("Failed to spawn child process.", cause) } else { Error::chain("Failed to spawn child process.", cause) } + })?; + + debugln!("git: {:?} in {:?}", cmd, self.path); + + // Setup Streaming for Stderr (Progress + Error Collection) + // We need to capture stderr in case the command fails, so we collect it while parsing. 
+ let stderr = child.stderr.take().unwrap(); + + // Spawn a background task to handle stderr so it doesn't block + let stderr_handle = tokio::spawn(async move { + // We pass the handler clone into the async task + monitor_stderr(stderr, pb).await }); - let result = output.and_then(|output| async move { - debugln!("git: {:?} in {:?}", cmd, self.path); - if output.status.success() || !check { - String::from_utf8(output.stdout).map_err(|cause| { - Error::chain( - format!( - "Output of git command ({:?}) in directory {:?} is not valid UTF-8.", - cmd, self.path - ), - cause, - ) - }) - } else { - let mut msg = format!("Git command ({:?}) in directory {:?}", cmd, self.path); - match output.status.code() { - Some(code) => msg.push_str(&format!(" failed with exit code {}", code)), - None => msg.push_str(" failed"), - }; - match String::from_utf8(output.stderr) { - Ok(txt) => { - msg.push_str(":\n\n"); - msg.push_str(&txt); - } - Err(err) => msg.push_str(&format!(". Stderr is not valid UTF-8, {}.", err)), - }; - Err(Error::new(msg)) + + // Read Stdout (for the success return value) + let mut stdout_buffer = Vec::new(); + if let Some(mut stdout) = child.stdout.take() { + // We just read all of stdout. 
+ if let Err(e) = stdout.read_to_end(&mut stdout_buffer).await { + return Err(Error::chain("Failed to read stdout", e)); } - }); - let result = result.await; - // release throttle + } + + // Wait for child process to finish + let status = child + .wait() + .await + .map_err(|e| Error::chain("Failed to wait on child", e))?; + + // Join the stderr task to get the error log + let collected_stderr = stderr_handle + .await + .unwrap_or_else(|_| String::from("")); + + // We can release the throttle here since we're done with the process drop(permit); - result + + // Process the output based on success and check flag + if status.success() || !check { + String::from_utf8(stdout_buffer).map_err(|cause| { + Error::chain( + format!( + "Output of git command ({:?}) in directory {:?} is not valid UTF-8.", + cmd, self.path + ), + cause, + ) + }) + } else { + let mut msg = format!("Git command ({:?}) in directory {:?}", cmd, self.path); + match status.code() { + Some(code) => msg.push_str(&format!(" failed with exit code {}", code)), + None => msg.push_str(" failed"), + }; + + // Use the stderr we collected in the background task + if !collected_stderr.is_empty() { + msg.push_str(":\n\n"); + msg.push_str(&collected_stderr); + } + + Err(Error::new(msg)) + } } /// Assemble a command and schedule it for execution. @@ -118,28 +159,38 @@ impl<'ctx> Git<'ctx> { /// This is a convenience function that creates a command, passes it to the /// closure `f` for configuration, then passes it to the `spawn` function /// and returns the future. - pub async fn spawn_with(self, f: F) -> Result + pub async fn spawn_with( + self, + f: F, + throttle: Option>, + pb: Option, + ) -> Result where F: FnOnce(&mut Command) -> &mut Command, { let mut cmd = Command::new(self.git); cmd.current_dir(self.path); f(&mut cmd); - self.spawn(cmd, true).await + self.spawn(cmd, true, throttle, pb).await } /// Assemble a command and schedule it for execution. 
/// /// This is the same as `spawn_with()`, but returns the stdout regardless of /// whether the command failed or not. - pub async fn spawn_unchecked_with(self, f: F) -> Result + pub async fn spawn_unchecked_with( + self, + f: F, + throttle: Option>, + pb: Option, + ) -> Result where F: FnOnce(&mut Command) -> &mut Command, { let mut cmd = Command::new(self.git); cmd.current_dir(self.path); f(&mut cmd); - self.spawn(cmd, false).await + self.spawn(cmd, false, throttle, pb).await } /// Assemble a command and execute it interactively. @@ -158,26 +209,51 @@ impl<'ctx> Git<'ctx> { } /// Fetch the tags and refs of a remote. - pub async fn fetch(self, remote: &str) -> Result<()> { + pub async fn fetch( + self, + remote: &str, + throttle: Option>, + pb: Option, + ) -> Result<()> { let r1 = String::from(remote); let r2 = String::from(remote); self.clone() - .spawn_with(|c| c.arg("fetch").arg("--prune").arg(r1)) - .and_then(|_| self.spawn_with(|c| c.arg("fetch").arg("--tags").arg("--prune").arg(r2))) + .spawn_with( + |c| c.arg("fetch").arg("--prune").arg(r1).arg("--progress"), + throttle.clone(), + pb, + ) + .and_then(|_| { + self.spawn_with( + |c| c.arg("fetch").arg("--tags").arg("--prune").arg(r2), + throttle, + None, + ) + }) .await .map(|_| ()) } /// Fetch the specified ref of a remote. - pub async fn fetch_ref(self, remote: &str, reference: &str) -> Result<()> { - self.spawn_with(|c| c.arg("fetch").arg(remote).arg(reference)) - .await - .map(|_| ()) + pub async fn fetch_ref( + self, + remote: &str, + reference: &str, + throttle: Option>, + pb: Option, + ) -> Result<()> { + self.spawn_with( + |c| c.arg("fetch").arg(remote).arg(reference).arg("--progress"), + throttle, + pb, + ) + .await + .map(|_| ()) } /// Stage all local changes. 
pub async fn add_all(self) -> Result<()> { - self.spawn_with(|c| c.arg("add").arg("--all")) + self.spawn_with(|c| c.arg("add").arg("--all"), None, None) .await .map(|_| ()) } @@ -188,13 +264,17 @@ impl<'ctx> Git<'ctx> { pub async fn commit(self, message: Option<&String>) -> Result<()> { match message { Some(msg) => self - .spawn_with(|c| { - c.arg("-c") - .arg("commit.gpgsign=false") - .arg("commit") - .arg("-m") - .arg(msg) - }) + .spawn_with( + |c| { + c.arg("-c") + .arg("commit.gpgsign=false") + .arg("commit") + .arg("-m") + .arg(msg) + }, + None, + None, + ) .await .map(|_| ()), @@ -207,7 +287,7 @@ impl<'ctx> Git<'ctx> { /// List all refs and their hashes. pub async fn list_refs(self) -> Result> { - self.spawn_unchecked_with(|c| c.arg("show-ref").arg("--dereference")) + self.spawn_unchecked_with(|c| c.arg("show-ref").arg("--dereference"), None, None) .and_then(|raw| async move { let mut all_revs = raw .lines() @@ -242,21 +322,29 @@ impl<'ctx> Git<'ctx> { /// List all revisions. pub async fn list_revs(self) -> Result> { - self.spawn_with(|c| c.arg("rev-list").arg("--all").arg("--date-order")) - .await - .map(|raw| raw.lines().map(String::from).collect()) + self.spawn_with( + |c| c.arg("rev-list").arg("--all").arg("--date-order"), + None, + None, + ) + .await + .map(|raw| raw.lines().map(String::from).collect()) } /// Determine the currently checked out revision. pub async fn current_checkout(self) -> Result> { - self.spawn_with(|c| c.arg("rev-parse").arg("--revs-only").arg("HEAD^{commit}")) - .await - .map(|raw| raw.lines().take(1).map(String::from).next()) + self.spawn_with( + |c| c.arg("rev-parse").arg("--revs-only").arg("HEAD^{commit}"), + None, + None, + ) + .await + .map(|raw| raw.lines().take(1).map(String::from).next()) } /// Determine the url of a remote. 
pub async fn remote_url(self, remote: &str) -> Result { - self.spawn_with(|c| c.arg("remote").arg("get-url").arg(remote)) + self.spawn_with(|c| c.arg("remote").arg("get-url").arg(remote), None, None) .await .map(|raw| raw.lines().take(1).map(String::from).next().unwrap()) } @@ -269,20 +357,24 @@ impl<'ctx> Git<'ctx> { rev: R, path: Option

, ) -> Result> { - self.spawn_with(|c| { - c.arg("ls-tree").arg(rev); - if let Some(p) = path { - c.arg(p); - } - c - }) + self.spawn_with( + |c| { + c.arg("ls-tree").arg(rev); + if let Some(p) = path { + c.arg(p); + } + c + }, + None, + None, + ) .await .map(|raw| raw.lines().map(TreeEntry::parse).collect()) } /// Read the content of a file. pub async fn cat_file>(self, hash: O) -> Result { - self.spawn_with(|c| c.arg("cat-file").arg("blob").arg(hash)) + self.spawn_with(|c| c.arg("cat-file").arg("blob").arg(hash), None, None) .await } } diff --git a/src/main.rs b/src/main.rs index 311990c2..b96bf92a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -32,13 +32,16 @@ pub mod error; pub mod cli; pub mod cmd; pub mod config; +pub mod diagnostic; pub mod git; pub mod lockfile; +pub mod progress; pub mod resolver; #[allow(clippy::bind_instead_of_map)] pub mod sess; pub mod src; pub mod target; +#[macro_use] pub mod util; fn main() { diff --git a/src/progress.rs b/src/progress.rs new file mode 100644 index 00000000..62bdec78 --- /dev/null +++ b/src/progress.rs @@ -0,0 +1,436 @@ +// Copyright (c) 2025 ETH Zurich +// Tim Fischer + +use crate::util::fmt_duration; + +use indexmap::IndexMap; +use owo_colors::OwoColorize; +use std::sync::OnceLock; +use std::time::Duration; + +use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; +use regex::Regex; +use tokio::io::{AsyncReadExt, BufReader}; + +use crate::{fmt_completed, fmt_pkg, fmt_stage}; + +static RE_GIT: OnceLock = OnceLock::new(); + +/// The result of parsing a git progress line. +pub enum GitProgress { + SubmoduleRegistered { name: String }, + CloningInto { name: String }, + SubmoduleEnd { name: String }, + Receiving { percent: u8 }, + Resolving { percent: u8 }, + Checkout { percent: u8 }, + Error(String), + Other, +} + +/// Captures (dynamic) state information for a git operation's progress. +/// for instance, the actuall progress bars to update. 
+pub struct ProgressState { + /// The progress bar of the current package. + pb: ProgressBar, + /// The sub-progress bar (for submodules), if any. + pub sub_bars: IndexMap, + /// The currently active submodule, if any. + pub active_sub: Option, + /// The start time of the operation. + start_time: std::time::Instant, +} + +/// Captures (static) information needed to handle progress updates for a git operation. +pub struct ProgressHandler { + /// Reference to the multi-progress bar, which can manage multiple progress bars. + multiprogress: MultiProgress, + /// The type of git operation being performed. + git_op: GitProgressOps, + /// The name of the repository being processed. + name: String, +} + +/// The git operation types that currently support progress reporting. +#[derive(PartialEq)] +pub enum GitProgressOps { + Checkout, + Clone, + Fetch, + Submodule, +} + +/// Monitor the stderr stream of a git process and update progress bars +/// of a given handler accordingly. +pub async fn monitor_stderr( + stream: impl tokio::io::AsyncRead + Unpin, + handler: Option, +) -> String { + let mut reader = BufReader::new(stream); + let mut buffer = Vec::new(); // Buffer for accumulating bytes of a line + let mut raw_log = Vec::new(); // The full raw log output + + // Add a new progress bar and state if we have a handler + let mut state = handler.as_ref().map(|h| h.start()); + + // We loop over the stream reading byte by byte + // and process lines as they are completed. 
+ while let Ok(byte) = reader.read_u8().await { + raw_log.push(byte); + + // We push bytes into the buffer until we hit a delimiter + if byte != b'\r' && byte != b'\n' { + buffer.push(byte); + continue; + } + + // Process the line, if we can parse it and have a handler + if let (Ok(line), Some(h)) = (std::str::from_utf8(&buffer), &handler) { + // Parse the line and update the progress bar accordingly + let progress = parse_git_line(line); + h.update_pb(progress, state.as_mut().unwrap()); + } + + // Always clear buffer after a delimiter + buffer.clear(); + } + + // Finalize the progress bar if we have a handler + if let Some(handler) = handler { + handler.finish(&mut state.unwrap()); + } + + // Return the full raw log as a string + String::from_utf8_lossy(&raw_log).to_string() +} + +impl ProgressHandler { + /// Create a new progress handler for a git operation. + pub fn new(multiprogress: MultiProgress, git_op: GitProgressOps, name: &str) -> Self { + Self { + multiprogress, + git_op, + name: name.to_string(), + } + } + + /// Adds a new progress bar to the multi-progress and returns the initial state + /// that is needed to track progress updates. + pub fn start(&self) -> ProgressState { + // Create and configure the main progress bar + let style = ProgressStyle::with_template( + "{spinner:.cyan} {prefix:<32!} {bar:40.cyan/blue} {percent:>3}% {msg}", + ) + .unwrap() + .progress_chars("-- ") + .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]); + + // Create and attach the progress bar to the multi-progress bar. 
+ let pb = self + .multiprogress + .add(ProgressBar::new(100).with_style(style)); + + // Set the prefix based on the git operation + let prefix = match self.git_op { + GitProgressOps::Clone => "Cloning", + GitProgressOps::Fetch => "Fetching", + GitProgressOps::Checkout => "Checking out", + GitProgressOps::Submodule => "Updating Submodules", + }; + let prefix = format!("{} {}", fmt_stage!(prefix), fmt_pkg!(&self.name)); + pb.set_prefix(prefix); + + // Configure the spinners to automatically tick every 100ms + pb.enable_steady_tick(Duration::from_millis(100)); + + ProgressState { + pb, + sub_bars: IndexMap::new(), + active_sub: None, + start_time: std::time::Instant::now(), + } + } + + /// Update the progress bar(s) based on a parsed git progress line. + fn update_pb(&self, progress: GitProgress, state: &mut ProgressState) { + // Target the active submodule if one exists, otherwise the main bar + let target_pb = if let Some(name) = &state.active_sub { + state.sub_bars.get(name).unwrap_or(&state.pb) + } else { + &state.pb + }; + + match progress { + // This case is only relevant for submodule operations i.e. `git submodule update` + // It indicates that a new submodule has been registered, and we create a new progress bar for it. + GitProgress::SubmoduleRegistered { name } => { + if self.git_op == GitProgressOps::Submodule { + // The main bar simply becomes a spinner since the sub-bar will show progress + // on the subsequent line. + state.pb.set_style( + ProgressStyle::with_template("{spinner:.cyan} {prefix:<40!}").unwrap(), + ); + + // The submodule style is similar to the main bar, but indented and without spinner + let style = ProgressStyle::with_template( + " {prefix:<32!} {bar:40.cyan/blue} {percent:>3}% {msg}", + ) + .unwrap() + .progress_chars("-- "); + + // We can have multiple sub-bars, and we insert them after the last one. 
+ // In order to maintain proper tree-like structure, we need to update the previous last bar + // to have a "T" connector (├─) instead of an "L" + let prev_bar = match state.sub_bars.last() { + Some((last_name, last_pb)) => { + let prev_prefix = format!("{} {}", "├─".dimmed(), last_name); + last_pb.set_prefix(prev_prefix); + last_pb // Insert the new one after this one + } + None => &state.pb, // Insert the first one after the main bar + }; + + // Create the new sub-bar and insert it in the multi-progress *after* the previous sub-bar + let sub_pb = self + .multiprogress + .insert_after(prev_bar, ProgressBar::new(100).with_style(style)); + // Set the prefix and initial message + let sub_prefix = format!("{} {}", "╰─".dimmed(), &name); + sub_pb.set_prefix(sub_prefix); + sub_pb.set_message(format!("{}", "Waiting...".dimmed())); + + // Store the sub-bar in the state for later updates + state.sub_bars.insert(name, sub_pb); + } + } + // This indicates that we are starting to clone a submodule. + // Again, it is only relevant for submodule operations. For normal + // clones, we just update the main bar. + GitProgress::CloningInto { name } => { + if self.git_op == GitProgressOps::Submodule { + // Logic to handle missing 'checked out' lines: + // If we are activating 'bar', but 'foo' was active, assume 'foo' is done. + if let Some(prev) = &state.active_sub { + if prev != &name { + if let Some(b) = state.sub_bars.get(prev) { + b.finish_and_clear(); + } + } + } + // Set the new bar to active + if let Some(bar) = state.sub_bars.get(&name) { + // Switch style to the active progress bar style + bar.set_message(format!("{}", "Cloning...".dimmed())); + } + state.active_sub = Some(name); + } + } + // Indicates that we have finished processing a submodule. 
+ GitProgress::SubmoduleEnd { name } => { + // We finish and clear the sub-bar + if let Some(bar) = state.sub_bars.get(&name) { + bar.finish_and_clear(); + } + // If this was the active submodule, we clear the active state + if state.active_sub.as_ref() == Some(&name) { + state.active_sub = None; + } + } + // Update the progress percentage for receiving objects + GitProgress::Receiving { percent, .. } => { + target_pb.set_message(format!("{}", "Receiving objects".dimmed())); + target_pb.set_position(percent as u64); + } + // Update the progress percentage for resolving deltas + GitProgress::Resolving { percent, .. } => { + target_pb.set_message(format!("{}", "Resolving deltas".dimmed())); + target_pb.set_position(percent as u64); + } + // Update the progress percentage for checking out files + GitProgress::Checkout { percent, .. } => { + target_pb.set_message(format!("{}", "Checking out".dimmed())); + target_pb.set_position(percent as u64); + } + // Handle errors by finishing and clearing the target bar, then logging the error + GitProgress::Error(err_msg) => { + target_pb.finish_and_clear(); + errorln!( + "{} {}: {}", + "Error during git operation of", + fmt_pkg!(&self.name), + err_msg + ); + } + _ => {} + } + } + + // Finalize the progress bars and print a completion message. 
+    pub fn finish(self, state: &mut ProgressState) {
+        // Clear all sub bars that might be lingering
+        for pb in state.sub_bars.values() {
+            pb.finish_and_clear();
+        }
+        state.pb.finish_and_clear();
+
+        // Print a final message indicating completion
+        let op_str = match self.git_op {
+            GitProgressOps::Clone => "Cloned",
+            GitProgressOps::Fetch => "Fetched",
+            GitProgressOps::Checkout => "Checked out",
+            GitProgressOps::Submodule => "Updated Submodules",
+        };
+
+        // Print a completion message on top of active progress bars
+        self.multiprogress
+            .println(format!(
+                " {} {} {}",
+                fmt_completed!(op_str),
+                fmt_pkg!(&self.name),
+                fmt_duration(state.start_time.elapsed()).dimmed()
+            ))
+            .unwrap();
+    }
+}
+
+/// Parse a git progress line and return the corresponding `GitProgress` enum.
+pub fn parse_git_line(line: &str) -> GitProgress {
+    let line = line.trim();
+    let re = RE_GIT.get_or_init(|| {
+        Regex::new(r"(?x)
+            ^ # Start
+            (?:
+                # 1. Registration: Capture the path, ignore the descriptive name
+                Submodule\ '[^']+'\ .*\ registered\ for\ path\ '(?P<reg_path>[^']+)' |
+
+                # 2. Cloning: Capture the path
+                Cloning\ into\ '(?P<clone_path>[^']+)'\.\.\. |
+
+                # 3. Completion: Capture the name
+                Submodule\ path\ '(?P<sub_end_name>[^']+)':\ checked\ out\ '.* |
+
+                # 4. Progress
+                (?P<phase>Receiving\ objects|Resolving\ deltas|Checking\ out\ files):\s+(?P<percent>\d+)% |
+
+                # 5. Errors
+                (?P<error>fatal:.*|error:.*|remote:\ aborting.*)
+            )
+        ").expect("Invalid Regex")
+    });
+
+    if let Some(caps) = re.captures(line) {
+        if let Some(path) = caps.name("reg_path") {
+            return GitProgress::SubmoduleRegistered {
+                name: path_to_name(path.as_str()),
+            };
+        }
+        if let Some(path) = caps.name("clone_path") {
+            return GitProgress::CloningInto {
+                name: path_to_name(path.as_str()),
+            };
+        }
+        if let Some(path) = caps.name("sub_end_name") {
+            return GitProgress::SubmoduleEnd {
+                name: path_to_name(path.as_str()),
+            };
+        }
+        if let Some(err) = caps.name("error") {
+            return GitProgress::Error(err.as_str().to_string());
+        }
+        if let Some(phase) = caps.name("phase") {
+            let percent = caps.name("percent").unwrap().as_str().parse().unwrap_or(0);
+            return match phase.as_str() {
+                "Receiving objects" => GitProgress::Receiving { percent },
+                "Resolving deltas" => GitProgress::Resolving { percent },
+                "Checking out files" => GitProgress::Checkout { percent },
+                _ => GitProgress::Other,
+            };
+        }
+    }
+    // Otherwise, we don't care
+    GitProgress::Other
+}
+
+/// Helper to extract the name from a git path.
+fn path_to_name(path: &str) -> String {
+    path.trim_end_matches('/')
+        .split('/')
+        .next_back()
+        .unwrap_or(path)
+        .to_string()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_parsing_receiving() {
+        let p = parse_git_line("Receiving objects: 34% (123/456)");
+        match p {
+            GitProgress::Receiving { percent, .. } => assert_eq!(percent, 34),
+            _ => panic!("Failed to parse receiving"),
+        }
+    }
+    #[test]
+    fn test_parsing_receiving_done() {
+        let p =
+            parse_git_line("Receiving objects: 100% (1955/1955), 1.51 MiB | 45.53 MiB/s, done.");
+        match p {
+            GitProgress::Receiving { percent, ..
} => assert_eq!(percent, 100),
+            _ => panic!("Failed to parse receiving"),
+        }
+    }
+    #[test]
+    fn test_parsing_resolving() {
+        let p = parse_git_line("Resolving deltas: 56% (789/1400)");
+        match p {
+            GitProgress::Resolving { percent, .. } => assert_eq!(percent, 56),
+            _ => panic!("Failed to parse resolving"),
+        }
+    }
+    #[test]
+    fn test_parsing_resolving_deltas_done() {
+        let p = parse_git_line("Resolving deltas: 100% (1122/1122), done.");
+        match p {
+            GitProgress::Resolving { percent, .. } => assert_eq!(percent, 100),
+            _ => panic!("Failed to parse resolving"),
+        }
+    }
+    #[test]
+    fn test_parsing_cloning_into() {
+        let p = parse_git_line("Cloning into 'myrepo'...");
+        match p {
+            GitProgress::CloningInto { name } => assert_eq!(name, "myrepo"),
+            _ => panic!("Failed to parse cloning into"),
+        }
+    }
+    #[test]
+    fn test_parsing_submodule_registered() {
+        let p = parse_git_line("Submodule 'libs/mylib' ... registered for path 'libs/mylib'");
+        match p {
+            GitProgress::SubmoduleRegistered { name } => assert_eq!(name, "mylib"),
+            _ => panic!("Failed to parse submodule registered"),
+        }
+    }
+    #[test]
+    fn test_parsing_submodule_end() {
+        let p = parse_git_line("Submodule path 'libs/mylib': checked out 'abc1234'");
+        match p {
+            GitProgress::SubmoduleEnd { name } => assert_eq!(name, "mylib"),
+            _ => panic!("Failed to parse submodule end"),
+        }
+    }
+    #[test]
+    fn test_parsing_error() {
+        let p = parse_git_line("fatal: unable to access 'https://example.com/repo.git/': Could not resolve host: example.com");
+        match p {
+            GitProgress::Error(msg) => assert!(msg.contains("fatal: unable to access")),
+            _ => panic!("Failed to parse error"),
+        }
+    }
+}
diff --git a/src/resolver.rs b/src/resolver.rs
index 0a89164e..3af95c15 100644
--- a/src/resolver.rs
+++ b/src/resolver.rs
@@ -24,6 +24,7 @@ use tabwriter::TabWriter;
 use tokio::runtime::Runtime;
 
 use crate::config::{self, Locked, LockedPackage,
LockedSource, Manifest}; +use crate::diagnostic::Warnings; use crate::error::*; use crate::sess::{ DependencyConstraint, DependencyRef, DependencySource, DependencyVersion, DependencyVersions, @@ -104,16 +105,10 @@ impl<'ctx> DependencyResolver<'ctx> { // - the dependency is not in a clean state (i.e., was modified) if !ignore_checkout { if !is_git_repo { - if !self.sess.suppress_warnings.contains("W06") { - warnln!("[W06] Dependency `{}` in checkout_dir `{}` is not a git repository. Setting as path dependency.\n\ - \tPlease use `bender clone` to work on git dependencies.\n\ - \tRun `bender update --ignore-checkout-dir` to overwrite this at your own risk.", - dir.as_ref().unwrap().path().file_name().unwrap().to_str().unwrap(), - &checkout.display()); - } + Warnings::NotAGitDependency(depname.clone(), checkout.clone()).emit(); self.checked_out.insert( depname, - config::Dependency::Path(dir.unwrap().path(), Vec::new()), + config::Dependency::Path(dir.unwrap().path(), vec![]), ); } else if !(SysCommand::new(&self.sess.config.git) // If not in a clean state .arg("status") @@ -123,16 +118,10 @@ impl<'ctx> DependencyResolver<'ctx> { .stdout .is_empty()) { - if !self.sess.suppress_warnings.contains("W06") { - warnln!("[W06] Dependency `{}` in checkout_dir `{}` is not in a clean state. 
Setting as path dependency.\n\ - \tPlease use `bender clone` to work on git dependencies.\n\ - \tRun `bender update --ignore-checkout-dir` to overwrite this at your own risk.", - dir.as_ref().unwrap().path().file_name().unwrap().to_str().unwrap(), - &checkout.display()); - } + Warnings::DirtyGitDependency(depname.clone(), checkout.clone()).emit(); self.checked_out.insert( depname, - config::Dependency::Path(dir.unwrap().path(), Vec::new()), + config::Dependency::Path(dir.unwrap().path(), vec![]), ); } } @@ -385,12 +374,10 @@ impl<'ctx> DependencyResolver<'ctx> { match &locked_package.revision { Some(r) => r.clone(), None => { - if !io.sess.suppress_warnings.contains("W21") { - warnln!( - "[W21] No revision found in lock file for git dependency `{}`", - name - ); + Warnings::NoRevisionInLockFile { + pkg: name.to_string(), } + .emit(); return None; } }, @@ -473,13 +460,11 @@ impl<'ctx> DependencyResolver<'ctx> { match gv.revs.iter().position(|rev| *rev == hash.unwrap()) { Some(index) => index, None => { - if !self.sess.suppress_warnings.contains("W23") { - warnln!( - "[W23] Locked revision `{:?}` for dependency `{}` not found in available revisions, allowing update.", - hash.unwrap(), - dep - ); + Warnings::LockedRevisionNotFound { + rev: hash.unwrap().to_string(), + pkg: dep.to_string(), } + .emit(); self.locked.get_mut(dep.as_str()).unwrap().3 = false; continue; } @@ -653,14 +638,7 @@ impl<'ctx> DependencyResolver<'ctx> { if id == con_src { return Err(e); } - if !self.sess.suppress_warnings.contains("W20") { - warnln!( - "[W20] Ignoring error for `{}` at `{}`: {}", - name, - self.sess.dependency_source(*con_src), - e - ); - } + Warnings::IgnoringError(name.to_string(), self.sess.dependency_source(*con_src).to_string(), e.to_string()).emit(); Ok((*id, IndexSet::new())) } } diff --git a/src/sess.rs b/src/sess.rs index 64f6cf73..73302cf3 100644 --- a/src/sess.rs +++ b/src/sess.rs @@ -25,14 +25,17 @@ use async_recursion::async_recursion; use futures::future::join_all; 
use futures::TryFutureExt; use indexmap::{IndexMap, IndexSet}; +use indicatif::MultiProgress; use semver::Version; use tokio::sync::Semaphore; use typed_arena::Arena; use crate::cli::read_manifest; use crate::config::{self, Config, Manifest, PartialManifest}; +use crate::diagnostic::{Diagnostics, Warnings}; use crate::error::*; use crate::git::Git; +use crate::progress::{GitProgressOps, ProgressHandler}; use crate::src::SourceGroup; use crate::target::TargetSet; use crate::util::try_modification_time; @@ -77,8 +80,8 @@ pub struct Session<'ctx> { pub git_throttle: Arc, /// A toggle to disable remote fetches & clones pub local_only: bool, - /// A list of warnings to suppress. - pub suppress_warnings: IndexSet, + /// The global progress bar manager. + pub multiprogress: MultiProgress, } impl<'ctx> Session<'ctx> { @@ -92,8 +95,13 @@ impl<'ctx> Session<'ctx> { local_only: bool, force_fetch: bool, git_throttle: usize, - suppress_warnings: IndexSet, ) -> Session<'ctx> { + // Create the global multi-progress bar manager. + let multiprogress = MultiProgress::new(); + + // Register it with the global diagnostics system + Diagnostics::set_multiprogress(Some(multiprogress.clone())); + Session { root, manifest, @@ -118,7 +126,7 @@ impl<'ctx> Session<'ctx> { cache: Default::default(), git_throttle: Arc::new(Semaphore::new(git_throttle)), local_only, - suppress_warnings, + multiprogress, } } @@ -140,15 +148,15 @@ impl<'ctx> Session<'ctx> { calling_package ); let src = DependencySource::from(cfg); - self.deps.lock().unwrap().add( - self.intern_dependency_entry(DependencyEntry { + self.deps + .lock() + .unwrap() + .add(self.intern_dependency_entry(DependencyEntry { name: name.into(), source: src, revision: None, version: None, - }), - &self.suppress_warnings, - ) + })) } /// Load a lock file. 
@@ -175,7 +183,6 @@ impl<'ctx> Session<'ctx> { .as_ref() .map(|s| semver::Version::parse(s).unwrap()), }), - &self.suppress_warnings, ); graph_names.insert(id, &pkg.dependencies); names.insert(name.clone(), id); @@ -538,15 +545,9 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { )) } }; - let git = Git::new( - db_dir, - &self.sess.config.git, - self.sess.git_throttle.clone(), - ); - let name2 = String::from(name); + let git = Git::new(db_dir, &self.sess.config.git); let url = String::from(url); let url2 = url.clone(); - let url3 = url.clone(); // Either initialize the repository or update it if needed. if !db_dir.join("config").exists() { @@ -558,33 +559,45 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { } // Initialize. self.sess.stats.num_database_init.increment(); - // TODO MICHAERO: May need throttle - stageln!("Cloning", "{} ({})", name2, url2); + // The progress bar object for cloning. We only use it for the + // last fetch operation, which is the only network operation here. 
+ let pb = Some(ProgressHandler::new( + self.sess.multiprogress.clone(), + GitProgressOps::Clone, + name, + )); git.clone() - .spawn_with(|c| c.arg("init").arg("--bare")) + .spawn_with(|c| c.arg("init").arg("--bare"), None, None) .await?; git.clone() - .spawn_with(|c| c.arg("remote").arg("add").arg("origin").arg(url)) + .spawn_with( + |c| c.arg("remote").arg("add").arg("origin").arg(url), + None, + None, + ) .await?; git.clone() - .fetch("origin") + .fetch("origin", Some(self.sess.git_throttle.clone()), pb) .and_then(|_| async { if let Some(reference) = fetch_ref { - git.clone().fetch_ref("origin", reference).await + git.clone() + .fetch_ref( + "origin", + reference, + Some(self.sess.git_throttle.clone()), + None, + ) + .await } else { Ok(()) } }) .await .map_err(move |cause| { - if url3.contains("git@") && !self.sess.suppress_warnings.contains("W07") { - warnln!("[W07] Please ensure your public ssh key is added to the git server."); - } - if !self.sess.suppress_warnings.contains("W07") { - warnln!( - "[W07] Please ensure the url is correct and you have access to the repository." - ); + if url2.contains("git@") { + Warnings::SshKeyMaybeMissing.emit(); } + Warnings::UrlMaybeIncorrect.emit(); Error::chain( format!("Failed to initialize git database in {:?}.", db_dir), cause, @@ -599,27 +612,34 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { return Ok(git); } self.sess.stats.num_database_fetch.increment(); - // TODO MICHAERO: May need throttle - stageln!("Fetching", "{} ({})", name2, url2); + // The progress bar object for fetching. 
+ let pb = Some(ProgressHandler::new( + self.sess.multiprogress.clone(), + GitProgressOps::Fetch, + name, + )); git.clone() - .fetch("origin") + .fetch("origin", Some(self.sess.git_throttle.clone()), pb) .and_then(|_| async { if let Some(reference) = fetch_ref { - git.clone().fetch_ref("origin", reference).await + git.clone() + .fetch_ref( + "origin", + reference, + Some(self.sess.git_throttle.clone()), + None, + ) + .await } else { Ok(()) } }) .await .map_err(move |cause| { - if url3.contains("git@") && !self.sess.suppress_warnings.contains("W07") { - warnln!("[W07] Please ensure your public ssh key is added to the git server."); - } - if !self.sess.suppress_warnings.contains("W07") { - warnln!( - "[W07] Please ensure the url is correct and you have access to the repository." - ); + if url2.contains("git@") { + Warnings::SshKeyMaybeMissing.emit(); } + Warnings::UrlMaybeIncorrect.emit(); Error::chain( format!("Failed to update git database in {:?}.", db_dir), cause, @@ -856,7 +876,7 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { ToCheckout, ToClone, } - let local_git = Git::new(path, &self.sess.config.git, self.sess.git_throttle.clone()); + let local_git = Git::new(path, &self.sess.config.git); let clear = if path.exists() { // Scrap checkouts with the wrong tag. 
let current_checkout = local_git.clone().current_checkout().await; @@ -897,31 +917,17 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { if checkout_already_good == CheckoutState::ToCheckout { if local_git .clone() - .spawn_with(|c| c.arg("status").arg("--porcelain")) + .spawn_with(|c| c.arg("status").arg("--porcelain"), None, None) .await .is_ok() { CheckoutState::ToCheckout } else { - if !self.sess.suppress_warnings.contains("W19") { - warnln!( - "[W19] Workspace checkout directory set and has uncommitted changes, not updating {} at {}.\n\ - \tRun `bender checkout --force` to overwrite the dependency at your own risk.", - name, - path.display() - ); - } + Warnings::CheckoutDirDirty(name.to_string(), path.to_path_buf()).emit(); CheckoutState::Clean } } else { - if !self.sess.suppress_warnings.contains("W19") { - warnln!( - "[W19] Workspace checkout directory set and remote url doesn't match, not updating {} at {}.\n\ - \tRun `bender checkout --force` to overwrite the dependency at your own risk.", - name, - path.display() - ); - } + Warnings::CheckoutDirUrlMismatch(name.to_string(), path.to_path_buf()).emit(); CheckoutState::Clean } } else { @@ -945,8 +951,6 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { // Perform the checkout if necessary. if clear != CheckoutState::Clean { - stageln!("Checkout", "{} ({})", name, url); - // First generate a tag to be cloned in the database. This is // necessary since `git clone` does not accept commits, but only // branches or tags for shallow clones. 
@@ -956,13 +960,17 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { let git = self.git_database(name, url, false, Some(revision)).await?; match git .clone() - .spawn_with(move |c| { - c.arg("tag") - .arg(tag_name_0) - .arg(revision) - .arg("--force") - .arg("--no-sign") - }) + .spawn_with( + move |c| { + c.arg("tag") + .arg(tag_name_0) + .arg(revision) + .arg("--force") + .arg("--no-sign") + }, + None, + None, + ) .await { Ok(r) => Ok(r), @@ -971,53 +979,120 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { "checkout_git: failed to tag commit {:?}, attempting fetch.", cause ); + let pb = Some(ProgressHandler::new( + self.sess.multiprogress.clone(), + GitProgressOps::Checkout, + name, + )); // Attempt to fetch from remote and retry, as commits seem unavailable. git.clone() - .spawn_with(move |c| c.arg("fetch").arg("--all")) + .spawn_with( + move |c| c.arg("fetch").arg("--all").arg("--progress"), + Some(self.sess.git_throttle.clone()), + pb, + ) .await?; - git.clone().spawn_with(move |c| c.arg("tag").arg(tag_name_1).arg(revision).arg("--force").arg("--no-sign")).map_err(|cause| { - if !self.sess.suppress_warnings.contains("W08") { - warnln!("[W08] Please ensure the commits are available on the remote or run bender update"); - } - Error::chain( - format!( - "Failed to checkout commit {} for {} given in Bender.lock.\n", - revision, name - ), - cause, + git.clone() + .spawn_with( + move |c| { + c.arg("tag") + .arg(tag_name_1) + .arg(revision) + .arg("--force") + .arg("--no-sign") + }, + None, + None, ) - }).await + .map_err(|cause| { + Warnings::RevisionNotFound(revision.to_string(), name.to_string()) + .emit(); + Error::chain( + format!( + "Failed to checkout commit {} for {} given in Bender.lock.\n", + revision, name + ), + cause, + ) + }) + .await } }?; if clear == CheckoutState::ToClone { + let pb = Some(ProgressHandler::new( + self.sess.multiprogress.clone(), + GitProgressOps::Checkout, + name, + )); git.clone() - .spawn_with(move |c| { - 
c.arg("clone") - .arg(git.path) - .arg(path) - .arg("--branch") - .arg(tag_name_2) - }) + .spawn_with( + move |c| { + c.arg("clone") + .arg(git.path) + .arg(path) + .arg("--branch") + .arg(tag_name_2) + .arg("--progress") + }, + None, + pb, + ) .await?; } else if clear == CheckoutState::ToCheckout { local_git .clone() - .spawn_with(move |c| c.arg("fetch").arg("--all").arg("--tags").arg("--prune")) + .spawn_with( + move |c| { + c.arg("fetch") + .arg("--all") + .arg("--tags") + .arg("--prune") + .arg("--progress") + }, + None, + None, + ) .await?; + let pb = Some(ProgressHandler::new( + self.sess.multiprogress.clone(), + GitProgressOps::Checkout, + name, + )); local_git .clone() - .spawn_with(move |c| c.arg("checkout").arg(tag_name_2).arg("--force")) + .spawn_with( + move |c| { + c.arg("checkout") + .arg(tag_name_2) + .arg("--force") + .arg("--progress") + }, + Some(self.sess.git_throttle.clone()), + pb, + ) + .await?; + } + if path.join(".gitmodules").exists() { + let pb = Some(ProgressHandler::new( + self.sess.multiprogress.clone(), + GitProgressOps::Submodule, + name, + )); + local_git + .clone() + .spawn_with( + move |c| { + c.arg("submodule") + .arg("update") + .arg("--init") + .arg("--recursive") + .arg("--progress") + }, + Some(self.sess.git_throttle.clone()), + pb, + ) .await?; } - local_git - .clone() - .spawn_with(move |c| { - c.arg("submodule") - .arg("update") - .arg("--init") - .arg("--recursive") - }) - .await?; } Ok(path) } @@ -1036,10 +1111,11 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { for dep in (dep_iter_mut).iter_mut() { if let (_, config::Dependency::Path(ref path, _)) = dep { if !path.starts_with("/") { - if !self.sess.suppress_warnings.contains("W09") { - warnln!("[W09] Path dependencies ({:?}) in git dependencies ({:?}) currently not fully supported. 
\ - Your mileage may vary.", dep.0, top_package_name); + Warnings::PathDepInGitDep { + pkg: dep.0.clone(), + top_pkg: top_package_name.clone(), } + .emit(); let sub_entries = db .clone() @@ -1092,9 +1168,8 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { cause, ) })?; - let mut full = partial - .validate_ignore_sources("", true, &self.sess.suppress_warnings) - .map_err(|cause| { + let mut full = + partial.validate_ignore_sources("", true).map_err(|cause| { Error::chain( format!( "Error in manifest of dependency `{}` at revision \ @@ -1155,18 +1230,19 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { use self::DependencyVersion as DepVer; match (&dep.source, version) { (DepSrc::Path(path), DepVer::Path) => { - if !path.starts_with("/") && !self.sess.suppress_warnings.contains("W10") { - warnln!("[W10] There may be issues in the path for {:?}.", dep.name); + if !path.is_absolute() { + Warnings::MaybePathIssues(dep.name.clone(), path.to_path_buf()).emit(); } let manifest_path = path.join("Bender.yml"); if manifest_path.exists() { - match read_manifest(&manifest_path, &self.sess.suppress_warnings) { + match read_manifest(&manifest_path) { Ok(m) => { - if dep.name != m.package.name - && !self.sess.suppress_warnings.contains("W11") - { - warnln!("[W11] Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior", - dep.name, m.package.name); // TODO: This should be an error + if dep.name != m.package.name { + Warnings::DepPkgNameNotMatching( + dep.name.clone(), + m.package.name.clone(), + ) + .emit(); } Ok(Some(self.sess.intern_manifest(m))) } @@ -1195,30 +1271,29 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { Error::chain(format!("Syntax error in manifest {:?}.", path), cause) })?; - match partial.validate_ignore_sources("", true, &self.sess.suppress_warnings) { + match partial.validate_ignore_sources("", true) { Ok(m) => { - if dep.name != m.package.name - && 
!self.sess.suppress_warnings.contains("W11") - { - warnln!("[W11] Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior", - dep.name, m.package.name); // TODO: This should be an error + if dep.name != m.package.name { + Warnings::DepPkgNameNotMatching( + dep.name.clone(), + m.package.name.clone(), + ) + .emit(); } Ok(Some(self.sess.intern_manifest(m))) } Err(e) => Err(e), } } else { - if !(self.sess.suppress_warnings.contains("E32") - && self.sess.suppress_warnings.contains("W32")) - { + if !(Diagnostics::is_suppressed("E32")) { if let DepSrc::Path(ref path) = dep.source { if !path.exists() { - if self.sess.suppress_warnings.contains("E32") { - warnln!( - "[W32] Path {:?} for dependency {:?} does not exist.", - path, - dep.name - ); + if Diagnostics::is_suppressed("E32") { + Warnings::DepPathMissing { + pkg: dep.name.clone(), + path: path.to_path_buf(), + } + .emit(); } else { return Err(Error::new(format!( "[E32] Path {:?} for dependency {:?} does not exist.", @@ -1228,13 +1303,11 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { } } } - if !self.sess.suppress_warnings.contains("W12") { - warnln!( - "[W12] Manifest not found for {:?} at {:?}", - dep.name, - dep.source - ); + Warnings::ManifestNotFound { + pkg: dep.name.clone(), + src: manifest_path.display().to_string(), } + .emit(); Ok(None) } } @@ -1263,9 +1336,8 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { cause, ) })?; - let mut full = partial - .validate_ignore_sources("", true, &self.sess.suppress_warnings) - .map_err(|cause| { + let mut full = + partial.validate_ignore_sources("", true).map_err(|cause| { Error::chain( format!( "Error in manifest of dependency `{}` at revision \ @@ -1290,9 +1362,11 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { Ok(Some(self.sess.intern_manifest(full))) } None => { - if !self.sess.suppress_warnings.contains("W12") { - warnln!("[W12] Manifest not found for {:?}", dep.name); + 
Warnings::ManifestNotFound { + pkg: dep.name.clone(), + src: url.to_string(), } + .emit(); Ok(None) } }; @@ -1303,18 +1377,12 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { .lock() .unwrap() .insert(cache_key, manifest); - if dep.name - != match manifest { - Some(x) => &x.package.name, - None => "dead", - } - && !self.sess.suppress_warnings.contains("W11") - { - warnln!("[W11] Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior", - dep.name, match manifest { - Some(x) => &x.package.name, - None => "dead" - }); // TODO (micprog): This should be an error + let pkg_name = match manifest { + Some(x) => x.package.name.clone(), + None => "dead".to_string(), + }; + if dep.name != pkg_name { + Warnings::DepPkgNameNotMatching(dep.name.clone(), pkg_name.clone()).emit(); } Ok(manifest) } @@ -1354,7 +1422,7 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { .and_then(move |path| { let manifest_path = path.join("Bender.yml"); if manifest_path.exists() { - match read_manifest(&manifest_path, &self.sess.suppress_warnings) { + match read_manifest(&manifest_path) { Ok(m) => Ok(Some(self.sess.intern_manifest(m))), Err(e) => Err(e), } @@ -1530,21 +1598,16 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> { IndexMap::new(); export_include_dirs.insert( m.package.name.clone(), - m.export_include_dirs - .iter() - .map(PathBuf::as_path) - .collect(), + m.export_include_dirs.iter().map(PathBuf::as_path).collect(), ); if !m.dependencies.is_empty() { for i in m.dependencies.keys() { if !all_export_include_dirs.contains_key(i) { - if !self.sess.suppress_warnings.contains("W13") { - warnln!("[W13] Name issue with {:?}, `export_include_dirs` not handled\n\tCould relate to name mismatch, see `bender update`", i); - } - export_include_dirs.insert(i.clone(), IndexSet::new()); + Warnings::ExportDirNameIssue(i.clone()).emit(); + export_include_dirs.insert(i.to_string(), IndexSet::new()); } else { 
export_include_dirs.insert( - i.clone(), + i.to_string(), all_export_include_dirs[i].clone(), ); } @@ -1829,22 +1892,14 @@ impl<'ctx> DependencyTable<'ctx> { /// /// The reference with which the information can later be retrieved is /// returned. - pub fn add( - &mut self, - entry: &'ctx DependencyEntry, - suppress_warnings: &IndexSet, - ) -> DependencyRef { + pub fn add(&mut self, entry: &'ctx DependencyEntry) -> DependencyRef { if let Some(&id) = self.ids.get(&entry) { debugln!("sess: reusing {:?}", id); id } else { if let DependencySource::Path(path) = &entry.source { - if !path.exists() && !suppress_warnings.contains("W22") { - warnln!( - "[W22] Dependency `{}` has source path `{}` which does not exist", - entry.name, - path.display() - ); + if !path.exists() { + Warnings::DepSourcePathMissing(entry.name.clone(), path.clone()).emit(); } } let id = DependencyRef(self.list.len()); diff --git a/src/src.rs b/src/src.rs index 5c2dad83..7bf4d3ee 100644 --- a/src/src.rs +++ b/src/src.rs @@ -15,6 +15,7 @@ use indexmap::{IndexMap, IndexSet}; use serde::ser::{Serialize, Serializer}; use crate::config::Validate; +use crate::diagnostic::{Diagnostics, Warnings}; use crate::error::Error; use crate::sess::Session; use crate::target::{TargetSet, TargetSpec}; @@ -52,20 +53,19 @@ impl<'ctx> Validate for SourceGroup<'ctx> { self, package_name: &str, pre_output: bool, - suppress_warnings: &IndexSet, ) -> crate::error::Result> { Ok(SourceGroup { files: self .files .into_iter() - .map(|f| f.validate(package_name, pre_output, suppress_warnings)) + .map(|f| f.validate(package_name, pre_output)) .collect::, Error>>()?, include_dirs: self .include_dirs .into_iter() .map(|p| { - if !(suppress_warnings.contains("W24") || p.exists() && p.is_dir()) { - warnln!("[W24] Include directory {} doesn't exist.", p.display()); + if !p.exists() || !p.is_dir() { + Warnings::IncludeDirMissing(p.to_path_buf()).emit(); } Ok(p) }) @@ -469,23 +469,18 @@ impl<'ctx> From<&'ctx Path> for SourceFile<'ctx> { 
impl<'ctx> Validate for SourceFile<'ctx> { type Output = SourceFile<'ctx>; type Error = Error; - fn validate( - self, - package_name: &str, - pre_output: bool, - suppress_warnings: &IndexSet, - ) -> Result, Error> { + fn validate(self, package_name: &str, pre_output: bool) -> Result, Error> { match self { SourceFile::File(path, ty) => { let env_path_buf = crate::config::env_path_from_string(path.to_string_lossy().to_string())?; let exists = env_path_buf.exists() && env_path_buf.is_file(); - if exists || suppress_warnings.contains("E31") { - if !(exists || suppress_warnings.contains("W31")) { - warnln!( - "[W31] File {} doesn't exist.", - env_path_buf.to_string_lossy() - ); + if exists || Diagnostics::is_suppressed("E31") { + if !exists { + Warnings::FileMissing { + path: env_path_buf.clone(), + } + .emit(); } Ok(SourceFile::File(path, ty)) } else { @@ -495,11 +490,9 @@ impl<'ctx> Validate for SourceFile<'ctx> { ))) } } - SourceFile::Group(srcs) => Ok(SourceFile::Group(Box::new(srcs.validate( - package_name, - pre_output, - suppress_warnings, - )?))), + SourceFile::Group(srcs) => Ok(SourceFile::Group(Box::new( + srcs.validate(package_name, pre_output)?, + ))), } } } diff --git a/src/util.rs b/src/util.rs index f332c638..70389037 100644 --- a/src/util.rs +++ b/src/util.rs @@ -18,6 +18,9 @@ use semver::{Version, VersionReq}; use serde::de::{Deserialize, Deserializer}; use serde::ser::{Serialize, Serializer}; +/// Re-export owo_colors for use in macros. +pub use owo_colors::OwoColorize; + use crate::error::*; /// A type that cannot be materialized. @@ -427,3 +430,60 @@ pub fn version_req_bottom_bound(req: &VersionReq) -> Result> { Ok(None) } } + +/// Format time duration with proper units. 
+pub fn fmt_duration(duration: std::time::Duration) -> String { + match duration.as_millis() { + t if t < 1000 => format!("in {}ms", t), + t if t < 60_000 => format!("in {:.1}s", t as f64 / 1000.0), + t => format!("in {:.1}min", t as f64 / 60000.0), + } +} + +/// Format for `package` names in diagnostic messages. +#[macro_export] +macro_rules! fmt_pkg { + ($pkg:expr) => { + $crate::util::OwoColorize::bold(&$pkg) + }; +} + +/// Format for `path` and `url` fields in diagnostic messages. +#[macro_export] +macro_rules! fmt_path { + ($pkg:expr) => { + $crate::util::OwoColorize::underline(&$pkg) + }; +} + +/// Format for `field` names in diagnostic messages. +#[macro_export] +macro_rules! fmt_field { + ($field:expr) => { + $crate::util::OwoColorize::italic(&$field) + }; +} + +/// Format for `version` and `revision` fields in diagnostic messages. +#[macro_export] +macro_rules! fmt_version { + ($ver:expr) => { + $crate::util::OwoColorize::bold(&$ver) + }; +} + +/// Format for an ongoing progress stage in diagnostic messages. +#[macro_export] +macro_rules! fmt_stage { + ($stage:expr) => { + $crate::util::OwoColorize::cyan(&$stage).bold() + }; +} + +/// Format a completed progress stage in diagnostic messages. +#[macro_export] +macro_rules! fmt_completed { + ($stage:expr) => { + $crate::util::OwoColorize::green(&$stage).bold() + }; +}