diff --git a/Cargo.lock b/Cargo.lock index 8ef6323..60f0b22 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,19 +4,13 @@ version = 3 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ "gimli", ] -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - [[package]] name = "adler2" version = "2.0.0" @@ -52,9 +46,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anyhow" @@ -81,18 +75,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] name = "async-trait" -version = "0.1.80" +version = "0.1.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -164,17 +158,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.3", + "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -215,9 +209,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" dependencies = [ "bytemuck", "serde", @@ -273,7 +267,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", "syn_derive", ] @@ -285,22 +279,22 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytemuck" -version = "1.16.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78834c15cb5d5efe3452d58b1e8ba890dd62d21907f867f383358198e56ebca5" +checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.7.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee891b04274a59bd38b412188e24b849617b2e45a0fd8d057deb63e7403761b" +checksum = "0cc8b54b395f2fcfbb3d90c47b01c7f444d94d05bdeb775811dec868ac3bbc26" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -311,18 +305,21 @@ checksum = 
"1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.6.0" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" dependencies = [ "serde", ] [[package]] name = "cc" -version = "1.0.99" +version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" +checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0" +dependencies = [ + "shlex", +] [[package]] name = "cfg-if" @@ -345,14 +342,14 @@ dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] name = "clap" -version = "4.5.7" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f" +checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" dependencies = [ "clap_builder", "clap_derive", @@ -360,9 +357,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.7" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f" +checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" dependencies = [ "anstyle", "clap_lex", @@ -370,21 +367,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.5" +version = "4.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6" +checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" [[package]] name = "const_format" @@ -418,15 +415,15 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.12" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" dependencies = [ "libc", ] @@ -524,7 +521,6 @@ dependencies = [ "cuprate-p2p-core", "cuprate-pruning", "cuprate-test-utils", - "cuprate-wire", "futures", "indexmap", "rand", @@ -549,7 +545,7 @@ dependencies = [ name = "cuprate-blockchain" version = "0.0.0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bytemuck", "cuprate-database", "cuprate-database-service", @@ -563,6 +559,7 @@ dependencies = [ "monero-serai", "pretty_assertions", "proptest", + "rand", "rayon", "serde", "tempfile", @@ -721,6 +718,7 @@ version = "0.1.0" dependencies = [ "chrono", "crossbeam", + 
"curve25519-dalek", "dirs", "futures", "libc", @@ -744,8 +742,9 @@ dependencies = [ name = "cuprate-levin" version = "0.1.0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bytes", + "cfg-if", "cuprate-helper", "futures", "proptest", @@ -862,7 +861,6 @@ version = "0.1.0" dependencies = [ "async-trait", "borsh", - "bytes", "cuprate-helper", "cuprate-p2p-core", "cuprate-types", @@ -886,7 +884,7 @@ dependencies = [ name = "cuprate-txpool" version = "0.0.0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bytemuck", "cuprate-database", "cuprate-database-service", @@ -923,7 +921,7 @@ dependencies = [ name = "cuprate-wire" version = "0.1.0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bytes", "cuprate-epee-encoding", "cuprate-fixed-bytes", @@ -940,7 +938,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "bitflags 2.5.0", + "bitflags 2.6.0", "borsh", "bytemuck", "bytes", @@ -1027,7 +1025,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -1110,17 +1108,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "displaydoc" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.66", -] - [[package]] name = "doxygen-rs" version = "0.4.2" @@ -1132,9 +1119,9 @@ dependencies = [ [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "equivalent" @@ -1154,9 +1141,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" [[package]] name = "ff" @@ -1182,7 +1169,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" dependencies = [ "crc32fast", - "miniz_oxide 0.8.0", + "miniz_oxide", ] [[package]] @@ -1263,7 +1250,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -1319,9 +1306,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" [[package]] name = "group" @@ -1370,11 +1357,11 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "heed" -version = "0.20.2" +version = "0.20.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f60d7cff16094be9627830b399c087a25017e93fb3768b87cd656a68ccb1ebe8" +checksum = "7d4f449bab7320c56003d37732a917e18798e2f1709d80263face2b4f9436ddb" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "byteorder", "heed-traits", "heed-types", @@ -1395,9 +1382,9 @@ checksum = 
"eb3130048d404c57ce5a1ac61a903696e8fcde7e8c2991e9fcfc1f27c3ef74ff" [[package]] name = "heed-types" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb0d6ba3700c9a57e83c013693e3eddb68a6d9b6781cacafc62a0d992e8ddb3" +checksum = "9d3f528b053a6d700b2734eabcd0fd49cb8230647aa72958467527b0b7917114" dependencies = [ "bincode", "byteorder", @@ -1437,9 +1424,9 @@ dependencies = [ [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http", @@ -1460,9 +1447,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.3" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0e7a4dd27b9476dc40cb050d3632d3bba3a70ddbff012285f7f8559a1e7e545" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" [[package]] name = "httpdate" @@ -1472,9 +1459,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.3.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" dependencies = [ "bytes", "futures-channel", @@ -1493,9 +1480,9 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", "http", @@ -1511,9 +1498,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.5" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" +checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba" dependencies = [ "bytes", "futures-channel", @@ -1531,9 +1518,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.60" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -1552,141 +1539,21 @@ dependencies = [ "cc", ] -[[package]] -name = "icu_collections" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locid" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" 
-dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - -[[package]] -name = "icu_normalizer" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "utf16_iter", - "utf8_iter", - "write16", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" - -[[package]] -name = "icu_properties" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f8ac670d7422d7f76b32e17a5db556510825b29ec9154f235977c9caba61036" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_locid_transform", - "icu_properties_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" - -[[package]] -name = "icu_provider" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_provider_macros", - "stable_deref_trait", - "tinystr", - "writeable", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.66", -] - [[package]] name = "idna" -version = "1.0.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4716a3a0933a1d01c2f72450e89596eb51dd34ef3c211ccd875acdf1f8fe47ed" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ - "icu_normalizer", - "icu_properties", - "smallvec", - "utf8_iter", + "unicode-bidi", + "unicode-normalization", ] [[package]] name = "indexmap" -version = "2.2.6" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ "equivalent", "hashbrown", @@ -1700,9 +1567,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" dependencies = [ "wasm-bindgen", ] @@ -1718,15 +1585,15 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.155" +version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" [[package]] name = "libm" @@ -1740,7 +1607,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", ] @@ -1750,17 +1617,11 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" -[[package]] -name = "litemap" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" - [[package]] name = "lmdb-master-sys" -version = "0.2.1" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5142795c220effa4c8f4813537bd4c88113a07e45e93100ccb2adc5cec6c7f3" +checksum = "472c3760e2a8d0f61f322fb36788021bb36d573c502b50fa3e2bcaac3ec326c9" dependencies = [ "cc", "doxygen-rs", @@ -1779,9 +1640,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "matchit" @@ -1801,9 +1662,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "merlin" @@ -1823,15 +1684,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "miniz_oxide" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" -dependencies = [ - "adler", -] - [[package]] name = "miniz_oxide" version = "0.8.0" @@ -1843,13 +1695,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.11" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ + "hermit-abi", "libc", "wasi", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -2021,21 +1874,11 @@ dependencies = [ "libm", ] -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi", - "libc", -] - [[package]] name = "object" -version = "0.36.0" +version = "0.36.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"576dfe1fc8f9df304abb159d767a29d0476f7750fbf8aa7ad07816004a207434" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" dependencies = [ "memchr", ] @@ -2088,7 +1931,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2133,7 +1976,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -2162,7 +2005,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -2179,15 +2022,18 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", @@ -2195,9 +2041,9 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.1.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ "toml_edit", ] @@ -2227,22 +2073,22 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.85" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] [[package]] name = "proptest" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.5.0", + "bitflags 2.6.0", "lazy_static", "num-traits", "rand", @@ -2273,9 +2119,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] @@ -2367,27 +2213,27 @@ dependencies = [ [[package]] name = "redb" -version = "2.1.1" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6dd20d3cdeb9c7d2366a0b16b93b35b75aec15309fbeb7ce477138c9f68c8c0" +checksum = "e4760ad04a88ef77075ba86ba9ea79b919e6bab29c1764c5747237cd6eaedcaa" dependencies = [ "libc", ] [[package]] name = "redox_syscall" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +checksum = 
"0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] name = "redox_users" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom", "libredox", @@ -2411,7 +2257,7 @@ checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -2443,20 +2289,20 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.38.34" +version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", @@ -2465,9 +2311,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.10" +version = "0.23.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402" +checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" dependencies = [ "log", "once_cell", @@ -2480,9 +2326,9 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" dependencies = [ "openssl-probe", "rustls-pemfile", @@ -2493,9 +2339,9 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.2" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" dependencies = [ "base64", "rustls-pki-types", @@ -2509,9 +2355,9 @@ checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" [[package]] name = "rustls-webpki" -version = "0.102.4" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff448f7e92e913c4b7d4c6d8e4540a1724b319b4152b8aef6d4cf8339712b33e" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ "ring", "rustls-pki-types", @@ -2544,11 +2390,11 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2559,11 +2405,11 @@ 
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "security-framework" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -2572,9 +2418,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" dependencies = [ "core-foundation-sys", "libc", @@ -2588,9 +2434,9 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.203" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] @@ -2606,22 +2452,23 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.203" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] @@ -2669,6 +2516,12 @@ dependencies = [ "keccak", ] +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signal-hook-registry" version = "1.4.2" @@ -2728,12 +2581,6 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - [[package]] name = "std-shims" version = "0.1.1" @@ -2745,9 +2592,9 @@ dependencies = [ [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -2762,9 +2609,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.66" +version = "2.0.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" dependencies = [ "proc-macro2", "quote", @@ 
-2780,7 +2627,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -2804,17 +2651,6 @@ dependencies = [ "crossbeam-queue", ] -[[package]] -name = "synstructure" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.66", -] - [[package]] name = "tap" version = "1.0.1" @@ -2823,34 +2659,35 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.10.1" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" dependencies = [ "cfg-if", "fastrand", + "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -2864,43 +2701,47 @@ dependencies = [ ] [[package]] -name = "tinystr" -version = "0.7.6" +name = "tinyvec" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ - "displaydoc", - "zerovec", + "tinyvec_macros", ] [[package]] -name = "tokio" -version = "1.38.0" +name = "tinyvec_macros" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.40.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" dependencies = [ "backtrace", "bytes", "libc", "mio", - "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -2916,9 +2757,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = 
"4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", "pin-project-lite", @@ -2941,9 +2782,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", @@ -2956,15 +2797,15 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" [[package]] name = "toml_edit" -version = "0.21.1" +version = "0.22.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +checksum = "3b072cee73c449a636ffd6f32bd8de3a9f7119139aff882f44943ce2986dc5cf" dependencies = [ "indexmap", "toml_datetime", @@ -2990,15 +2831,15 @@ dependencies = [ [[package]] name = "tower-layer" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" @@ -3020,7 +2861,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] @@ -3060,10 +2901,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] -name = "unicode-ident" -version = "1.0.12" +name = "unicode-bidi" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] [[package]] name = "unicode-xid" @@ -3097,32 +2953,20 @@ dependencies = [ [[package]] name = "url" -version = "2.5.1" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c25da092f0a868cdf09e8674cd3b7ef3a7d92a24253e663a2fb85e2496de56" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wait-timeout" @@ -3150,34 +2994,35 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3185,22 +3030,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "webpki-roots" @@ -3235,12 +3080,12 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" dependencies = [ - "windows-core 0.57.0", - "windows-targets 0.52.5", + "windows-core 0.58.0", + "windows-targets 0.52.6", ] [[package]] @@ -3249,50 +3094,61 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] name = "windows-core" -version = "0.57.0" +version = "0.58.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" dependencies = [ "windows-implement", "windows-interface", "windows-result", - "windows-targets 0.52.5", + "windows-strings", + "windows-targets 0.52.6", ] [[package]] name = "windows-implement" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] name = "windows-interface" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.77", ] [[package]] name = "windows-result" -version = "0.1.2" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", ] [[package]] @@ -3310,7 +3166,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -3330,18 +3195,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -3352,9 +3217,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = 
"32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -3364,9 +3229,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -3376,15 +3241,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -3394,9 +3259,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -3406,9 +3271,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -3418,9 +3283,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -3430,31 +3295,19 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.5.40" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - -[[package]] -name = "writeable" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" - [[package]] name = "wyz" version = "0.5.1" @@ -3466,73 +3319,29 @@ dependencies = [ [[package]] name = "yansi" -version = "0.5.1" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" - -[[package]] -name = "yoke" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" -dependencies = [ - "serde", - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.66", - "synstructure", -] +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "zerocopy" -version = "0.7.34" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ + "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.34" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", -] - -[[package]] -name = "zerofrom" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.66", - "synstructure", + "syn 2.0.77", ] [[package]] @@ -3552,27 +3361,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", -] - -[[package]] -name = "zerovec" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.66", + "syn 2.0.77", ] diff --git a/Cargo.toml b/Cargo.toml index d2b3397..3c69240 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -211,7 +211,6 @@ unseparated_literal_suffix = "deny" unnecessary_safety_doc = "deny" unnecessary_safety_comment = "deny" unnecessary_self_imports = "deny" -tests_outside_test_module = "deny" string_to_string = "deny" rest_pat_in_fully_bound_structs = "deny" 
 redundant_type_annotations = "deny"
@@ -265,6 +264,7 @@ empty_enum_variants_with_brackets = "deny"
 empty_drop = "deny"
 clone_on_ref_ptr = "deny"
 upper_case_acronyms = "deny"
+allow_attributes = "deny"
 
 # Hot
 # inline_always = "deny"
diff --git a/consensus/src/transactions.rs b/consensus/src/transactions.rs
index 91de67c..09f6884 100644
--- a/consensus/src/transactions.rs
+++ b/consensus/src/transactions.rs
@@ -393,6 +393,11 @@ async fn verify_transactions_decoy_info<D>(
 where
     D: Database + Clone + Sync + Send + 'static,
 {
+    // Decoy info is not validated for V1 txs.
+    if hf == HardFork::V1 || txs.is_empty() {
+        return Ok(());
+    }
+
     batch_get_decoy_info(&txs, hf, database)
         .await?
         .try_for_each(|decoy_info| decoy_info.and_then(|di| Ok(check_decoy_info(&di, &hf)?)))?;
diff --git a/consensus/src/transactions/free.rs b/consensus/src/transactions/free.rs
index 02c5235..67b675a 100644
--- a/consensus/src/transactions/free.rs
+++ b/consensus/src/transactions/free.rs
@@ -78,7 +78,8 @@ pub fn tx_fee(tx: &Transaction) -> Result<u64, TransactionError> {
             }
 
             for output in &prefix.outputs {
-                fee.checked_sub(output.amount.unwrap_or(0))
+                fee = fee
+                    .checked_sub(output.amount.unwrap_or(0))
                     .ok_or(TransactionError::OutputsTooHigh)?;
             }
         }
diff --git a/helper/Cargo.toml b/helper/Cargo.toml
index c74e40f..baa3f23 100644
--- a/helper/Cargo.toml
+++ b/helper/Cargo.toml
@@ -9,8 +9,8 @@ repository = "https://github.com/Cuprate/cuprate/tree/main/consensus"
 
 [features]
-# All features on by default.
-default = ["std", "atomic", "asynch", "cast", "fs", "num", "map", "time", "thread", "constants"]
+# All features off by default.
+default = []
 std = []
 atomic = ["dep:crossbeam"]
 asynch = ["dep:futures", "dep:rayon"]
@@ -21,6 +21,7 @@ num = []
 map = ["cast", "dep:monero-serai"]
 time = ["dep:chrono", "std"]
 thread = ["std", "dep:target_os_lib"]
+tx = ["dep:monero-serai"]
 
 [dependencies]
 crossbeam = { workspace = true, optional = true }
@@ -39,7 +40,8 @@ target_os_lib = { package = "windows", version = ">=0.51", features = ["Win32_Sy
 target_os_lib = { package = "libc", version = "0.2.151", optional = true }
 
 [dev-dependencies]
-tokio = { workspace = true, features = ["full"] }
+tokio = { workspace = true, features = ["full"] }
+curve25519-dalek = { workspace = true }
 
 [lints]
 workspace = true
\ No newline at end of file
diff --git a/helper/src/atomic.rs b/helper/src/atomic.rs
index 4795896..aa66c0c 100644
--- a/helper/src/atomic.rs
+++ b/helper/src/atomic.rs
@@ -5,9 +5,6 @@
 //---------------------------------------------------------------------------------------------------- Use
 use crossbeam::atomic::AtomicCell;
 
-#[allow(unused_imports)] // docs
-use std::sync::atomic::{Ordering, Ordering::Acquire, Ordering::Release};
-
 //---------------------------------------------------------------------------------------------------- Atomic Float
 /// Compile-time assertion that our floats are
 /// lock-free for the target we're building for.
@@ -31,9 +28,13 @@ const _: () = {
 /// This is an alias for
 /// [`crossbeam::atomic::AtomicCell`](https://docs.rs/crossbeam/latest/crossbeam/atomic/struct.AtomicCell.html).
 ///
-/// Note that there are no [`Ordering`] parameters,
-/// atomic loads use [`Acquire`],
-/// and atomic stores use [`Release`].
+/// Note that there are no [Ordering] parameters,
+/// atomic loads use [Acquire],
+/// and atomic stores use [Release].
+///
+/// [Ordering]: std::sync::atomic::Ordering
+/// [Acquire]: std::sync::atomic::Ordering::Acquire
+/// [Release]: std::sync::atomic::Ordering::Release
 pub type AtomicF32 = AtomicCell<f32>;
 
 /// An atomic [`f64`].
@@ -41,9 +42,13 @@ pub type AtomicF32 = AtomicCell<f32>;
 /// This is an alias for
 /// [`crossbeam::atomic::AtomicCell`](https://docs.rs/crossbeam/latest/crossbeam/atomic/struct.AtomicCell.html).
 ///
-/// Note that there are no [`Ordering`] parameters,
-/// atomic loads use [`Acquire`],
-/// and atomic stores use [`Release`].
+/// Note that there are no [Ordering] parameters,
+/// atomic loads use [Acquire],
+/// and atomic stores use [Release].
+///
+/// [Ordering]: std::sync::atomic::Ordering
+/// [Acquire]: std::sync::atomic::Ordering::Acquire
+/// [Release]: std::sync::atomic::Ordering::Release
 pub type AtomicF64 = AtomicCell<f64>;
 
 //---------------------------------------------------------------------------------------------------- TESTS
diff --git a/helper/src/lib.rs b/helper/src/lib.rs
index de0d955..f29c499 100644
--- a/helper/src/lib.rs
+++ b/helper/src/lib.rs
@@ -31,6 +31,8 @@ pub mod thread;
 #[cfg(feature = "time")]
 pub mod time;
+#[cfg(feature = "tx")]
+pub mod tx;
 
 //---------------------------------------------------------------------------------------------------- Private Usage
 //----------------------------------------------------------------------------------------------------
diff --git a/helper/src/map.rs b/helper/src/map.rs
index 7805ea6..8cf0978 100644
--- a/helper/src/map.rs
+++ b/helper/src/map.rs
@@ -29,7 +29,7 @@ use crate::cast::{u64_to_usize, usize_to_u64};
 /// ```
 #[inline]
 pub const fn split_u128_into_low_high_bits(value: u128) -> (u64, u64) {
-    #[allow(clippy::cast_possible_truncation)]
+    #[expect(clippy::cast_possible_truncation)]
     (value as u64, (value >> 64) as u64)
 }
 
diff --git a/helper/src/num.rs b/helper/src/num.rs
index 674ed35..399c38d 100644
--- a/helper/src/num.rs
+++ b/helper/src/num.rs
@@ -91,7 +91,7 @@ where
 ///
 /// # Invariant
 /// If not sorted the output will be invalid.
-#[allow(clippy::debug_assert_with_mut_call)]
+#[expect(clippy::debug_assert_with_mut_call)]
 pub fn median<T>(array: impl AsRef<[T]>) -> T
 where
     T: Add<Output = T>
diff --git a/helper/src/thread.rs b/helper/src/thread.rs
index 04a2606..8ba025d 100644
--- a/helper/src/thread.rs
+++ b/helper/src/thread.rs
@@ -6,7 +6,6 @@
 use std::{cmp::max, num::NonZeroUsize};
 
 //---------------------------------------------------------------------------------------------------- Thread Count & Percent
-#[allow(non_snake_case)]
 /// Get the total amount of system threads.
 ///
 /// ```rust
@@ -28,10 +27,15 @@ macro_rules! impl_thread_percent {
     $(
         $(#[$doc])*
         pub fn $fn_name() -> NonZeroUsize {
-            // unwrap here is okay because:
-            // - THREADS().get() is always non-zero
-            // - max() guards against 0
-            #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss, clippy::cast_precision_loss)]
+            // unwrap here is okay because:
+            // - THREADS().get() is always non-zero
+            // - max() guards against 0
+            #[expect(
+                clippy::cast_possible_truncation,
+                clippy::cast_sign_loss,
+                clippy::cast_precision_loss,
+                reason = "we need to round integers"
+            )]
             NonZeroUsize::new(max(1, (threads().get() as f64 * $percent).floor() as usize)).unwrap()
         }
     )*
diff --git a/helper/src/time.rs b/helper/src/time.rs
index ce39c2d..c7b12c2 100644
--- a/helper/src/time.rs
+++ b/helper/src/time.rs
@@ -129,7 +129,7 @@ pub const fn secs_to_clock(seconds: u32) -> (u8, u8, u8) {
     debug_assert!(m < 60);
     debug_assert!(s < 60);
 
-    #[allow(clippy::cast_possible_truncation)] // checked above
+    #[expect(clippy::cast_possible_truncation, reason = "checked above")]
     (h as u8, m, s)
 }
 
@@ -154,7 +154,7 @@ pub fn time() -> u32 {
 ///
 /// This is guaranteed to return a value between `0..=86399`
 pub fn time_utc() -> u32 {
-    #[allow(clippy::cast_sign_loss)] // checked in function calls
+    #[expect(clippy::cast_sign_loss, reason = "checked in function calls")]
     unix_clock(chrono::offset::Local::now().timestamp() as u64)
 }
 
diff --git a/helper/src/tx.rs b/helper/src/tx.rs
new file mode 100644
index 0000000..53706ec
--- /dev/null
+++ b/helper/src/tx.rs
@@ -0,0 +1,70 @@
+//! Utils for working with [`Transaction`]
+
+use monero_serai::transaction::{Input, Transaction};
+
+/// Calculates the fee of the [`Transaction`].
+///
+/// # Panics
+/// This will panic if the inputs overflow or the transaction outputs too much, so should only
+/// be used on known to be valid txs.
+pub fn tx_fee(tx: &Transaction) -> u64 {
+    let mut fee = 0_u64;
+
+    match &tx {
+        Transaction::V1 { prefix, .. } => {
+            for input in &prefix.inputs {
+                match input {
+                    Input::Gen(_) => return 0,
+                    Input::ToKey { amount, .. } => {
+                        fee = fee.checked_add(amount.unwrap_or(0)).unwrap();
+                    }
+                }
+            }
+
+            for output in &prefix.outputs {
+                fee = fee.checked_sub(output.amount.unwrap_or(0)).unwrap();
+            }
+        }
+        Transaction::V2 { proofs, .. } => {
+            fee = proofs.as_ref().unwrap().base.fee;
+        }
+    };
+
+    fee
+}
+
+#[cfg(test)]
+mod test {
+    use curve25519_dalek::{edwards::CompressedEdwardsY, EdwardsPoint};
+    use monero_serai::transaction::{NotPruned, Output, Timelock, TransactionPrefix};
+
+    use super::*;
+
+    #[test]
+    #[should_panic(expected = "called `Option::unwrap()` on a `None` value")]
+    fn tx_fee_panic() {
+        let input = Input::ToKey {
+            amount: Some(u64::MAX),
+            key_offsets: vec![],
+            key_image: EdwardsPoint::default(),
+        };
+
+        let output = Output {
+            amount: Some(u64::MAX),
+            key: CompressedEdwardsY::default(),
+            view_tag: None,
+        };
+
+        let tx = Transaction::<NotPruned>::V1 {
+            prefix: TransactionPrefix {
+                additional_timelock: Timelock::None,
+                inputs: vec![input; 2],
+                outputs: vec![output],
+                extra: vec![],
+            },
+            signatures: vec![],
+        };
+
+        tx_fee(&tx);
+    }
+}
diff --git a/net/epee-encoding/Cargo.toml b/net/epee-encoding/Cargo.toml
index 85ee2c9..90a339f 100644
--- a/net/epee-encoding/Cargo.toml
+++ b/net/epee-encoding/Cargo.toml
@@ -25,3 +25,6 @@ thiserror = { workspace = true, optional = true}
 
 [dev-dependencies]
 hex = { workspace = true, features = ["default"] }
+
+[lints]
+workspace = true
\ No newline at end of file
diff --git a/net/epee-encoding/src/container_as_blob.rs b/net/epee-encoding/src/container_as_blob.rs
index 701ec66..83078c2 100644
--- a/net/epee-encoding/src/container_as_blob.rs
+++ b/net/epee-encoding/src/container_as_blob.rs
@@ -9,7 +9,7 @@
 pub struct ContainerAsBlob<T: Containerable + EpeeValue>(Vec<T>);
 
 impl<T: Containerable + EpeeValue> From<Vec<T>> for ContainerAsBlob<T> {
     fn from(value: Vec<T>) -> Self {
-        ContainerAsBlob(value)
+        Self(value)
     }
 }
@@ -36,9 +36,7 @@ impl<T: Containerable + EpeeValue> EpeeValue for ContainerAsBlob<T> {
             ));
         }
 
-        Ok(ContainerAsBlob(
-            bytes.chunks(T::SIZE).map(T::from_bytes).collect(),
-        ))
+        Ok(Self(bytes.chunks(T::SIZE).map(T::from_bytes).collect()))
     }
 
     fn should_write(&self) -> bool {
@@ -46,10 +44,10 @@ impl<T: Containerable + EpeeValue> EpeeValue for ContainerAsBlob<T> {
     }
 
     fn epee_default_value() -> Option<Self> {
-        Some(ContainerAsBlob(vec![]))
+        Some(Self(vec![]))
     }
 
-    fn write<B: BufMut>(self, w: &mut B) -> crate::Result<()> {
+    fn write<B: BufMut>(self, w: &mut B) -> Result<()> {
         let mut buf = BytesMut::with_capacity(self.0.len() * T::SIZE);
         self.0.iter().for_each(|tt| tt.push_bytes(&mut buf));
         buf.write(w)
diff --git a/net/epee-encoding/src/error.rs b/net/epee-encoding/src/error.rs
index 4b3c7b0..756cd13 100644
--- a/net/epee-encoding/src/error.rs
+++ b/net/epee-encoding/src/error.rs
@@ -7,6 +7,7 @@ use core::{
 pub type Result<T> = core::result::Result<T, Error>;
 
 #[cfg_attr(feature = "std", derive(thiserror::Error))]
+#[expect(clippy::error_impl_error, reason = "FIXME: rename this type")]
 pub enum Error {
     #[cfg_attr(feature = "std", error("IO error: {0}"))]
     IO(&'static str),
@@ -17,19 +18,18 @@
 }
 
 impl Error {
-    fn field_name(&self) -> &'static str {
+    const fn field_name(&self) -> &'static str {
         match self {
-            Error::IO(_) => "io",
-            Error::Format(_) => "format",
-            Error::Value(_) => "value",
+            Self::IO(_) => "io",
+            Self::Format(_) => "format",
+            Self::Value(_) => "value",
         }
     }
 
     fn field_data(&self) -> &str {
         match self {
-            Error::IO(data) => data,
-            Error::Format(data) => data,
-            Error::Value(data) => data,
+            Self::IO(data) | Self::Format(data) => data,
+            Self::Value(data) => data,
         }
     }
 }
@@ -44,12 +44,12 @@ impl Debug for Error {
 
 impl From<TryFromIntError> for Error {
     fn from(_: TryFromIntError) -> Self {
-        Error::Value("Int is too large".to_string())
+        Self::Value("Int is too large".to_string())
     }
 }
 
 impl From<Utf8Error> for Error {
     fn from(_: Utf8Error) -> Self {
-        Error::Value("Invalid utf8 str".to_string())
+        Self::Value("Invalid utf8 
str".to_string()) } } diff --git a/net/epee-encoding/src/io.rs b/net/epee-encoding/src/io.rs index 110a1ec..c118145 100644 --- a/net/epee-encoding/src/io.rs +++ b/net/epee-encoding/src/io.rs @@ -3,7 +3,7 @@ use bytes::{Buf, BufMut}; use crate::error::*; #[inline] -pub fn checked_read_primitive( +pub(crate) fn checked_read_primitive( b: &mut B, read: impl Fn(&mut B) -> R, ) -> Result { @@ -11,16 +11,20 @@ pub fn checked_read_primitive( } #[inline] -pub fn checked_read(b: &mut B, read: impl Fn(&mut B) -> R, size: usize) -> Result { +pub(crate) fn checked_read( + b: &mut B, + read: impl Fn(&mut B) -> R, + size: usize, +) -> Result { if b.remaining() < size { - Err(Error::IO("Not enough bytes in buffer to build object."))?; + Err(Error::IO("Not enough bytes in buffer to build object.")) + } else { + Ok(read(b)) } - - Ok(read(b)) } #[inline] -pub fn checked_write_primitive( +pub(crate) fn checked_write_primitive( b: &mut B, write: impl Fn(&mut B, T), t: T, @@ -29,16 +33,16 @@ pub fn checked_write_primitive( } #[inline] -pub fn checked_write( +pub(crate) fn checked_write( b: &mut B, write: impl Fn(&mut B, T), t: T, size: usize, ) -> Result<()> { if b.remaining_mut() < size { - Err(Error::IO("Not enough capacity to write object."))?; + Err(Error::IO("Not enough capacity to write object.")) + } else { + write(b, t); + Ok(()) } - - write(b, t); - Ok(()) } diff --git a/net/epee-encoding/src/lib.rs b/net/epee-encoding/src/lib.rs index fa3449b..d55a546 100644 --- a/net/epee-encoding/src/lib.rs +++ b/net/epee-encoding/src/lib.rs @@ -59,9 +59,12 @@ //! //! ``` +#[cfg(test)] +use hex as _; + extern crate alloc; -use core::{ops::Deref, str::from_utf8 as str_from_utf8}; +use core::str::from_utf8 as str_from_utf8; use bytes::{Buf, BufMut, Bytes, BytesMut}; @@ -130,7 +133,7 @@ pub fn to_bytes(val: T) -> Result { fn read_header(r: &mut B) -> Result<()> { let buf = checked_read(r, |b: &mut B| b.copy_to_bytes(HEADER.len()), HEADER.len())?; - if buf.deref() != HEADER { + if &*buf != HEADER { return Err(Error::Format("Data does not contain header")); } Ok(()) @@ -185,7 +188,7 @@ fn read_object(r: &mut B, skipped_objects: &mut u8) -> Re for _ in 0..number_o_field { let field_name_bytes = read_field_name_bytes(r)?; - let field_name = str_from_utf8(field_name_bytes.deref())?; + let field_name = str_from_utf8(&field_name_bytes)?; if !object_builder.add_field(field_name, r)? { skip_epee_value(r, skipped_objects)?; @@ -289,7 +292,7 @@ where B: BufMut, { write_varint(usize_to_u64(iterator.len()), w)?; - for item in iterator.into_iter() { + for item in iterator { item.write(w)?; } Ok(()) @@ -329,10 +332,7 @@ impl EpeeObject for SkipObject { fn skip_epee_value(r: &mut B, skipped_objects: &mut u8) -> Result<()> { let marker = read_marker(r)?; - let mut len = 1; - if marker.is_seq { - len = read_varint(r)?; - } + let len = if marker.is_seq { read_varint(r)? 
} else { 1 }; if let Some(size) = marker.inner_marker.size() { let bytes_to_skip = size diff --git a/net/epee-encoding/src/marker.rs b/net/epee-encoding/src/marker.rs index d8ffc4b..16eaa6a 100644 --- a/net/epee-encoding/src/marker.rs +++ b/net/epee-encoding/src/marker.rs @@ -19,13 +19,13 @@ pub enum InnerMarker { } impl InnerMarker { - pub fn size(&self) -> Option { + pub const fn size(&self) -> Option { Some(match self { - InnerMarker::I64 | InnerMarker::U64 | InnerMarker::F64 => 8, - InnerMarker::I32 | InnerMarker::U32 => 4, - InnerMarker::I16 | InnerMarker::U16 => 2, - InnerMarker::I8 | InnerMarker::U8 | InnerMarker::Bool => 1, - InnerMarker::String | InnerMarker::Object => return None, + Self::I64 | Self::U64 | Self::F64 => 8, + Self::I32 | Self::U32 => 4, + Self::I16 | Self::U16 => 2, + Self::I8 | Self::U8 | Self::Bool => 1, + Self::String | Self::Object => return None, }) } } @@ -40,23 +40,23 @@ pub struct Marker { impl Marker { pub(crate) const fn new(inner_marker: InnerMarker) -> Self { - Marker { + Self { inner_marker, is_seq: false, } } + + #[must_use] pub const fn into_seq(self) -> Self { - if self.is_seq { - panic!("Sequence of sequence not allowed!"); - } + assert!(!self.is_seq, "Sequence of sequence not allowed!"); if matches!(self.inner_marker, InnerMarker::U8) { - return Marker { + return Self { inner_marker: InnerMarker::String, is_seq: false, }; } - Marker { + Self { inner_marker: self.inner_marker, is_seq: true, } @@ -112,7 +112,7 @@ impl TryFrom for Marker { _ => return Err(Error::Format("Unknown value Marker")), }; - Ok(Marker { + Ok(Self { inner_marker, is_seq, }) diff --git a/net/epee-encoding/src/value.rs b/net/epee-encoding/src/value.rs index 000d89c..816203e 100644 --- a/net/epee-encoding/src/value.rs +++ b/net/epee-encoding/src/value.rs @@ -71,7 +71,7 @@ impl EpeeValue for Vec { let individual_marker = Marker::new(marker.inner_marker); - let mut res = Vec::with_capacity(len); + let mut res = Self::with_capacity(len); for _ in 0..len { res.push(T::read(r, &individual_marker)?); } @@ -83,7 +83,7 @@ impl EpeeValue for Vec { } fn epee_default_value() -> Option { - Some(Vec::new()) + Some(Self::new()) } fn write(self, w: &mut B) -> Result<()> { @@ -181,7 +181,7 @@ impl EpeeValue for Vec { } fn epee_default_value() -> Option { - Some(Vec::new()) + Some(Self::new()) } fn should_write(&self) -> bool { @@ -216,7 +216,7 @@ impl EpeeValue for Bytes { } fn epee_default_value() -> Option { - Some(Bytes::new()) + Some(Self::new()) } fn should_write(&self) -> bool { @@ -247,14 +247,14 @@ impl EpeeValue for BytesMut { return Err(Error::IO("Not enough bytes to fill object")); } - let mut bytes = BytesMut::zeroed(len); + let mut bytes = Self::zeroed(len); r.copy_to_slice(&mut bytes); Ok(bytes) } fn epee_default_value() -> Option { - Some(BytesMut::new()) + Some(Self::new()) } fn should_write(&self) -> bool { @@ -285,12 +285,11 @@ impl EpeeValue for ByteArrayVec { return Err(Error::IO("Not enough bytes to fill object")); } - ByteArrayVec::try_from(r.copy_to_bytes(len)) - .map_err(|_| Error::Format("Field has invalid length")) + Self::try_from(r.copy_to_bytes(len)).map_err(|_| Error::Format("Field has invalid length")) } fn epee_default_value() -> Option { - Some(ByteArrayVec::try_from(Bytes::new()).unwrap()) + Some(Self::try_from(Bytes::new()).unwrap()) } fn should_write(&self) -> bool { @@ -320,8 +319,7 @@ impl EpeeValue for ByteArray { return Err(Error::IO("Not enough bytes to fill object")); } - ByteArray::try_from(r.copy_to_bytes(N)) - .map_err(|_| Error::Format("Field has 
invalid length")) + Self::try_from(r.copy_to_bytes(N)).map_err(|_| Error::Format("Field has invalid length")) } fn write(self, w: &mut B) -> Result<()> { @@ -335,7 +333,7 @@ impl EpeeValue for String { fn read(r: &mut B, marker: &Marker) -> Result { let bytes = Vec::::read(r, marker)?; - String::from_utf8(bytes).map_err(|_| Error::Format("Invalid string")) + Self::from_utf8(bytes).map_err(|_| Error::Format("Invalid string")) } fn should_write(&self) -> bool { @@ -343,7 +341,7 @@ impl EpeeValue for String { } fn epee_default_value() -> Option { - Some(String::new()) + Some(Self::new()) } fn write(self, w: &mut B) -> Result<()> { @@ -383,7 +381,7 @@ impl EpeeValue for Vec<[u8; N]> { let individual_marker = Marker::new(marker.inner_marker); - let mut res = Vec::with_capacity(len); + let mut res = Self::with_capacity(len); for _ in 0..len { res.push(<[u8; N]>::read(r, &individual_marker)?); } @@ -395,7 +393,7 @@ impl EpeeValue for Vec<[u8; N]> { } fn epee_default_value() -> Option { - Some(Vec::new()) + Some(Self::new()) } fn write(self, w: &mut B) -> Result<()> { diff --git a/net/epee-encoding/src/varint.rs b/net/epee-encoding/src/varint.rs index ae9c569..3f191dc 100644 --- a/net/epee-encoding/src/varint.rs +++ b/net/epee-encoding/src/varint.rs @@ -21,14 +21,14 @@ const FITS_IN_FOUR_BYTES: u64 = 2_u64.pow(32 - SIZE_OF_SIZE_MARKER) - 1; /// ``` pub fn read_varint(r: &mut B) -> Result { if !r.has_remaining() { - Err(Error::IO("Not enough bytes to build VarInt"))? + return Err(Error::IO("Not enough bytes to build VarInt")); } let vi_start = r.get_u8(); let len = 1 << (vi_start & 0b11); if r.remaining() < len - 1 { - Err(Error::IO("Not enough bytes to build VarInt"))? + return Err(Error::IO("Not enough bytes to build VarInt")); } let mut vi = u64::from(vi_start >> 2); @@ -67,12 +67,15 @@ pub fn write_varint(number: u64, w: &mut B) -> Result<()> { }; if w.remaining_mut() < 1 << size_marker { - Err(Error::IO("Not enough capacity to write VarInt"))?; + return Err(Error::IO("Not enough capacity to write VarInt")); } let number = (number << 2) | size_marker; - // Although `as` is unsafe we just checked the length. + #[expect( + clippy::cast_possible_truncation, + reason = "Although `as` is unsafe we just checked the length." 
+ )] match size_marker { 0 => w.put_u8(number as u8), 1 => w.put_u16_le(number as u16), diff --git a/net/epee-encoding/tests/alt_name.rs b/net/epee-encoding/tests/alt_name.rs index 8a9bc6f..3ddd1ef 100644 --- a/net/epee-encoding/tests/alt_name.rs +++ b/net/epee-encoding/tests/alt_name.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes, to_bytes}; struct AltName { diff --git a/net/epee-encoding/tests/duplicate_key.rs b/net/epee-encoding/tests/duplicate_key.rs index 0ed87af..fd8ccc9 100644 --- a/net/epee-encoding/tests/duplicate_key.rs +++ b/net/epee-encoding/tests/duplicate_key.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes}; struct T { diff --git a/net/epee-encoding/tests/epee_default.rs b/net/epee-encoding/tests/epee_default.rs index c221b28..778bbc0 100644 --- a/net/epee-encoding/tests/epee_default.rs +++ b/net/epee-encoding/tests/epee_default.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes, to_bytes}; pub struct Optional { @@ -58,7 +60,7 @@ fn epee_non_default_does_encode() { let val: Optional = from_bytes(&mut bytes).unwrap(); assert_eq!(val.optional_val, -3); - assert_eq!(val.val, 8) + assert_eq!(val.val, 8); } #[test] @@ -70,5 +72,5 @@ fn epee_value_not_present_with_default() { let val: Optional = from_bytes(&mut bytes).unwrap(); assert_eq!(val.optional_val, -4); - assert_eq!(val.val, 76) + assert_eq!(val.val, 76); } diff --git a/net/epee-encoding/tests/flattened.rs b/net/epee-encoding/tests/flattened.rs index a737370..dfb951f 100644 --- a/net/epee-encoding/tests/flattened.rs +++ b/net/epee-encoding/tests/flattened.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes, to_bytes}; struct Child { @@ -37,6 +39,7 @@ epee_object!( ); #[test] +#[expect(clippy::float_cmp)] fn epee_flatten() { let val2 = ParentChild { h: 38.9, diff --git a/net/epee-encoding/tests/options.rs b/net/epee-encoding/tests/options.rs index 5bae9a9..d242124 100644 --- a/net/epee-encoding/tests/options.rs +++ b/net/epee-encoding/tests/options.rs @@ -1,5 +1,6 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes, to_bytes}; -use std::ops::Deref; #[derive(Clone)] struct T { @@ -28,6 +29,6 @@ fn optional_val_in_data() { ]; let t: T = from_bytes(&mut &bytes[..]).unwrap(); let bytes2 = to_bytes(t.clone()).unwrap(); - assert_eq!(bytes.as_slice(), bytes2.deref()); + assert_eq!(bytes.as_slice(), &*bytes2); assert_eq!(t.val.unwrap(), 21); } diff --git a/net/epee-encoding/tests/p2p.rs b/net/epee-encoding/tests/p2p.rs index 2f74ef6..ba17386 100644 --- a/net/epee-encoding/tests/p2p.rs +++ b/net/epee-encoding/tests/p2p.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes, to_bytes}; #[derive(Eq, PartialEq, Debug, Clone)] @@ -5,7 +7,7 @@ pub struct SupportFlags(u32); impl From for SupportFlags { fn from(value: u32) -> Self { - SupportFlags(value) + Self(value) } } diff --git a/net/epee-encoding/tests/rpc.rs b/net/epee-encoding/tests/rpc.rs index 973498e..b366854 100644 --- a/net/epee-encoding/tests/rpc.rs +++ b/net/epee-encoding/tests/rpc.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer 
test module")] + use cuprate_epee_encoding::{epee_object, from_bytes, to_bytes}; #[derive(Clone, Debug, PartialEq)] diff --git a/net/epee-encoding/tests/seq.rs b/net/epee-encoding/tests/seq.rs index a4685d0..b4ae788 100644 --- a/net/epee-encoding/tests/seq.rs +++ b/net/epee-encoding/tests/seq.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes}; struct ObjSeq { diff --git a/net/epee-encoding/tests/stack_overflow.rs b/net/epee-encoding/tests/stack_overflow.rs index c53420a..78a1120 100644 --- a/net/epee-encoding/tests/stack_overflow.rs +++ b/net/epee-encoding/tests/stack_overflow.rs @@ -1,3 +1,5 @@ +#![expect(unused_crate_dependencies, reason = "outer test module")] + use cuprate_epee_encoding::{epee_object, from_bytes}; struct D { @@ -737,5 +739,5 @@ fn stack_overflow() { let obj: Result = from_bytes(&mut bytes.as_slice()); - assert!(obj.is_err()) + assert!(obj.is_err()); } diff --git a/net/fixed-bytes/Cargo.toml b/net/fixed-bytes/Cargo.toml index 4c5a1af..7844570 100644 --- a/net/fixed-bytes/Cargo.toml +++ b/net/fixed-bytes/Cargo.toml @@ -17,3 +17,6 @@ serde = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] serde_json = { workspace = true, features = ["std"] } + +[lints] +workspace = true \ No newline at end of file diff --git a/net/fixed-bytes/src/lib.rs b/net/fixed-bytes/src/lib.rs index 210813c..6071338 100644 --- a/net/fixed-bytes/src/lib.rs +++ b/net/fixed-bytes/src/lib.rs @@ -22,17 +22,15 @@ pub enum FixedByteError { } impl FixedByteError { - fn field_name(&self) -> &'static str { + const fn field_name(&self) -> &'static str { match self { - FixedByteError::InvalidLength => "input", + Self::InvalidLength => "input", } } - fn field_data(&self) -> &'static str { + const fn field_data(&self) -> &'static str { match self { - FixedByteError::InvalidLength => { - "Cannot create fix byte array, input has invalid length." - } + Self::InvalidLength => "Cannot create fix byte array, input has invalid length.", } } } @@ -82,7 +80,7 @@ impl ByteArray { impl From<[u8; N]> for ByteArray { fn from(value: [u8; N]) -> Self { - ByteArray(Bytes::copy_from_slice(&value)) + Self(Bytes::copy_from_slice(&value)) } } @@ -101,7 +99,7 @@ impl TryFrom for ByteArray { if value.len() != N { return Err(FixedByteError::InvalidLength); } - Ok(ByteArray(value)) + Ok(Self(value)) } } @@ -112,7 +110,7 @@ impl TryFrom> for ByteArray { if value.len() != N { return Err(FixedByteError::InvalidLength); } - Ok(ByteArray(Bytes::from(value))) + Ok(Self(Bytes::from(value))) } } @@ -142,11 +140,11 @@ impl<'de, const N: usize> Deserialize<'de> for ByteArrayVec { } impl ByteArrayVec { - pub fn len(&self) -> usize { + pub const fn len(&self) -> usize { self.0.len() / N } - pub fn is_empty(&self) -> bool { + pub const fn is_empty(&self) -> bool { self.len() == 0 } @@ -182,6 +180,7 @@ impl ByteArrayVec { /// /// # Panics /// Panics if at > len. 
+ #[must_use] pub fn split_off(&mut self, at: usize) -> Self { Self(self.0.split_off(at * N)) } @@ -189,9 +188,9 @@ impl ByteArrayVec { impl From<&ByteArrayVec> for Vec<[u8; N]> { fn from(value: &ByteArrayVec) -> Self { - let mut out = Vec::with_capacity(value.len()); + let mut out = Self::with_capacity(value.len()); for i in 0..value.len() { - out.push(value[i]) + out.push(value[i]); } out @@ -201,11 +200,11 @@ impl From<&ByteArrayVec> for Vec<[u8; N]> { impl From> for ByteArrayVec { fn from(value: Vec<[u8; N]>) -> Self { let mut bytes = BytesMut::with_capacity(N * value.len()); - for i in value.into_iter() { - bytes.extend_from_slice(&i) + for i in value { + bytes.extend_from_slice(&i); } - ByteArrayVec(bytes.freeze()) + Self(bytes.freeze()) } } @@ -217,13 +216,13 @@ impl TryFrom for ByteArrayVec { return Err(FixedByteError::InvalidLength); } - Ok(ByteArrayVec(value)) + Ok(Self(value)) } } impl From<[u8; N]> for ByteArrayVec { fn from(value: [u8; N]) -> Self { - ByteArrayVec(Bytes::copy_from_slice(value.as_slice())) + Self(Bytes::copy_from_slice(value.as_slice())) } } @@ -231,11 +230,11 @@ impl From<[[u8; N]; LEN]> for ByteArrayVec fn from(value: [[u8; N]; LEN]) -> Self { let mut bytes = BytesMut::with_capacity(N * LEN); - for val in value.into_iter() { + for val in value { bytes.put_slice(val.as_slice()); } - ByteArrayVec(bytes.freeze()) + Self(bytes.freeze()) } } @@ -247,7 +246,7 @@ impl TryFrom> for ByteArrayVec { return Err(FixedByteError::InvalidLength); } - Ok(ByteArrayVec(Bytes::from(value))) + Ok(Self(Bytes::from(value))) } } @@ -255,9 +254,12 @@ impl Index for ByteArrayVec { type Output = [u8; N]; fn index(&self, index: usize) -> &Self::Output { - if (index + 1) * N > self.0.len() { - panic!("Index out of range, idx: {}, length: {}", index, self.len()); - } + assert!( + (index + 1) * N <= self.0.len(), + "Index out of range, idx: {}, length: {}", + index, + self.len() + ); self.0[index * N..(index + 1) * N] .as_ref() diff --git a/net/levin/Cargo.toml b/net/levin/Cargo.toml index 1c585b9..68c32e5 100644 --- a/net/levin/Cargo.toml +++ b/net/levin/Cargo.toml @@ -14,6 +14,7 @@ tracing = ["dep:tracing", "tokio-util/tracing"] [dependencies] cuprate-helper = { path = "../../helper", default-features = false, features = ["cast"] } +cfg-if = { workspace = true } thiserror = { workspace = true } bytes = { workspace = true, features = ["std"] } bitflags = { workspace = true } @@ -26,4 +27,7 @@ proptest = { workspace = true } rand = { workspace = true, features = ["std", "std_rng"] } tokio-util = { workspace = true, features = ["io-util"]} tokio = { workspace = true, features = ["full"] } -futures = { workspace = true, features = ["std"] } \ No newline at end of file +futures = { workspace = true, features = ["std"] } + +[lints] +workspace = true \ No newline at end of file diff --git a/net/levin/src/codec.rs b/net/levin/src/codec.rs index 1177733..4c7695e 100644 --- a/net/levin/src/codec.rs +++ b/net/levin/src/codec.rs @@ -47,7 +47,7 @@ pub struct LevinBucketCodec { impl Default for LevinBucketCodec { fn default() -> Self { - LevinBucketCodec { + Self { state: LevinBucketState::WaitingForHeader, protocol: Protocol::default(), handshake_message_seen: false, @@ -56,8 +56,8 @@ impl Default for LevinBucketCodec { } impl LevinBucketCodec { - pub fn new(protocol: Protocol) -> Self { - LevinBucketCodec { + pub const fn new(protocol: Protocol) -> Self { + Self { state: LevinBucketState::WaitingForHeader, protocol, handshake_message_seen: false, @@ -112,8 +112,10 @@ impl Decoder for 
LevinBucketCodec { } } - let _ = - std::mem::replace(&mut self.state, LevinBucketState::WaitingForBody(head)); + drop(std::mem::replace( + &mut self.state, + LevinBucketState::WaitingForBody(head), + )); } LevinBucketState::WaitingForBody(head) => { let body_len = u64_to_usize(head.size); @@ -145,7 +147,7 @@ impl Encoder> for LevinBucketCodec { type Error = BucketError; fn encode(&mut self, item: Bucket, dst: &mut BytesMut) -> Result<(), Self::Error> { if let Some(additional) = (HEADER_SIZE + item.body.len()).checked_sub(dst.capacity()) { - dst.reserve(additional) + dst.reserve(additional); } item.header.write_bytes_into(dst); diff --git a/net/levin/src/header.rs b/net/levin/src/header.rs index 7acd085..057eee8 100644 --- a/net/levin/src/header.rs +++ b/net/levin/src/header.rs @@ -13,7 +13,7 @@ // copies or substantial portions of the Software. // -//! This module provides a struct BucketHead for the header of a levin protocol +//! This module provides a struct `BucketHead` for the header of a levin protocol //! message. use bitflags::bitflags; @@ -62,7 +62,7 @@ bitflags! { impl From for Flags { fn from(value: u32) -> Self { - Flags(value) + Self(value) } } @@ -99,9 +99,9 @@ impl BucketHead { /// /// # Panics /// This function will panic if there aren't enough bytes to fill the header. - /// Currently [HEADER_SIZE] - pub fn from_bytes(buf: &mut BytesMut) -> BucketHead { - BucketHead { + /// Currently [`HEADER_SIZE`] + pub fn from_bytes(buf: &mut BytesMut) -> Self { + Self { signature: buf.get_u64_le(), size: buf.get_u64_le(), have_to_return_data: buf.get_u8() != 0, diff --git a/net/levin/src/lib.rs b/net/levin/src/lib.rs index ab03bfb..a3f4b69 100644 --- a/net/levin/src/lib.rs +++ b/net/levin/src/lib.rs @@ -33,6 +33,16 @@ #![deny(unused_mut)] //#![deny(missing_docs)] +cfg_if::cfg_if! { + // Used in `tests/`. 
+ if #[cfg(test)] { + use futures as _; + use proptest as _; + use rand as _; + use tokio as _; + } +} + use std::fmt::Debug; use bytes::{Buf, Bytes}; @@ -99,7 +109,7 @@ pub struct Protocol { impl Default for Protocol { fn default() -> Self { - Protocol { + Self { version: MONERO_PROTOCOL_VERSION, signature: MONERO_LEVIN_SIGNATURE, max_packet_size_before_handshake: MONERO_MAX_PACKET_SIZE_BEFORE_HANDSHAKE, @@ -130,22 +140,22 @@ pub enum MessageType { impl MessageType { /// Returns if the message requires a response - pub fn have_to_return_data(&self) -> bool { + pub const fn have_to_return_data(&self) -> bool { match self { - MessageType::Request => true, - MessageType::Response | MessageType::Notification => false, + Self::Request => true, + Self::Response | Self::Notification => false, } } - /// Returns the `MessageType` given the flags and have_to_return_data fields - pub fn from_flags_and_have_to_return( + /// Returns the `MessageType` given the flags and `have_to_return_data` fields + pub const fn from_flags_and_have_to_return( flags: Flags, have_to_return: bool, ) -> Result { Ok(match (flags, have_to_return) { - (Flags::REQUEST, true) => MessageType::Request, - (Flags::REQUEST, false) => MessageType::Notification, - (Flags::RESPONSE, false) => MessageType::Response, + (Flags::REQUEST, true) => Self::Request, + (Flags::REQUEST, false) => Self::Notification, + (Flags::RESPONSE, false) => Self::Response, _ => { return Err(BucketError::InvalidHeaderFlags( "Unable to assign a message type to this bucket", @@ -154,10 +164,10 @@ impl MessageType { }) } - pub fn as_flags(&self) -> header::Flags { + pub const fn as_flags(&self) -> Flags { match self { - MessageType::Request | MessageType::Notification => header::Flags::REQUEST, - MessageType::Response => header::Flags::RESPONSE, + Self::Request | Self::Notification => Flags::REQUEST, + Self::Response => Flags::RESPONSE, } } } @@ -173,7 +183,7 @@ pub struct BucketBuilder { } impl BucketBuilder { - pub fn new(protocol: &Protocol) -> Self { + pub const fn new(protocol: &Protocol) -> Self { Self { signature: Some(protocol.signature), ty: None, @@ -185,27 +195,27 @@ impl BucketBuilder { } pub fn set_signature(&mut self, sig: u64) { - self.signature = Some(sig) + self.signature = Some(sig); } pub fn set_message_type(&mut self, ty: MessageType) { - self.ty = Some(ty) + self.ty = Some(ty); } pub fn set_command(&mut self, command: C) { - self.command = Some(command) + self.command = Some(command); } pub fn set_return_code(&mut self, code: i32) { - self.return_code = Some(code) + self.return_code = Some(code); } pub fn set_protocol_version(&mut self, version: u32) { - self.protocol_version = Some(version) + self.protocol_version = Some(version); } pub fn set_body(&mut self, body: Bytes) { - self.body = Some(body) + self.body = Some(body); } pub fn finish(self) -> Bucket { diff --git a/net/levin/src/message.rs b/net/levin/src/message.rs index 19aa1b5..32be653 100644 --- a/net/levin/src/message.rs +++ b/net/levin/src/message.rs @@ -33,13 +33,13 @@ pub enum LevinMessage { impl From for LevinMessage { fn from(value: T) -> Self { - LevinMessage::Body(value) + Self::Body(value) } } impl From> for LevinMessage { fn from(value: Bucket) -> Self { - LevinMessage::Bucket(value) + Self::Bucket(value) } } @@ -58,7 +58,7 @@ pub struct Dummy(pub usize); impl From for LevinMessage { fn from(value: Dummy) -> Self { - LevinMessage::Dummy(value.0) + Self::Dummy(value.0) } } @@ -76,12 +76,11 @@ pub fn make_fragmented_messages( fragment_size: usize, message: T, ) -> 
Result>, BucketError> { - if fragment_size * 2 < HEADER_SIZE { - panic!( - "Fragment size: {fragment_size}, is too small, must be at least {}", - 2 * HEADER_SIZE - ); - } + assert!( + fragment_size * 2 >= HEADER_SIZE, + "Fragment size: {fragment_size}, is too small, must be at least {}", + 2 * HEADER_SIZE + ); let mut builder = BucketBuilder::new(protocol); message.encode(&mut builder)?; diff --git a/net/levin/tests/fragmented_message.rs b/net/levin/tests/fragmented_message.rs index 512fd46..f34b145 100644 --- a/net/levin/tests/fragmented_message.rs +++ b/net/levin/tests/fragmented_message.rs @@ -1,3 +1,9 @@ +#![expect( + clippy::tests_outside_test_module, + unused_crate_dependencies, + reason = "outer test module" +)] + use bytes::{Buf, BufMut, Bytes, BytesMut}; use futures::{SinkExt, StreamExt}; use proptest::{prelude::any_with, prop_assert_eq, proptest, sample::size_range}; @@ -58,12 +64,12 @@ impl LevinBody for TestBody { ) -> Result { let size = u64_to_usize(body.get_u64_le()); // bucket - Ok(TestBody::Bytes(size, body.copy_to_bytes(size))) + Ok(Self::Bytes(size, body.copy_to_bytes(size))) } fn encode(self, builder: &mut BucketBuilder) -> Result<(), BucketError> { match self { - TestBody::Bytes(len, bytes) => { + Self::Bytes(len, bytes) => { let mut buf = BytesMut::new(); buf.put_u64_le(len as u64); buf.extend_from_slice(bytes.as_ref()); @@ -141,12 +147,12 @@ proptest! { message2.extend_from_slice(&fragments[0].body[(33 + 8)..]); for frag in fragments.iter().skip(1) { - message2.extend_from_slice(frag.body.as_ref()) + message2.extend_from_slice(frag.body.as_ref()); } prop_assert_eq!(message.as_slice(), &message2[0..message.len()], "numb_fragments: {}", fragments.len()); - for byte in message2[message.len()..].iter(){ + for byte in &message2[message.len()..]{ prop_assert_eq!(*byte, 0); } } diff --git a/net/wire/Cargo.toml b/net/wire/Cargo.toml index cbeb551..0b77cf1 100644 --- a/net/wire/Cargo.toml +++ b/net/wire/Cargo.toml @@ -15,7 +15,7 @@ cuprate-levin = { path = "../levin" } cuprate-epee-encoding = { path = "../epee-encoding" } cuprate-fixed-bytes = { path = "../fixed-bytes" } cuprate-types = { path = "../../types", default-features = false, features = ["epee"] } -cuprate-helper = { path = "../../helper", default-features = false, features = ["cast"] } +cuprate-helper = { path = "../../helper", default-features = false, features = ["map"] } bitflags = { workspace = true, features = ["std"] } bytes = { workspace = true, features = ["std"] } @@ -24,3 +24,5 @@ thiserror = { workspace = true } [dev-dependencies] hex = { workspace = true, features = ["std"]} +[lints] +workspace = true diff --git a/net/wire/src/network_address.rs b/net/wire/src/network_address.rs index 632739a..ad599b7 100644 --- a/net/wire/src/network_address.rs +++ b/net/wire/src/network_address.rs @@ -51,38 +51,38 @@ impl EpeeObject for NetworkAddress { } impl NetworkAddress { - pub fn get_zone(&self) -> NetZone { + pub const fn get_zone(&self) -> NetZone { match self { - NetworkAddress::Clear(_) => NetZone::Public, + Self::Clear(_) => NetZone::Public, } } - pub fn is_loopback(&self) -> bool { + pub const fn is_loopback(&self) -> bool { // TODO false } - pub fn is_local(&self) -> bool { + pub const fn is_local(&self) -> bool { // TODO false } - pub fn port(&self) -> u16 { + pub const fn port(&self) -> u16 { match self { - NetworkAddress::Clear(ip) => ip.port(), + Self::Clear(ip) => ip.port(), } } } impl From for NetworkAddress { fn from(value: net::SocketAddrV4) -> Self { - NetworkAddress::Clear(value.into()) + 
Self::Clear(value.into()) } } impl From for NetworkAddress { fn from(value: net::SocketAddrV6) -> Self { - NetworkAddress::Clear(value.into()) + Self::Clear(value.into()) } } diff --git a/net/wire/src/network_address/epee_builder.rs b/net/wire/src/network_address/epee_builder.rs index 36db824..c1d1742 100644 --- a/net/wire/src/network_address/epee_builder.rs +++ b/net/wire/src/network_address/epee_builder.rs @@ -74,7 +74,7 @@ impl From for TaggedNetworkAddress { fn from(value: NetworkAddress) -> Self { match value { NetworkAddress::Clear(addr) => match addr { - SocketAddr::V4(addr) => TaggedNetworkAddress { + SocketAddr::V4(addr) => Self { ty: Some(1), addr: Some(AllFieldsNetworkAddress { m_ip: Some(u32::from_be_bytes(addr.ip().octets())), @@ -82,7 +82,7 @@ impl From for TaggedNetworkAddress { addr: None, }), }, - SocketAddr::V6(addr) => TaggedNetworkAddress { + SocketAddr::V6(addr) => Self { ty: Some(2), addr: Some(AllFieldsNetworkAddress { addr: Some(addr.ip().octets()), diff --git a/net/wire/src/p2p.rs b/net/wire/src/p2p.rs index 3829d17..a7cd784 100644 --- a/net/wire/src/p2p.rs +++ b/net/wire/src/p2p.rs @@ -55,27 +55,27 @@ pub enum LevinCommand { impl std::fmt::Display for LevinCommand { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - if let LevinCommand::Unknown(id) = self { - return f.write_str(&format!("unknown id: {}", id)); + if let Self::Unknown(id) = self { + return f.write_str(&format!("unknown id: {id}")); } f.write_str(match self { - LevinCommand::Handshake => "handshake", - LevinCommand::TimedSync => "timed sync", - LevinCommand::Ping => "ping", - LevinCommand::SupportFlags => "support flags", + Self::Handshake => "handshake", + Self::TimedSync => "timed sync", + Self::Ping => "ping", + Self::SupportFlags => "support flags", - LevinCommand::NewBlock => "new block", - LevinCommand::NewTransactions => "new transactions", - LevinCommand::GetObjectsRequest => "get objects request", - LevinCommand::GetObjectsResponse => "get objects response", - LevinCommand::ChainRequest => "chain request", - LevinCommand::ChainResponse => "chain response", - LevinCommand::NewFluffyBlock => "new fluffy block", - LevinCommand::FluffyMissingTxsRequest => "fluffy missing transaction request", - LevinCommand::GetTxPoolCompliment => "get transaction pool compliment", + Self::NewBlock => "new block", + Self::NewTransactions => "new transactions", + Self::GetObjectsRequest => "get objects request", + Self::GetObjectsResponse => "get objects response", + Self::ChainRequest => "chain request", + Self::ChainResponse => "chain response", + Self::NewFluffyBlock => "new fluffy block", + Self::FluffyMissingTxsRequest => "fluffy missing transaction request", + Self::GetTxPoolCompliment => "get transaction pool compliment", - LevinCommand::Unknown(_) => unreachable!(), + Self::Unknown(_) => unreachable!(), }) } } @@ -83,50 +83,51 @@ impl std::fmt::Display for LevinCommand { impl LevinCommandTrait for LevinCommand { fn bucket_size_limit(&self) -> u64 { // https://github.com/monero-project/monero/blob/00fd416a99686f0956361d1cd0337fe56e58d4a7/src/cryptonote_basic/connection_context.cpp#L37 + #[expect(clippy::match_same_arms, reason = "formatting is more clear")] match self { - LevinCommand::Handshake => 65536, - LevinCommand::TimedSync => 65536, - LevinCommand::Ping => 4096, - LevinCommand::SupportFlags => 4096, + Self::Handshake => 65536, + Self::TimedSync => 65536, + Self::Ping => 4096, + Self::SupportFlags => 4096, - LevinCommand::NewBlock => 1024 * 1024 * 128, // 128 MB (max packet is a bit less 
than 100 MB though) - LevinCommand::NewTransactions => 1024 * 1024 * 128, // 128 MB (max packet is a bit less than 100 MB though) - LevinCommand::GetObjectsRequest => 1024 * 1024 * 2, // 2 MB - LevinCommand::GetObjectsResponse => 1024 * 1024 * 128, // 128 MB (max packet is a bit less than 100 MB though) - LevinCommand::ChainRequest => 512 * 1024, // 512 kB - LevinCommand::ChainResponse => 1024 * 1024 * 4, // 4 MB - LevinCommand::NewFluffyBlock => 1024 * 1024 * 4, // 4 MB - LevinCommand::FluffyMissingTxsRequest => 1024 * 1024, // 1 MB - LevinCommand::GetTxPoolCompliment => 1024 * 1024 * 4, // 4 MB + Self::NewBlock => 1024 * 1024 * 128, // 128 MB (max packet is a bit less than 100 MB though) + Self::NewTransactions => 1024 * 1024 * 128, // 128 MB (max packet is a bit less than 100 MB though) + Self::GetObjectsRequest => 1024 * 1024 * 2, // 2 MB + Self::GetObjectsResponse => 1024 * 1024 * 128, // 128 MB (max packet is a bit less than 100 MB though) + Self::ChainRequest => 512 * 1024, // 512 kB + Self::ChainResponse => 1024 * 1024 * 4, // 4 MB + Self::NewFluffyBlock => 1024 * 1024 * 4, // 4 MB + Self::FluffyMissingTxsRequest => 1024 * 1024, // 1 MB + Self::GetTxPoolCompliment => 1024 * 1024 * 4, // 4 MB - LevinCommand::Unknown(_) => u64::MAX, + Self::Unknown(_) => u64::MAX, } } fn is_handshake(&self) -> bool { - matches!(self, LevinCommand::Handshake) + matches!(self, Self::Handshake) } } impl From for LevinCommand { fn from(value: u32) -> Self { match value { - 1001 => LevinCommand::Handshake, - 1002 => LevinCommand::TimedSync, - 1003 => LevinCommand::Ping, - 1007 => LevinCommand::SupportFlags, + 1001 => Self::Handshake, + 1002 => Self::TimedSync, + 1003 => Self::Ping, + 1007 => Self::SupportFlags, - 2001 => LevinCommand::NewBlock, - 2002 => LevinCommand::NewTransactions, - 2003 => LevinCommand::GetObjectsRequest, - 2004 => LevinCommand::GetObjectsResponse, - 2006 => LevinCommand::ChainRequest, - 2007 => LevinCommand::ChainResponse, - 2008 => LevinCommand::NewFluffyBlock, - 2009 => LevinCommand::FluffyMissingTxsRequest, - 2010 => LevinCommand::GetTxPoolCompliment, + 2001 => Self::NewBlock, + 2002 => Self::NewTransactions, + 2003 => Self::GetObjectsRequest, + 2004 => Self::GetObjectsResponse, + 2006 => Self::ChainRequest, + 2007 => Self::ChainResponse, + 2008 => Self::NewFluffyBlock, + 2009 => Self::FluffyMissingTxsRequest, + 2010 => Self::GetTxPoolCompliment, - x => LevinCommand::Unknown(x), + x => Self::Unknown(x), } } } @@ -191,19 +192,19 @@ pub enum ProtocolMessage { } impl ProtocolMessage { - pub fn command(&self) -> LevinCommand { + pub const fn command(&self) -> LevinCommand { use LevinCommand as C; match self { - ProtocolMessage::NewBlock(_) => C::NewBlock, - ProtocolMessage::NewFluffyBlock(_) => C::NewFluffyBlock, - ProtocolMessage::GetObjectsRequest(_) => C::GetObjectsRequest, - ProtocolMessage::GetObjectsResponse(_) => C::GetObjectsResponse, - ProtocolMessage::ChainRequest(_) => C::ChainRequest, - ProtocolMessage::ChainEntryResponse(_) => C::ChainResponse, - ProtocolMessage::NewTransactions(_) => C::NewTransactions, - ProtocolMessage::FluffyMissingTransactionsRequest(_) => C::FluffyMissingTxsRequest, - ProtocolMessage::GetTxPoolCompliment(_) => C::GetTxPoolCompliment, + Self::NewBlock(_) => C::NewBlock, + Self::NewFluffyBlock(_) => C::NewFluffyBlock, + Self::GetObjectsRequest(_) => C::GetObjectsRequest, + Self::GetObjectsResponse(_) => C::GetObjectsResponse, + Self::ChainRequest(_) => C::ChainRequest, + Self::ChainEntryResponse(_) => C::ChainResponse, + Self::NewTransactions(_) => 
C::NewTransactions, + Self::FluffyMissingTransactionsRequest(_) => C::FluffyMissingTxsRequest, + Self::GetTxPoolCompliment(_) => C::GetTxPoolCompliment, } } @@ -230,26 +231,26 @@ impl ProtocolMessage { use LevinCommand as C; match self { - ProtocolMessage::NewBlock(val) => build_message(C::NewBlock, val, builder)?, - ProtocolMessage::NewTransactions(val) => { - build_message(C::NewTransactions, val, builder)? + Self::NewBlock(val) => build_message(C::NewBlock, val, builder)?, + Self::NewTransactions(val) => { + build_message(C::NewTransactions, val, builder)?; } - ProtocolMessage::GetObjectsRequest(val) => { - build_message(C::GetObjectsRequest, val, builder)? + Self::GetObjectsRequest(val) => { + build_message(C::GetObjectsRequest, val, builder)?; } - ProtocolMessage::GetObjectsResponse(val) => { - build_message(C::GetObjectsResponse, val, builder)? + Self::GetObjectsResponse(val) => { + build_message(C::GetObjectsResponse, val, builder)?; } - ProtocolMessage::ChainRequest(val) => build_message(C::ChainRequest, val, builder)?, - ProtocolMessage::ChainEntryResponse(val) => { - build_message(C::ChainResponse, val, builder)? + Self::ChainRequest(val) => build_message(C::ChainRequest, val, builder)?, + Self::ChainEntryResponse(val) => { + build_message(C::ChainResponse, val, builder)?; } - ProtocolMessage::NewFluffyBlock(val) => build_message(C::NewFluffyBlock, val, builder)?, - ProtocolMessage::FluffyMissingTransactionsRequest(val) => { - build_message(C::FluffyMissingTxsRequest, val, builder)? + Self::NewFluffyBlock(val) => build_message(C::NewFluffyBlock, val, builder)?, + Self::FluffyMissingTransactionsRequest(val) => { + build_message(C::FluffyMissingTxsRequest, val, builder)?; } - ProtocolMessage::GetTxPoolCompliment(val) => { - build_message(C::GetTxPoolCompliment, val, builder)? + Self::GetTxPoolCompliment(val) => { + build_message(C::GetTxPoolCompliment, val, builder)?; } } Ok(()) @@ -265,14 +266,14 @@ pub enum AdminRequestMessage { } impl AdminRequestMessage { - pub fn command(&self) -> LevinCommand { + pub const fn command(&self) -> LevinCommand { use LevinCommand as C; match self { - AdminRequestMessage::Handshake(_) => C::Handshake, - AdminRequestMessage::Ping => C::Ping, - AdminRequestMessage::SupportFlags => C::SupportFlags, - AdminRequestMessage::TimedSync(_) => C::TimedSync, + Self::Handshake(_) => C::Handshake, + Self::Ping => C::Ping, + Self::SupportFlags => C::SupportFlags, + Self::TimedSync(_) => C::TimedSync, } } @@ -286,13 +287,13 @@ impl AdminRequestMessage { cuprate_epee_encoding::from_bytes::(buf) .map_err(|e| BucketError::BodyDecodingError(e.into()))?; - AdminRequestMessage::Ping + Self::Ping } C::SupportFlags => { cuprate_epee_encoding::from_bytes::(buf) .map_err(|e| BucketError::BodyDecodingError(e.into()))?; - AdminRequestMessage::SupportFlags + Self::SupportFlags } _ => return Err(BucketError::UnknownCommand), }) @@ -302,11 +303,11 @@ impl AdminRequestMessage { use LevinCommand as C; match self { - AdminRequestMessage::Handshake(val) => build_message(C::Handshake, val, builder)?, - AdminRequestMessage::TimedSync(val) => build_message(C::TimedSync, val, builder)?, - AdminRequestMessage::Ping => build_message(C::Ping, EmptyMessage, builder)?, - AdminRequestMessage::SupportFlags => { - build_message(C::SupportFlags, EmptyMessage, builder)? 
+ Self::Handshake(val) => build_message(C::Handshake, val, builder)?, + Self::TimedSync(val) => build_message(C::TimedSync, val, builder)?, + Self::Ping => build_message(C::Ping, EmptyMessage, builder)?, + Self::SupportFlags => { + build_message(C::SupportFlags, EmptyMessage, builder)?; } } Ok(()) @@ -322,14 +323,14 @@ pub enum AdminResponseMessage { } impl AdminResponseMessage { - pub fn command(&self) -> LevinCommand { + pub const fn command(&self) -> LevinCommand { use LevinCommand as C; match self { - AdminResponseMessage::Handshake(_) => C::Handshake, - AdminResponseMessage::Ping(_) => C::Ping, - AdminResponseMessage::SupportFlags(_) => C::SupportFlags, - AdminResponseMessage::TimedSync(_) => C::TimedSync, + Self::Handshake(_) => C::Handshake, + Self::Ping(_) => C::Ping, + Self::SupportFlags(_) => C::SupportFlags, + Self::TimedSync(_) => C::TimedSync, } } @@ -349,11 +350,11 @@ impl AdminResponseMessage { use LevinCommand as C; match self { - AdminResponseMessage::Handshake(val) => build_message(C::Handshake, val, builder)?, - AdminResponseMessage::TimedSync(val) => build_message(C::TimedSync, val, builder)?, - AdminResponseMessage::Ping(val) => build_message(C::Ping, val, builder)?, - AdminResponseMessage::SupportFlags(val) => { - build_message(C::SupportFlags, val, builder)? + Self::Handshake(val) => build_message(C::Handshake, val, builder)?, + Self::TimedSync(val) => build_message(C::TimedSync, val, builder)?, + Self::Ping(val) => build_message(C::Ping, val, builder)?, + Self::SupportFlags(val) => { + build_message(C::SupportFlags, val, builder)?; } } Ok(()) @@ -368,23 +369,23 @@ pub enum Message { } impl Message { - pub fn is_request(&self) -> bool { - matches!(self, Message::Request(_)) + pub const fn is_request(&self) -> bool { + matches!(self, Self::Request(_)) } - pub fn is_response(&self) -> bool { - matches!(self, Message::Response(_)) + pub const fn is_response(&self) -> bool { + matches!(self, Self::Response(_)) } - pub fn is_protocol(&self) -> bool { - matches!(self, Message::Protocol(_)) + pub const fn is_protocol(&self) -> bool { + matches!(self, Self::Protocol(_)) } - pub fn command(&self) -> LevinCommand { + pub const fn command(&self) -> LevinCommand { match self { - Message::Request(mes) => mes.command(), - Message::Response(mes) => mes.command(), - Message::Protocol(mes) => mes.command(), + Self::Request(mes) => mes.command(), + Self::Response(mes) => mes.command(), + Self::Protocol(mes) => mes.command(), } } } @@ -398,27 +399,25 @@ impl LevinBody for Message { command: LevinCommand, ) -> Result { Ok(match typ { - MessageType::Request => Message::Request(AdminRequestMessage::decode(body, command)?), - MessageType::Response => { - Message::Response(AdminResponseMessage::decode(body, command)?) 
- } - MessageType::Notification => Message::Protocol(ProtocolMessage::decode(body, command)?), + MessageType::Request => Self::Request(AdminRequestMessage::decode(body, command)?), + MessageType::Response => Self::Response(AdminResponseMessage::decode(body, command)?), + MessageType::Notification => Self::Protocol(ProtocolMessage::decode(body, command)?), }) } fn encode(self, builder: &mut BucketBuilder) -> Result<(), BucketError> { match self { - Message::Protocol(pro) => { + Self::Protocol(pro) => { builder.set_message_type(MessageType::Notification); builder.set_return_code(0); pro.build(builder) } - Message::Request(req) => { + Self::Request(req) => { builder.set_message_type(MessageType::Request); builder.set_return_code(0); req.build(builder) } - Message::Response(res) => { + Self::Response(res) => { builder.set_message_type(MessageType::Response); builder.set_return_code(1); res.build(builder) diff --git a/net/wire/src/p2p/admin.rs b/net/wire/src/p2p/admin.rs index 173c293..67a8e21 100644 --- a/net/wire/src/p2p/admin.rs +++ b/net/wire/src/p2p/admin.rs @@ -45,7 +45,7 @@ pub struct HandshakeResponse { pub node_data: BasicNodeData, /// Core Sync Data pub payload_data: CoreSyncData, - /// PeerList + /// `PeerList` pub local_peerlist_new: Vec, } @@ -56,7 +56,7 @@ epee_object!( local_peerlist_new: Vec, ); -/// A TimedSync Request +/// A `TimedSync` Request #[derive(Debug, Clone, PartialEq, Eq)] pub struct TimedSyncRequest { /// Core Sync Data @@ -68,12 +68,12 @@ epee_object!( payload_data: CoreSyncData, ); -/// A TimedSync Response +/// A `TimedSync` Response #[derive(Debug, Clone, PartialEq, Eq)] pub struct TimedSyncResponse { /// Core Sync Data pub payload_data: CoreSyncData, - /// PeerList + /// `PeerList` pub local_peerlist_new: Vec, } diff --git a/net/wire/src/p2p/common.rs b/net/wire/src/p2p/common.rs index d585d07..d95a620 100644 --- a/net/wire/src/p2p/common.rs +++ b/net/wire/src/p2p/common.rs @@ -18,6 +18,7 @@ use bitflags::bitflags; use cuprate_epee_encoding::epee_object; +use cuprate_helper::map::split_u128_into_low_high_bits; pub use cuprate_types::{BlockCompleteEntry, PrunedTxBlobEntry, TransactionBlobs}; use crate::NetworkAddress; @@ -34,7 +35,7 @@ bitflags! { impl From for PeerSupportFlags { fn from(value: u32) -> Self { - PeerSupportFlags(value) + Self(value) } } @@ -113,16 +114,17 @@ epee_object! { } impl CoreSyncData { - pub fn new( + pub const fn new( cumulative_difficulty_128: u128, current_height: u64, pruning_seed: u32, top_id: [u8; 32], top_version: u8, - ) -> CoreSyncData { - let cumulative_difficulty = cumulative_difficulty_128 as u64; - let cumulative_difficulty_top64 = (cumulative_difficulty_128 >> 64) as u64; - CoreSyncData { + ) -> Self { + let (cumulative_difficulty, cumulative_difficulty_top64) = + split_u128_into_low_high_bits(cumulative_difficulty_128); + + Self { cumulative_difficulty, cumulative_difficulty_top64, current_height, @@ -139,7 +141,7 @@ impl CoreSyncData { } } -/// PeerListEntryBase, information kept on a peer which will be entered +/// `PeerListEntryBase`, information kept on a peer which will be entered /// in a peer list/store. 
#[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct PeerListEntryBase { diff --git a/net/wire/src/p2p/protocol.rs b/net/wire/src/p2p/protocol.rs index 73694d5..1d1d45a 100644 --- a/net/wire/src/p2p/protocol.rs +++ b/net/wire/src/p2p/protocol.rs @@ -127,7 +127,7 @@ pub struct ChainResponse { impl ChainResponse { #[inline] - pub fn cumulative_difficulty(&self) -> u128 { + pub const fn cumulative_difficulty(&self) -> u128 { let cumulative_difficulty = self.cumulative_difficulty_top64 as u128; cumulative_difficulty << 64 | self.cumulative_difficulty_low64 as u128 } @@ -159,7 +159,7 @@ epee_object!( current_blockchain_height: u64, ); -/// A request for Txs we are missing from our TxPool +/// A request for Txs we are missing from our `TxPool` #[derive(Debug, Clone, PartialEq, Eq)] pub struct FluffyMissingTransactionsRequest { /// The Block we are missing the Txs in @@ -177,7 +177,7 @@ epee_object!( missing_tx_indices: Vec as ContainerAsBlob, ); -/// TxPoolCompliment +/// `TxPoolCompliment` #[derive(Debug, Clone, PartialEq, Eq)] pub struct GetTxPoolCompliment { /// Tx Hashes diff --git a/p2p/address-book/Cargo.toml b/p2p/address-book/Cargo.toml index 9cff78a..0871163 100644 --- a/p2p/address-book/Cargo.toml +++ b/p2p/address-book/Cargo.toml @@ -8,7 +8,6 @@ authors = ["Boog900"] [dependencies] cuprate-pruning = { path = "../../pruning" } -cuprate-wire = { path= "../../net/wire" } cuprate-p2p-core = { path = "../p2p-core" } tower = { workspace = true, features = ["util"] } @@ -29,3 +28,6 @@ borsh = { workspace = true, features = ["derive", "std"]} cuprate-test-utils = {path = "../../test-utils"} tokio = { workspace = true, features = ["rt-multi-thread", "macros"]} + +[lints] +workspace = true \ No newline at end of file diff --git a/p2p/address-book/src/book.rs b/p2p/address-book/src/book.rs index 2f0ce6d..9c22981 100644 --- a/p2p/address-book/src/book.rs +++ b/p2p/address-book/src/book.rs @@ -36,7 +36,7 @@ use crate::{ mod tests; /// An entry in the connected list. -pub struct ConnectionPeerEntry { +pub(crate) struct ConnectionPeerEntry { addr: Option, id: u64, handle: ConnectionHandle, @@ -109,14 +109,14 @@ impl AddressBook { match handle.poll_unpin(cx) { Poll::Pending => return, Poll::Ready(Ok(Err(e))) => { - tracing::error!("Could not save peer list to disk, got error: {}", e) + tracing::error!("Could not save peer list to disk, got error: {e}"); } Poll::Ready(Err(e)) => { if e.is_panic() { panic::resume_unwind(e.into_panic()) } } - _ => (), + Poll::Ready(_) => (), } } // the task is finished. @@ -144,6 +144,7 @@ impl AddressBook { let mut internal_addr_disconnected = Vec::new(); let mut addrs_to_ban = Vec::new(); + #[expect(clippy::iter_over_hash_type, reason = "ordering doesn't matter here")] for (internal_addr, peer) in &mut self.connected_peers { if let Some(time) = peer.handle.check_should_ban() { match internal_addr { @@ -158,7 +159,7 @@ impl AddressBook { } } - for (addr, time) in addrs_to_ban.into_iter() { + for (addr, time) in addrs_to_ban { self.ban_peer(addr, time); } @@ -172,12 +173,7 @@ impl AddressBook { .remove(&addr); // If the amount of peers with this ban id is 0 remove the whole set. - if self - .connected_peers_ban_id - .get(&addr.ban_id()) - .unwrap() - .is_empty() - { + if self.connected_peers_ban_id[&addr.ban_id()].is_empty() { self.connected_peers_ban_id.remove(&addr.ban_id()); } // remove the peer from the anchor list. 
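An aside on the two related hunks above: the epee fields carrying cumulative difficulty are plain `u64`s, so `CoreSyncData::new` now splits the 128-bit value with `split_u128_into_low_high_bits` (whose body appears in the `helper/src/map.rs` hunk earlier), and `ChainResponse::cumulative_difficulty` does the inverse shift-and-or. A minimal, self-contained sketch of that round trip follows; the `combine_low_high_bits_to_u128` helper is only for illustration and is not part of this diff.

```rust
// Sketch only: the u128 <-> (low, high) round trip used by `CoreSyncData::new`
// and `ChainResponse::cumulative_difficulty` in the hunks above.
const fn split_u128_into_low_high_bits(value: u128) -> (u64, u64) {
    // Low 64 bits first, then the top 64 bits.
    (value as u64, (value >> 64) as u64)
}

// Hypothetical inverse, shown here purely to demonstrate the round trip.
const fn combine_low_high_bits_to_u128(low: u64, high: u64) -> u128 {
    ((high as u128) << 64) | low as u128
}

fn main() {
    let cumulative_difficulty: u128 = (u64::MAX as u128) + 12_345;
    let (low, high) = split_u128_into_low_high_bits(cumulative_difficulty);
    // `low`/`high` correspond to `cumulative_difficulty` and
    // `cumulative_difficulty_top64` on the wire.
    assert_eq!(combine_low_high_bits_to_u128(low, high), cumulative_difficulty);
}
```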
@@ -188,7 +184,7 @@ impl AddressBook { fn ban_peer(&mut self, addr: Z::Addr, time: Duration) { if self.banned_peers.contains_key(&addr.ban_id()) { - tracing::error!("Tried to ban peer twice, this shouldn't happen.") + tracing::error!("Tried to ban peer twice, this shouldn't happen."); } if let Some(connected_peers_with_ban_id) = self.connected_peers_ban_id.get(&addr.ban_id()) { @@ -242,10 +238,10 @@ impl AddressBook { peer_list.retain_mut(|peer| { peer.adr.make_canonical(); - if !peer.adr.should_add_to_peer_list() { - false - } else { + if peer.adr.should_add_to_peer_list() { !self.is_peer_banned(&peer.adr) + } else { + false } // TODO: check rpc/ p2p ports not the same }); @@ -391,7 +387,7 @@ impl Service> for AddressBook { rpc_credits_per_hash, }, ) - .map(|_| AddressBookResponse::Ok), + .map(|()| AddressBookResponse::Ok), AddressBookRequest::IncomingPeerList(peer_list) => { self.handle_incoming_peer_list(peer_list); Ok(AddressBookResponse::Ok) diff --git a/p2p/address-book/src/book/tests.rs b/p2p/address-book/src/book/tests.rs index 1abea04..aefbd84 100644 --- a/p2p/address-book/src/book/tests.rs +++ b/p2p/address-book/src/book/tests.rs @@ -109,7 +109,7 @@ async fn add_new_peer_already_connected() { }, ), Err(AddressBookError::PeerAlreadyConnected) - ) + ); } #[tokio::test] @@ -143,5 +143,5 @@ async fn banned_peer_removed_from_peer_lists() { .unwrap() .into_inner(), TestNetZoneAddr(1) - ) + ); } diff --git a/p2p/address-book/src/peer_list.rs b/p2p/address-book/src/peer_list.rs index f0a905a..9b98a8a 100644 --- a/p2p/address-book/src/peer_list.rs +++ b/p2p/address-book/src/peer_list.rs @@ -7,31 +7,31 @@ use cuprate_p2p_core::{services::ZoneSpecificPeerListEntryBase, NetZoneAddress, use cuprate_pruning::{PruningSeed, CRYPTONOTE_MAX_BLOCK_HEIGHT}; #[cfg(test)] -pub mod tests; +pub(crate) mod tests; /// A Peer list in the address book. /// /// This could either be the white list or gray list. #[derive(Debug)] -pub struct PeerList { +pub(crate) struct PeerList { /// The peers with their peer data. pub peers: IndexMap>, /// An index of Pruning seed to address, so can quickly grab peers with the blocks /// we want. /// - /// Pruning seeds are sorted by first their log_stripes and then their stripe. + /// Pruning seeds are sorted by first their `log_stripes` and then their stripe. /// This means the first peers in this list will store more blocks than peers /// later on. So when we need a peer with a certain block we look at the peers /// storing more blocks first then work our way to the peers storing less. /// pruning_seeds: BTreeMap>, - /// A hashmap linking ban_ids to addresses. + /// A hashmap linking `ban_ids` to addresses. ban_ids: HashMap<::BanID, Vec>, } impl PeerList { /// Creates a new peer list. 
- pub fn new(list: Vec>) -> PeerList { + pub(crate) fn new(list: Vec>) -> Self { let mut peers = IndexMap::with_capacity(list.len()); let mut pruning_seeds = BTreeMap::new(); let mut ban_ids = HashMap::with_capacity(list.len()); @@ -49,7 +49,7 @@ impl PeerList { peers.insert(peer.adr, peer); } - PeerList { + Self { peers, pruning_seeds, ban_ids, @@ -57,21 +57,20 @@ impl PeerList { } /// Gets the length of the peer list - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.peers.len() } /// Adds a new peer to the peer list - pub fn add_new_peer(&mut self, peer: ZoneSpecificPeerListEntryBase) { + pub(crate) fn add_new_peer(&mut self, peer: ZoneSpecificPeerListEntryBase) { if self.peers.insert(peer.adr, peer).is_none() { - // It's more clear with this - #[allow(clippy::unwrap_or_default)] + #[expect(clippy::unwrap_or_default, reason = "It's more clear with this")] self.pruning_seeds .entry(peer.pruning_seed) .or_insert_with(Vec::new) .push(peer.adr); - #[allow(clippy::unwrap_or_default)] + #[expect(clippy::unwrap_or_default)] self.ban_ids .entry(peer.adr.ban_id()) .or_insert_with(Vec::new) @@ -85,7 +84,7 @@ impl PeerList { /// list. /// /// The given peer will be removed from the peer list. - pub fn take_random_peer( + pub(crate) fn take_random_peer( &mut self, r: &mut R, block_needed: Option, @@ -127,7 +126,7 @@ impl PeerList { None } - pub fn get_random_peers( + pub(crate) fn get_random_peers( &self, r: &mut R, len: usize, @@ -142,7 +141,7 @@ impl PeerList { } /// Returns a mutable reference to a peer. - pub fn get_peer_mut( + pub(crate) fn get_peer_mut( &mut self, peer: &Z::Addr, ) -> Option<&mut ZoneSpecificPeerListEntryBase> { @@ -150,7 +149,7 @@ impl PeerList { } /// Returns true if the list contains this peer. - pub fn contains_peer(&self, peer: &Z::Addr) -> bool { + pub(crate) fn contains_peer(&self, peer: &Z::Addr) -> bool { self.peers.contains_key(peer) } @@ -189,11 +188,11 @@ impl PeerList { /// MUST NOT BE USED ALONE fn remove_peer_from_all_idxs(&mut self, peer: &ZoneSpecificPeerListEntryBase) { self.remove_peer_pruning_idx(peer); - self.remove_peer_ban_idx(peer) + self.remove_peer_ban_idx(peer); } /// Removes a peer from the peer list - pub fn remove_peer( + pub(crate) fn remove_peer( &mut self, peer: &Z::Addr, ) -> Option> { @@ -203,7 +202,7 @@ impl PeerList { } /// Removes all peers with a specific ban id. - pub fn remove_peers_with_ban_id(&mut self, ban_id: &::BanID) { + pub(crate) fn remove_peers_with_ban_id(&mut self, ban_id: &::BanID) { let Some(addresses) = self.ban_ids.get(ban_id) else { // No peers to ban return; @@ -217,8 +216,8 @@ impl PeerList { /// Tries to reduce the peer list to `new_len`. /// /// This function could keep the list bigger than `new_len` if `must_keep_peers`s length - /// is larger than new_len, in that case we will remove as much as we can. - pub fn reduce_list(&mut self, must_keep_peers: &HashSet, new_len: usize) { + /// is larger than `new_len`, in that case we will remove as much as we can. 
+ pub(crate) fn reduce_list(&mut self, must_keep_peers: &HashSet, new_len: usize) { if new_len >= self.len() { return; } diff --git a/p2p/address-book/src/peer_list/tests.rs b/p2p/address-book/src/peer_list/tests.rs index 8d2d220..4b13ae7 100644 --- a/p2p/address-book/src/peer_list/tests.rs +++ b/p2p/address-book/src/peer_list/tests.rs @@ -14,7 +14,7 @@ fn make_fake_peer( ) -> ZoneSpecificPeerListEntryBase { ZoneSpecificPeerListEntryBase { adr: TestNetZoneAddr(id), - id: id as u64, + id: u64::from(id), last_seen: 0, pruning_seed: PruningSeed::decompress(pruning_seed.unwrap_or(0)).unwrap(), rpc_port: 0, @@ -22,14 +22,14 @@ fn make_fake_peer( } } -pub fn make_fake_peer_list( +pub(crate) fn make_fake_peer_list( start_idx: u32, numb_o_peers: u32, ) -> PeerList> { let mut peer_list = Vec::with_capacity(numb_o_peers as usize); for idx in start_idx..(start_idx + numb_o_peers) { - peer_list.push(make_fake_peer(idx, None)) + peer_list.push(make_fake_peer(idx, None)); } PeerList::new(peer_list) @@ -50,7 +50,7 @@ fn make_fake_peer_list_with_random_pruning_seeds( } else { r.gen_range(384..=391) }), - )) + )); } PeerList::new(peer_list) } @@ -70,7 +70,7 @@ fn peer_list_reduce_length() { #[test] fn peer_list_reduce_length_with_peers_we_need() { let mut peer_list = make_fake_peer_list(0, 500); - let must_keep_peers = HashSet::from_iter(peer_list.peers.keys().copied()); + let must_keep_peers = peer_list.peers.keys().copied().collect::>(); let target_len = 49; @@ -92,7 +92,7 @@ fn peer_list_remove_specific_peer() { let peers = peer_list.peers; for (_, addrs) in pruning_idxs { - addrs.iter().for_each(|adr| assert_ne!(adr, &peer.adr)) + addrs.iter().for_each(|adr| assert_ne!(adr, &peer.adr)); } assert!(!peers.contains_key(&peer.adr)); @@ -104,13 +104,13 @@ fn peer_list_pruning_idxs_are_correct() { let mut total_len = 0; for (seed, list) in peer_list.pruning_seeds { - for peer in list.iter() { + for peer in &list { assert_eq!(peer_list.peers.get(peer).unwrap().pruning_seed, seed); total_len += 1; } } - assert_eq!(total_len, peer_list.peers.len()) + assert_eq!(total_len, peer_list.peers.len()); } #[test] @@ -122,11 +122,7 @@ fn peer_list_add_new_peer() { assert_eq!(peer_list.len(), 11); assert_eq!(peer_list.peers.get(&new_peer.adr), Some(&new_peer)); - assert!(peer_list - .pruning_seeds - .get(&new_peer.pruning_seed) - .unwrap() - .contains(&new_peer.adr)); + assert!(peer_list.pruning_seeds[&new_peer.pruning_seed].contains(&new_peer.adr)); } #[test] @@ -164,7 +160,7 @@ fn peer_list_get_peer_with_block() { assert!(peer .pruning_seed .get_next_unpruned_block(1, 1_000_000) - .is_ok()) + .is_ok()); } #[test] diff --git a/p2p/address-book/src/store.rs b/p2p/address-book/src/store.rs index abc42d6..07c117e 100644 --- a/p2p/address-book/src/store.rs +++ b/p2p/address-book/src/store.rs @@ -1,3 +1,8 @@ +#![expect( + single_use_lifetimes, + reason = "false positive on generated derive code on `SerPeerDataV1`" +)] + use std::fs; use borsh::{from_slice, to_vec, BorshDeserialize, BorshSerialize}; @@ -21,7 +26,7 @@ struct DeserPeerDataV1 { gray_list: Vec>, } -pub fn save_peers_to_disk( +pub(crate) fn save_peers_to_disk( cfg: &AddressBookConfig, white_list: &PeerList, gray_list: &PeerList, @@ -38,7 +43,7 @@ pub fn save_peers_to_disk( spawn_blocking(move || fs::write(&file, &data)) } -pub async fn read_peers_from_disk( +pub(crate) async fn read_peers_from_disk( cfg: &AddressBookConfig, ) -> Result< ( diff --git a/p2p/dandelion-tower/Cargo.toml b/p2p/dandelion-tower/Cargo.toml index 976dad6..92e4915 100644 --- 
a/p2p/dandelion-tower/Cargo.toml +++ b/p2p/dandelion-tower/Cargo.toml @@ -24,4 +24,7 @@ thiserror = { workspace = true } [dev-dependencies] tokio = { workspace = true, features = ["rt-multi-thread", "macros", "sync"] } -proptest = { workspace = true, features = ["default"] } \ No newline at end of file +proptest = { workspace = true, features = ["default"] } + +[lints] +workspace = true \ No newline at end of file diff --git a/p2p/dandelion-tower/src/config.rs b/p2p/dandelion-tower/src/config.rs index 6266d60..46c780a 100644 --- a/p2p/dandelion-tower/src/config.rs +++ b/p2p/dandelion-tower/src/config.rs @@ -8,7 +8,7 @@ use std::{ /// (1 - ep) is the probability that a transaction travels for `k` hops before a nodes embargo timeout fires, this constant is (1 - ep). const EMBARGO_FULL_TRAVEL_PROBABILITY: f64 = 0.90; -/// The graph type to use for dandelion routing, the dandelion paper recommends [Graph::FourRegular]. +/// The graph type to use for dandelion routing, the dandelion paper recommends [`Graph::FourRegular`]. /// /// The decision between line graphs and 4-regular graphs depend on the priorities of the system, if /// linkability of transactions is a first order concern then line graphs may be better, however 4-regular graphs @@ -66,7 +66,7 @@ impl DandelionConfig { /// Returns the number of outbound peers to use to stem transactions. /// /// This value depends on the [`Graph`] chosen. - pub fn number_of_stems(&self) -> usize { + pub const fn number_of_stems(&self) -> usize { match self.graph { Graph::Line => 1, Graph::FourRegular => 2, diff --git a/p2p/dandelion-tower/src/lib.rs b/p2p/dandelion-tower/src/lib.rs index 60b5ea5..2c8de71 100644 --- a/p2p/dandelion-tower/src/lib.rs +++ b/p2p/dandelion-tower/src/lib.rs @@ -26,7 +26,7 @@ //! The diffuse service should have a request of [`DiffuseRequest`](traits::DiffuseRequest) and it's error //! should be [`tower::BoxError`]. //! -//! ## Outbound Peer TryStream +//! ## Outbound Peer `TryStream` //! //! The outbound peer [`TryStream`](futures::TryStream) should provide a stream of randomly selected outbound //! peers, these peers will then be used to route stem txs to. @@ -37,7 +37,7 @@ //! ## Peer Service //! //! This service represents a connection to an individual peer, this should be returned from the Outbound Peer -//! TryStream. This should immediately send the transaction to the peer when requested, it should _not_ set +//! `TryStream`. This should immediately send the transaction to the peer when requested, it should _not_ set //! a timer. //! //! The peer service should have a request of [`StemRequest`](traits::StemRequest) and its error diff --git a/p2p/dandelion-tower/src/pool/incoming_tx.rs b/p2p/dandelion-tower/src/pool/incoming_tx.rs index c9a30de..13cdffe 100644 --- a/p2p/dandelion-tower/src/pool/incoming_tx.rs +++ b/p2p/dandelion-tower/src/pool/incoming_tx.rs @@ -30,7 +30,7 @@ pub struct IncomingTxBuilder impl IncomingTxBuilder { /// Creates a new [`IncomingTxBuilder`]. 
- pub fn new(tx: Tx, tx_id: TxId) -> Self { + pub const fn new(tx: Tx, tx_id: TxId) -> Self { Self { tx, tx_id, diff --git a/p2p/dandelion-tower/src/pool/manager.rs b/p2p/dandelion-tower/src/pool/manager.rs index 9e1572e..2ac3302 100644 --- a/p2p/dandelion-tower/src/pool/manager.rs +++ b/p2p/dandelion-tower/src/pool/manager.rs @@ -88,9 +88,7 @@ where .insert(peer.clone()); } - let state = from - .map(|from| TxState::Stem { from }) - .unwrap_or(TxState::Local); + let state = from.map_or(TxState::Local, |from| TxState::Stem { from }); let fut = self .dandelion_router @@ -280,13 +278,15 @@ where }; if let Err(e) = self.handle_incoming_tx(tx, routing_state, tx_id).await { + #[expect(clippy::let_underscore_must_use, reason = "dropped receivers can be ignored")] let _ = res_tx.send(()); tracing::error!("Error handling transaction in dandelion pool: {e}"); return; } - let _ = res_tx.send(()); + #[expect(clippy::let_underscore_must_use)] + let _ = res_tx.send(()); } } } diff --git a/p2p/dandelion-tower/src/router.rs b/p2p/dandelion-tower/src/router.rs index edeccae..88702be 100644 --- a/p2p/dandelion-tower/src/router.rs +++ b/p2p/dandelion-tower/src/router.rs @@ -140,7 +140,7 @@ where State::Stem }; - DandelionRouter { + Self { outbound_peer_discover: Box::pin(outbound_peer_discover), broadcast_svc, current_state, @@ -198,7 +198,7 @@ where fn stem_tx( &mut self, tx: Tx, - from: Id, + from: &Id, ) -> BoxFuture<'static, Result> { if self.stem_peers.is_empty() { tracing::debug!("Stem peers are empty, fluffing stem transaction."); @@ -216,7 +216,7 @@ where }); let Some(peer) = self.stem_peers.get_mut(stem_route) else { - self.stem_routes.remove(&from); + self.stem_routes.remove(from); continue; }; @@ -302,7 +302,7 @@ where tracing::debug!( parent: span, "Peer returned an error on `poll_ready`: {e}, removing from router.", - ) + ); }) .is_ok(), Poll::Pending => { @@ -341,7 +341,7 @@ where State::Stem => { tracing::trace!(parent: &self.span, "Steming transaction"); - self.stem_tx(req.tx, from) + self.stem_tx(req.tx, &from) } }, TxState::Local => { diff --git a/p2p/dandelion-tower/src/tests/mod.rs b/p2p/dandelion-tower/src/tests/mod.rs index 1c6a3e0..601ee25 100644 --- a/p2p/dandelion-tower/src/tests/mod.rs +++ b/p2p/dandelion-tower/src/tests/mod.rs @@ -12,7 +12,7 @@ use crate::{ OutboundPeer, State, }; -pub fn mock_discover_svc() -> ( +pub(crate) fn mock_discover_svc() -> ( impl Stream< Item = Result< OutboundPeer< @@ -49,7 +49,7 @@ pub fn mock_discover_svc() -> ( (discover, rx) } -pub fn mock_broadcast_svc() -> ( +pub(crate) fn mock_broadcast_svc() -> ( impl Service< Req, Future = impl Future> + Send + 'static, @@ -70,8 +70,8 @@ pub fn mock_broadcast_svc() -> ( ) } -#[allow(clippy::type_complexity)] // just test code. 
-pub fn mock_in_memory_backing_pool< +#[expect(clippy::type_complexity, reason = "just test code.")] +pub(crate) fn mock_in_memory_backing_pool< Tx: Clone + Send + 'static, TxID: Clone + Hash + Eq + Send + 'static, >() -> ( @@ -85,11 +85,11 @@ pub fn mock_in_memory_backing_pool< Arc>>, ) { let txs = Arc::new(std::sync::Mutex::new(HashMap::new())); - let txs_2 = txs.clone(); + let txs_2 = Arc::clone(&txs); ( service_fn(move |req: TxStoreRequest| { - let txs = txs.clone(); + let txs = Arc::clone(&txs); async move { match req { TxStoreRequest::Get(tx_id) => { diff --git a/p2p/dandelion-tower/src/tests/pool.rs b/p2p/dandelion-tower/src/tests/pool.rs index b7fa55e..70f642a 100644 --- a/p2p/dandelion-tower/src/tests/pool.rs +++ b/p2p/dandelion-tower/src/tests/pool.rs @@ -39,5 +39,5 @@ async fn basic_functionality() { // TODO: the DandelionPoolManager doesn't handle adding txs to the pool, add more tests here to test // all functionality. //assert!(pool.lock().unwrap().contains_key(&1)); - assert!(broadcast_rx.try_recv().is_ok()) + assert!(broadcast_rx.try_recv().is_ok()); } diff --git a/p2p/p2p-core/src/network_zones/clear.rs b/p2p/p2p-core/src/network_zones/clear.rs index 192e363..acde368 100644 --- a/p2p/p2p-core/src/network_zones/clear.rs +++ b/p2p/p2p-core/src/network_zones/clear.rs @@ -54,8 +54,13 @@ impl NetworkZone for ClearNet { const NAME: &'static str = "ClearNet"; const SEEDS: &'static [Self::Addr] = &[ - ip_v4(37, 187, 74, 171, 18080), + ip_v4(176, 9, 0, 187, 18080), + ip_v4(88, 198, 163, 90, 18080), + ip_v4(66, 85, 74, 134, 18080), + ip_v4(51, 79, 173, 165, 18080), ip_v4(192, 99, 8, 110, 18080), + ip_v4(37, 187, 74, 171, 18080), + ip_v4(77, 172, 183, 193, 18080), ]; const ALLOW_SYNC: bool = true; diff --git a/p2p/p2p/src/constants.rs b/p2p/p2p/src/constants.rs index 44dba91..4c08eb8 100644 --- a/p2p/p2p/src/constants.rs +++ b/p2p/p2p/src/constants.rs @@ -3,6 +3,12 @@ use std::time::Duration; /// The timeout we set on handshakes. pub(crate) const HANDSHAKE_TIMEOUT: Duration = Duration::from_secs(20); +/// The timeout we set on receiving ping requests +pub(crate) const PING_REQUEST_TIMEOUT: Duration = Duration::from_secs(5); + +/// The amount of concurrency (maximum number of simultaneous tasks) we allow for handling ping requests +pub(crate) const PING_REQUEST_CONCURRENCY: usize = 2; + /// The maximum amount of connections to make to seed nodes for when we need peers. pub(crate) const MAX_SEED_CONNECTIONS: usize = 3; diff --git a/p2p/p2p/src/inbound_server.rs b/p2p/p2p/src/inbound_server.rs index aa971a5..80ff38e 100644 --- a/p2p/p2p/src/inbound_server.rs +++ b/p2p/p2p/src/inbound_server.rs @@ -4,9 +4,10 @@ //! them to the handshaker service and then adds them to the client pool. use std::{pin::pin, sync::Arc}; -use futures::StreamExt; +use futures::{SinkExt, StreamExt}; use tokio::{ sync::Semaphore, + task::JoinSet, time::{sleep, timeout}, }; use tower::{Service, ServiceExt}; @@ -17,14 +18,22 @@ use cuprate_p2p_core::{ services::{AddressBookRequest, AddressBookResponse}, AddressBook, ConnectionDirection, NetworkZone, }; +use cuprate_wire::{ + admin::{PingResponse, PING_OK_RESPONSE_STATUS_TEXT}, + AdminRequestMessage, AdminResponseMessage, Message, +}; use crate::{ client_pool::ClientPool, - constants::{HANDSHAKE_TIMEOUT, INBOUND_CONNECTION_COOL_DOWN}, + constants::{ + HANDSHAKE_TIMEOUT, INBOUND_CONNECTION_COOL_DOWN, PING_REQUEST_CONCURRENCY, + PING_REQUEST_TIMEOUT, + }, P2PConfig, }; -/// Starts the inbound server. +/// Starts the inbound server. 
This function will listen to all incoming connections +/// and initiate a handshake if needed, after verifying the address isn't banned. #[instrument(level = "warn", skip_all)] pub async fn inbound_server( client_pool: Arc>, @@ -40,6 +49,10 @@ where HS::Future: Send + 'static, A: AddressBook, { + // Copy the peer_id before it is borrowed for ping responses (this avoids a `clone()`). + let our_peer_id = config.basic_node_data().peer_id; + + // Extract the server config from the P2PConfig; if none is provided we don't listen for inbound connections. let Some(server_config) = config.server_config else { tracing::warn!("No inbound server config provided, not listening for inbound connections."); return Ok(()); @@ -53,13 +66,18 @@ where let mut listener = pin!(listener); + // Create a semaphore to limit the number of inbound connections. let semaphore = Arc::new(Semaphore::new(config.max_inbound_connections)); + // Create the JoinSet used for handling ping requests. + let mut ping_join_set = JoinSet::new(); + // Listen to incoming connections and extract necessary information. while let Some(connection) = listener.next().await { - let Ok((addr, peer_stream, peer_sink)) = connection else { + let Ok((addr, mut peer_stream, mut peer_sink)) = connection else { continue; }; + // If the peer is banned, drop the connection. if let Some(addr) = &addr { let AddressBookResponse::IsPeerBanned(banned) = address_book .ready() @@ -75,11 +93,13 @@ where } } + // Create a new internal ID for the peer. let addr = match addr { Some(addr) => InternalPeerID::KnownAddr(addr), None => InternalPeerID::Unknown(rand::random()), }; + // If we're still below our maximum connection limit, initiate the handshake. if let Ok(permit) = semaphore.clone().try_acquire_owned() { tracing::debug!("Permit free for incoming connection, attempting handshake."); @@ -102,8 +122,39 @@ where .instrument(Span::current()), ); } else { + // Otherwise, check if the node is simply pinging us. tracing::debug!("No permit free for incoming connection."); - // TODO: listen for if the peer is just trying to ping us to see if we are reachable. + + // We only handle `PING_REQUEST_CONCURRENCY` ping requests concurrently. Otherwise we drop the connection immediately. + if ping_join_set.len() < PING_REQUEST_CONCURRENCY { + ping_join_set.spawn( + async move { + // Await the first message from the node. If it is a ping request we respond, otherwise we drop the connection.
+ let fut = timeout(PING_REQUEST_TIMEOUT, peer_stream.next()); + + // Ok if the timeout did not elapse -> Some if there is a message -> Ok if it has been decoded + if let Ok(Some(Ok(Message::Request(AdminRequestMessage::Ping)))) = fut.await + { + let response = peer_sink + .send( + Message::Response(AdminResponseMessage::Ping(PingResponse { + status: PING_OK_RESPONSE_STATUS_TEXT, + peer_id: our_peer_id, + })) + .into(), + ) + .await; + + if let Err(err) = response { + tracing::debug!( + "Unable to respond to ping request from peer ({addr}): {err}" + ) + } + } + } + .instrument(Span::current()), + ); + } } sleep(INBOUND_CONNECTION_COOL_DOWN).await; diff --git a/pruning/Cargo.toml b/pruning/Cargo.toml index 3f5bd27..497c04b 100644 --- a/pruning/Cargo.toml +++ b/pruning/Cargo.toml @@ -13,3 +13,6 @@ borsh = ["dep:borsh"] thiserror = { workspace = true } borsh = { workspace = true, features = ["derive", "std"], optional = true } + +[lints] workspace = true \ No newline at end of file diff --git a/pruning/src/lib.rs b/pruning/src/lib.rs index fdd159c..1f5ee2a 100644 --- a/pruning/src/lib.rs +++ b/pruning/src/lib.rs @@ -71,7 +71,7 @@ impl PruningSeed { /// /// See: [`DecompressedPruningSeed::new`] pub fn new_pruned(stripe: u32, log_stripes: u32) -> Result { - Ok(PruningSeed::Pruned(DecompressedPruningSeed::new( + Ok(Self::Pruned(DecompressedPruningSeed::new( stripe, log_stripes, )?)) @@ -81,9 +81,7 @@ impl PruningSeed { /// /// An error means the pruning seed was invalid. pub fn decompress(seed: u32) -> Result { - Ok(DecompressedPruningSeed::decompress(seed)? - .map(PruningSeed::Pruned) - .unwrap_or(PruningSeed::NotPruned)) + Ok(DecompressedPruningSeed::decompress(seed)?.map_or(Self::NotPruned, Self::Pruned)) } /// Decompresses the seed, performing the same checks as [`PruningSeed::decompress`] and some more according to @@ -103,34 +101,34 @@ impl PruningSeed { } /// Compresses this pruning seed to a u32. - pub fn compress(&self) -> u32 { + pub const fn compress(&self) -> u32 { match self { - PruningSeed::NotPruned => 0, - PruningSeed::Pruned(seed) => seed.compress(), + Self::NotPruned => 0, + Self::Pruned(seed) => seed.compress(), } } /// Returns the `log_stripes` for this seed, if this seed is pruned otherwise [`None`] is returned. - pub fn get_log_stripes(&self) -> Option { + pub const fn get_log_stripes(&self) -> Option { match self { - PruningSeed::NotPruned => None, - PruningSeed::Pruned(seed) => Some(seed.log_stripes), + Self::NotPruned => None, + Self::Pruned(seed) => Some(seed.log_stripes), } } /// Returns the `stripe` for this seed, if this seed is pruned otherwise [`None`] is returned. - pub fn get_stripe(&self) -> Option { + pub const fn get_stripe(&self) -> Option { match self { - PruningSeed::NotPruned => None, - PruningSeed::Pruned(seed) => Some(seed.stripe), + Self::NotPruned => None, + Self::Pruned(seed) => Some(seed.stripe), } } /// Returns `true` if a peer with this pruning seed should have a non-pruned version of a block.
- pub fn has_full_block(&self, height: usize, blockchain_height: usize) -> bool { + pub const fn has_full_block(&self, height: usize, blockchain_height: usize) -> bool { match self { - PruningSeed::NotPruned => true, - PruningSeed::Pruned(seed) => seed.has_full_block(height, blockchain_height), + Self::NotPruned => true, + Self::Pruned(seed) => seed.has_full_block(height, blockchain_height), } } @@ -155,10 +153,8 @@ impl PruningSeed { blockchain_height: usize, ) -> Result, PruningError> { Ok(match self { - PruningSeed::NotPruned => None, - PruningSeed::Pruned(seed) => { - seed.get_next_pruned_block(block_height, blockchain_height)? - } + Self::NotPruned => None, + Self::Pruned(seed) => seed.get_next_pruned_block(block_height, blockchain_height)?, }) } @@ -181,10 +177,8 @@ impl PruningSeed { blockchain_height: usize, ) -> Result { Ok(match self { - PruningSeed::NotPruned => block_height, - PruningSeed::Pruned(seed) => { - seed.get_next_unpruned_block(block_height, blockchain_height)? - } + Self::NotPruned => block_height, + Self::Pruned(seed) => seed.get_next_unpruned_block(block_height, blockchain_height)?, }) } } @@ -199,11 +193,11 @@ impl Ord for PruningSeed { fn cmp(&self, other: &Self) -> Ordering { match (self, other) { // Make sure pruning seeds storing more blocks are greater. - (PruningSeed::NotPruned, PruningSeed::NotPruned) => Ordering::Equal, - (PruningSeed::NotPruned, PruningSeed::Pruned(_)) => Ordering::Greater, - (PruningSeed::Pruned(_), PruningSeed::NotPruned) => Ordering::Less, + (Self::NotPruned, Self::NotPruned) => Ordering::Equal, + (Self::NotPruned, Self::Pruned(_)) => Ordering::Greater, + (Self::Pruned(_), Self::NotPruned) => Ordering::Less, - (PruningSeed::Pruned(seed1), PruningSeed::Pruned(seed2)) => seed1.cmp(seed2), + (Self::Pruned(seed1), Self::Pruned(seed2)) => seed1.cmp(seed2), } } } @@ -222,7 +216,7 @@ pub struct DecompressedPruningSeed { log_stripes: u32, /// The specific portion this peer keeps. /// - /// *MUST* be between 1..=2^log_stripes + /// *MUST* be between `1..=2^log_stripes` stripe: u32, } @@ -268,13 +262,13 @@ impl DecompressedPruningSeed { /// a valid seed you currently MUST pass in a number 1 to 8 for `stripe` /// and 3 for `log_stripes`.* /// - pub fn new(stripe: u32, log_stripes: u32) -> Result { + pub const fn new(stripe: u32, log_stripes: u32) -> Result { if log_stripes > PRUNING_SEED_LOG_STRIPES_MASK { Err(PruningError::LogStripesOutOfRange) } else if !(stripe > 0 && stripe <= (1 << log_stripes)) { Err(PruningError::StripeOutOfRange) } else { - Ok(DecompressedPruningSeed { + Ok(Self { log_stripes, stripe, }) @@ -286,7 +280,7 @@ impl DecompressedPruningSeed { /// Will return Ok(None) if the pruning seed means no pruning. /// /// An error means the pruning seed was invalid. - pub fn decompress(seed: u32) -> Result, PruningError> { + pub const fn decompress(seed: u32) -> Result, PruningError> { if seed == 0 { // No pruning. return Ok(None); @@ -299,20 +293,20 @@ impl DecompressedPruningSeed { return Err(PruningError::StripeOutOfRange); } - Ok(Some(DecompressedPruningSeed { + Ok(Some(Self { log_stripes, stripe, })) } /// Compresses the pruning seed into a u32. - pub fn compress(&self) -> u32 { + pub const fn compress(&self) -> u32 { (self.log_stripes << PRUNING_SEED_LOG_STRIPES_SHIFT) | ((self.stripe - 1) << PRUNING_SEED_STRIPE_SHIFT) } /// Returns `true` if a peer with this pruning seed should have a non-pruned version of a block. 
- pub fn has_full_block(&self, height: usize, blockchain_height: usize) -> bool { + pub const fn has_full_block(&self, height: usize, blockchain_height: usize) -> bool { match get_block_pruning_stripe(height, blockchain_height, self.log_stripes) { Some(block_stripe) => self.stripe == block_stripe, None => true, @@ -419,7 +413,7 @@ impl DecompressedPruningSeed { // We can get the end of our "non-pruning" cycle by getting the next stripe's first un-pruned block height. // So we calculate the next un-pruned block for the next stripe and return it as our next pruned block let next_stripe = 1 + (self.stripe & ((1 << self.log_stripes) - 1)); - let seed = DecompressedPruningSeed::new(next_stripe, self.log_stripes) + let seed = Self::new(next_stripe, self.log_stripes) .expect("We just made sure this stripe is in range for this log_stripe"); let calculated_height = seed.get_next_unpruned_block(block_height, blockchain_height)?; @@ -433,7 +427,7 @@ impl DecompressedPruningSeed { } } -fn get_block_pruning_stripe( +const fn get_block_pruning_stripe( block_height: usize, blockchain_height: usize, log_stripe: u32, @@ -441,9 +435,14 @@ fn get_block_pruning_stripe( if block_height + CRYPTONOTE_PRUNING_TIP_BLOCKS >= blockchain_height { None } else { + #[expect( + clippy::cast_possible_truncation, + clippy::cast_sign_loss, + reason = "it's trivial to prove it's ok to us `as` here" + )] Some( (((block_height / CRYPTONOTE_PRUNING_STRIPE_SIZE) & ((1 << log_stripe) as usize - 1)) - + 1) as u32, // it's trivial to prove it's ok to us `as` here + + 1) as u32, ) } } @@ -483,16 +482,17 @@ mod tests { #[test] fn get_pruning_log_stripe() { let all_valid_seeds = make_all_pruning_seeds(); - for seed in all_valid_seeds.iter() { - assert_eq!(seed.get_log_stripes().unwrap(), 3) + for seed in &all_valid_seeds { + assert_eq!(seed.get_log_stripes().unwrap(), 3); } } #[test] fn get_pruning_stripe() { let all_valid_seeds = make_all_pruning_seeds(); + #[expect(clippy::cast_possible_truncation)] for (i, seed) in all_valid_seeds.iter().enumerate() { - assert_eq!(seed.get_stripe().unwrap(), i as u32 + 1) + assert_eq!(seed.get_stripe().unwrap(), i as u32 + 1); } } @@ -554,7 +554,7 @@ mod tests { assert_eq!( seed.get_next_unpruned_block(0, blockchain_height).unwrap(), i * 4096 - ) + ); } for (i, seed) in all_valid_seeds.iter().enumerate() { @@ -562,7 +562,7 @@ mod tests { seed.get_next_unpruned_block((i + 1) * 4096, blockchain_height) .unwrap(), i * 4096 + 32768 - ) + ); } for (i, seed) in all_valid_seeds.iter().enumerate() { @@ -570,15 +570,15 @@ mod tests { seed.get_next_unpruned_block((i + 8) * 4096, blockchain_height) .unwrap(), i * 4096 + 32768 - ) + ); } - for seed in all_valid_seeds.iter() { + for seed in &all_valid_seeds { assert_eq!( seed.get_next_unpruned_block(76437863 - 1, blockchain_height) .unwrap(), 76437863 - 1 - ) + ); } let zero_seed = PruningSeed::NotPruned; @@ -591,7 +591,7 @@ mod tests { let seed = PruningSeed::decompress(384).unwrap(); // the next unpruned block is the first tip block - assert_eq!(seed.get_next_unpruned_block(5000, 11000).unwrap(), 5500) + assert_eq!(seed.get_next_unpruned_block(5000, 11000).unwrap(), 5500); } #[test] @@ -605,7 +605,7 @@ mod tests { .unwrap() .unwrap(), 0 - ) + ); } for (i, seed) in all_valid_seeds.iter().enumerate() { @@ -614,7 +614,7 @@ mod tests { .unwrap() .unwrap(), (i + 1) * 4096 - ) + ); } for (i, seed) in all_valid_seeds.iter().enumerate() { @@ -623,15 +623,15 @@ mod tests { .unwrap() .unwrap(), (i + 9) * 4096 - ) + ); } - for seed in all_valid_seeds.iter() { + for seed 
in &all_valid_seeds { assert_eq!( seed.get_next_pruned_block(76437863 - 1, blockchain_height) .unwrap(), None - ) + ); } let zero_seed = PruningSeed::NotPruned; @@ -644,6 +644,6 @@ mod tests { let seed = PruningSeed::decompress(384).unwrap(); // there is no next pruned block - assert_eq!(seed.get_next_pruned_block(5000, 10000).unwrap(), None) + assert_eq!(seed.get_next_pruned_block(5000, 10000).unwrap(), None); } } diff --git a/rpc/interface/src/route/bin.rs b/rpc/interface/src/route/bin.rs index 90d06c8..f7e3a01 100644 --- a/rpc/interface/src/route/bin.rs +++ b/rpc/interface/src/route/bin.rs @@ -28,7 +28,6 @@ macro_rules! generate_endpoints_with_input { ),*) => { paste::paste! { $( /// TODO - #[allow(unused_mut)] pub(crate) async fn $endpoint( State(handler): State, mut request: Bytes, @@ -55,7 +54,6 @@ macro_rules! generate_endpoints_with_no_input { ),*) => { paste::paste! { $( /// TODO - #[allow(unused_mut)] pub(crate) async fn $endpoint( State(handler): State, ) -> Result { diff --git a/rpc/interface/src/router_builder.rs b/rpc/interface/src/router_builder.rs index 2e80c43..d18a694 100644 --- a/rpc/interface/src/router_builder.rs +++ b/rpc/interface/src/router_builder.rs @@ -69,7 +69,6 @@ macro_rules! generate_router_builder { /// .all() /// .build(); /// ``` - #[allow(clippy::struct_excessive_bools)] #[derive(Clone)] pub struct RouterBuilder { router: Router, diff --git a/rpc/interface/src/rpc_handler_dummy.rs b/rpc/interface/src/rpc_handler_dummy.rs index 0b01835..9d5009e 100644 --- a/rpc/interface/src/rpc_handler_dummy.rs +++ b/rpc/interface/src/rpc_handler_dummy.rs @@ -57,7 +57,7 @@ impl Service for RpcHandlerDummy { use cuprate_rpc_types::json::JsonRpcRequest as Req; use cuprate_rpc_types::json::JsonRpcResponse as Resp; - #[allow(clippy::default_trait_access)] + #[expect(clippy::default_trait_access)] let resp = match req { Req::GetBlockCount(_) => Resp::GetBlockCount(Default::default()), Req::OnGetBlockHash(_) => Resp::OnGetBlockHash(Default::default()), @@ -112,7 +112,7 @@ impl Service for RpcHandlerDummy { use cuprate_rpc_types::bin::BinRequest as Req; use cuprate_rpc_types::bin::BinResponse as Resp; - #[allow(clippy::default_trait_access)] + #[expect(clippy::default_trait_access)] let resp = match req { Req::GetBlocks(_) => Resp::GetBlocks(Default::default()), Req::GetBlocksByHeight(_) => Resp::GetBlocksByHeight(Default::default()), @@ -142,7 +142,7 @@ impl Service for RpcHandlerDummy { use cuprate_rpc_types::other::OtherRequest as Req; use cuprate_rpc_types::other::OtherResponse as Resp; - #[allow(clippy::default_trait_access)] + #[expect(clippy::default_trait_access)] let resp = match req { Req::GetHeight(_) => Resp::GetHeight(Default::default()), Req::GetTransactions(_) => Resp::GetTransactions(Default::default()), diff --git a/rpc/json-rpc/src/tests.rs b/rpc/json-rpc/src/tests.rs index 3ee6088..99ce126 100644 --- a/rpc/json-rpc/src/tests.rs +++ b/rpc/json-rpc/src/tests.rs @@ -52,7 +52,7 @@ where } /// Tests an input JSON string matches an expected type `T`. -#[allow(clippy::needless_pass_by_value)] // serde signature +#[expect(clippy::needless_pass_by_value, reason = "serde signature")] fn assert_de(json: &'static str, expected: T) where T: DeserializeOwned + std::fmt::Debug + Clone + PartialEq, diff --git a/rpc/types/src/bin.rs b/rpc/types/src/bin.rs index 0dbddea..a68d3e1 100644 --- a/rpc/types/src/bin.rs +++ b/rpc/types/src/bin.rs @@ -138,7 +138,6 @@ define_request! { )] /// /// This response's variant depends upon [`PoolInfoExtent`]. 
-#[allow(dead_code, missing_docs)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum GetBlocksResponse { @@ -157,7 +156,6 @@ impl Default for GetBlocksResponse { } /// Data within [`GetBlocksResponse::PoolInfoNone`]. -#[allow(dead_code, missing_docs)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct GetBlocksResponsePoolInfoNone { @@ -183,7 +181,6 @@ epee_object! { } /// Data within [`GetBlocksResponse::PoolInfoIncremental`]. -#[allow(dead_code, missing_docs)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct GetBlocksResponsePoolInfoIncremental { @@ -215,7 +212,6 @@ epee_object! { } /// Data within [`GetBlocksResponse::PoolInfoFull`]. -#[allow(dead_code, missing_docs)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct GetBlocksResponsePoolInfoFull { @@ -248,7 +244,6 @@ epee_object! { /// [`EpeeObjectBuilder`] for [`GetBlocksResponse`]. /// /// Not for public usage. -#[allow(dead_code, missing_docs)] #[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct __GetBlocksResponseEpeeBuilder { @@ -354,7 +349,6 @@ impl EpeeObjectBuilder for __GetBlocksResponseEpeeBuilder { } #[cfg(feature = "epee")] -#[allow(clippy::cognitive_complexity)] impl EpeeObject for GetBlocksResponse { type Builder = __GetBlocksResponseEpeeBuilder; @@ -397,7 +391,6 @@ impl EpeeObject for GetBlocksResponse { /// See also: [`BinResponse`]. #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] #[cfg_attr(feature = "serde", serde(untagged))] -#[allow(missing_docs)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum BinRequest { GetBlocks(GetBlocksRequest), @@ -444,7 +437,6 @@ impl RpcCallValue for BinRequest { #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] #[cfg_attr(feature = "serde", serde(untagged))] -#[allow(missing_docs)] pub enum BinResponse { GetBlocks(GetBlocksResponse), GetBlocksByHeight(GetBlocksByHeightResponse), diff --git a/rpc/types/src/free.rs b/rpc/types/src/free.rs index 45fb2f7..a41c853 100644 --- a/rpc/types/src/free.rs +++ b/rpc/types/src/free.rs @@ -5,16 +5,16 @@ /// Returns `true` if the input `u` is equal to `0`. #[inline] -#[allow(clippy::trivially_copy_pass_by_ref)] // serde needs `&` -#[allow(dead_code)] // TODO: see if needed after handlers. +#[expect(clippy::trivially_copy_pass_by_ref, reason = "serde signature")] +#[expect(dead_code, reason = "TODO: see if needed after handlers.")] pub(crate) const fn is_zero(u: &u64) -> bool { *u == 0 } /// Returns `true` the input `u` is equal to `1`. #[inline] -#[allow(clippy::trivially_copy_pass_by_ref)] // serde needs `&` -#[allow(dead_code)] // TODO: see if needed after handlers. +#[expect(clippy::trivially_copy_pass_by_ref, reason = "serde signature")] +#[expect(dead_code, reason = "TODO: see if needed after handlers.")] pub(crate) const fn is_one(u: &u64) -> bool { *u == 1 } diff --git a/rpc/types/src/json.rs b/rpc/types/src/json.rs index c5138c2..f40f476 100644 --- a/rpc/types/src/json.rs +++ b/rpc/types/src/json.rs @@ -1590,7 +1590,6 @@ define_request_and_response! 
{ feature = "serde", serde(rename_all = "snake_case", tag = "method", content = "params") )] -#[allow(missing_docs)] pub enum JsonRpcRequest { GetBlockCount(GetBlockCountRequest), OnGetBlockHash(OnGetBlockHashRequest), @@ -1723,7 +1722,6 @@ impl RpcCallValue for JsonRpcRequest { #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] #[cfg_attr(feature = "serde", serde(untagged, rename_all = "snake_case"))] -#[allow(missing_docs)] pub enum JsonRpcResponse { GetBlockCount(GetBlockCountResponse), OnGetBlockHash(OnGetBlockHashResponse), diff --git a/rpc/types/src/lib.rs b/rpc/types/src/lib.rs index f2e1d4f..23764a3 100644 --- a/rpc/types/src/lib.rs +++ b/rpc/types/src/lib.rs @@ -11,6 +11,10 @@ unreachable_code, reason = "TODO: remove after cuprated RpcHandler impl" )] +#![allow( + clippy::allow_attributes, + reason = "macros (internal + serde) make this lint hard to satisfy" +)] mod constants; mod defaults; diff --git a/rpc/types/src/macros.rs b/rpc/types/src/macros.rs index 60ffa90..85f4272 100644 --- a/rpc/types/src/macros.rs +++ b/rpc/types/src/macros.rs @@ -94,6 +94,7 @@ macro_rules! define_request_and_response { } ) => { paste::paste! { $crate::macros::define_request! { + #[allow(dead_code, missing_docs, reason = "inside a macro")] #[doc = $crate::macros::define_request_and_response_doc!( "response" => [<$type_name Response>], $monero_daemon_rpc_doc_link, @@ -118,8 +119,7 @@ macro_rules! define_request_and_response { } $crate::macros::define_response! { - #[allow(dead_code)] - #[allow(missing_docs)] + #[allow(dead_code, missing_docs, reason = "inside a macro")] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = $crate::macros::define_request_and_response_doc!( @@ -236,7 +236,7 @@ macro_rules! define_request { )* } ) => { - #[allow(dead_code, missing_docs)] + #[allow(dead_code, missing_docs, reason = "inside a macro")] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] $( #[$attr] )* diff --git a/rpc/types/src/misc/distribution.rs b/rpc/types/src/misc/distribution.rs index 55d509e..faac7ad 100644 --- a/rpc/types/src/misc/distribution.rs +++ b/rpc/types/src/misc/distribution.rs @@ -76,7 +76,6 @@ impl Default for Distribution { } /// Data within [`Distribution::Uncompressed`]. -#[allow(dead_code, missing_docs)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct DistributionUncompressed { @@ -99,7 +98,6 @@ epee_object! { } /// Data within [`Distribution::CompressedBinary`]. -#[allow(dead_code, missing_docs)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct DistributionCompressedBinary { @@ -132,7 +130,7 @@ epee_object! { /// 1. Compresses the distribution array /// 2. Serializes the compressed data #[cfg(feature = "serde")] -#[allow(clippy::ptr_arg)] +#[expect(clippy::ptr_arg)] fn serialize_distribution_as_compressed_data(v: &Vec, s: S) -> Result where S: serde::Serializer, @@ -162,7 +160,6 @@ where /// [`EpeeObjectBuilder`] for [`Distribution`]. /// /// Not for public usage. 
-#[allow(dead_code, missing_docs)] #[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct __DistributionEpeeBuilder { diff --git a/rpc/types/src/misc/mod.rs b/rpc/types/src/misc/mod.rs index 547682d..8f8ea18 100644 --- a/rpc/types/src/misc/mod.rs +++ b/rpc/types/src/misc/mod.rs @@ -15,7 +15,7 @@ mod binary_string; mod distribution; mod key_image_spent_status; -#[allow(clippy::module_inception)] +#[expect(clippy::module_inception)] mod misc; mod pool_info_extent; mod requested_info; diff --git a/rpc/types/src/other.rs b/rpc/types/src/other.rs index 28c95d2..5b04089 100644 --- a/rpc/types/src/other.rs +++ b/rpc/types/src/other.rs @@ -973,7 +973,6 @@ define_request_and_response! { #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] #[cfg_attr(feature = "serde", serde(untagged))] -#[allow(missing_docs)] pub enum OtherRequest { GetHeight(GetHeightRequest), GetTransactions(GetTransactionsRequest), @@ -1092,7 +1091,6 @@ impl RpcCallValue for OtherRequest { #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] #[cfg_attr(feature = "serde", serde(untagged))] -#[allow(missing_docs)] pub enum OtherResponse { GetHeight(GetHeightResponse), GetTransactions(GetTransactionsResponse), diff --git a/storage/blockchain/Cargo.toml b/storage/blockchain/Cargo.toml index e039903..6eecb89 100644 --- a/storage/blockchain/Cargo.toml +++ b/storage/blockchain/Cargo.toml @@ -15,21 +15,19 @@ default = ["heed", "service"] heed = ["cuprate-database/heed"] redb = ["cuprate-database/redb"] redb-memory = ["cuprate-database/redb-memory"] -service = ["dep:thread_local", "dep:rayon"] +service = ["dep:thread_local", "dep:rayon", "cuprate-helper/thread"] [dependencies] -# FIXME: -# We only need the `thread` feature if `service` is enabled. -# Figure out how to enable features of an already pulled in dependency conditionally. 
cuprate-database = { path = "../database" } cuprate-database-service = { path = "../service" } -cuprate-helper = { path = "../../helper", features = ["fs", "thread", "map"] } +cuprate-helper = { path = "../../helper", features = ["fs", "map"] } cuprate-types = { path = "../../types", features = ["blockchain"] } +cuprate-pruning = { path = "../../pruning" } bitflags = { workspace = true, features = ["std", "serde", "bytemuck"] } bytemuck = { workspace = true, features = ["must_cast", "derive", "min_const_generics", "extern_crate_alloc"] } curve25519-dalek = { workspace = true } -cuprate-pruning = { path = "../../pruning" } +rand = { workspace = true } monero-serai = { workspace = true, features = ["std"] } serde = { workspace = true, optional = true } diff --git a/storage/blockchain/src/ops/alt_block/block.rs b/storage/blockchain/src/ops/alt_block/block.rs new file mode 100644 index 0000000..6bd01cb --- /dev/null +++ b/storage/blockchain/src/ops/alt_block/block.rs @@ -0,0 +1,337 @@ +use bytemuck::TransparentWrapper; +use monero_serai::block::{Block, BlockHeader}; + +use cuprate_database::{DatabaseRo, DatabaseRw, RuntimeError, StorableVec}; +use cuprate_helper::map::{combine_low_high_bits_to_u128, split_u128_into_low_high_bits}; +use cuprate_types::{AltBlockInformation, Chain, ChainId, ExtendedBlockHeader, HardFork}; + +use crate::{ + ops::{ + alt_block::{add_alt_transaction_blob, get_alt_transaction, update_alt_chain_info}, + block::get_block_info, + macros::doc_error, + }, + tables::{Tables, TablesMut}, + types::{AltBlockHeight, BlockHash, BlockHeight, CompactAltBlockInfo}, +}; + +/// Flush all alt-block data from all the alt-block tables. +/// +/// This function completely empties the alt block tables. +pub fn flush_alt_blocks<'a, E: cuprate_database::EnvInner<'a>>( + env_inner: &E, + tx_rw: &mut E::Rw<'_>, +) -> Result<(), RuntimeError> { + use crate::tables::{ + AltBlockBlobs, AltBlockHeights, AltBlocksInfo, AltChainInfos, AltTransactionBlobs, + AltTransactionInfos, + }; + + env_inner.clear_db::(tx_rw)?; + env_inner.clear_db::(tx_rw)?; + env_inner.clear_db::(tx_rw)?; + env_inner.clear_db::(tx_rw)?; + env_inner.clear_db::(tx_rw)?; + env_inner.clear_db::(tx_rw) +} + +/// Add a [`AltBlockInformation`] to the database. +/// +/// This extracts all the data from the input block and +/// maps/adds them to the appropriate database tables. 
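+/// (the alt block height index, alt chain info, compact alt block info, the block blob, and each alt transaction blob).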
+/// +#[doc = doc_error!()] +/// +/// # Panics +/// This function will panic if: +/// - `alt_block.height` is == `0` +/// - `alt_block.txs.len()` != `alt_block.block.transactions.len()` +/// +pub fn add_alt_block( + alt_block: &AltBlockInformation, + tables: &mut impl TablesMut, +) -> Result<(), RuntimeError> { + let alt_block_height = AltBlockHeight { + chain_id: alt_block.chain_id.into(), + height: alt_block.height, + }; + + tables + .alt_block_heights_mut() + .put(&alt_block.block_hash, &alt_block_height)?; + + update_alt_chain_info(&alt_block_height, &alt_block.block.header.previous, tables)?; + + let (cumulative_difficulty_low, cumulative_difficulty_high) = + split_u128_into_low_high_bits(alt_block.cumulative_difficulty); + + let alt_block_info = CompactAltBlockInfo { + block_hash: alt_block.block_hash, + pow_hash: alt_block.pow_hash, + height: alt_block.height, + weight: alt_block.weight, + long_term_weight: alt_block.long_term_weight, + cumulative_difficulty_low, + cumulative_difficulty_high, + }; + + tables + .alt_blocks_info_mut() + .put(&alt_block_height, &alt_block_info)?; + + tables.alt_block_blobs_mut().put( + &alt_block_height, + StorableVec::wrap_ref(&alt_block.block_blob), + )?; + + assert_eq!(alt_block.txs.len(), alt_block.block.transactions.len()); + for tx in &alt_block.txs { + add_alt_transaction_blob(tx, tables)?; + } + + Ok(()) +} + +/// Retrieves an [`AltBlockInformation`] from the database. +/// +/// This function will look at only the blocks with the given [`AltBlockHeight::chain_id`], no others +/// even if they are technically part of this chain. +#[doc = doc_error!()] +pub fn get_alt_block( + alt_block_height: &AltBlockHeight, + tables: &impl Tables, +) -> Result { + let block_info = tables.alt_blocks_info().get(alt_block_height)?; + + let block_blob = tables.alt_block_blobs().get(alt_block_height)?.0; + + let block = Block::read(&mut block_blob.as_slice())?; + + let txs = block + .transactions + .iter() + .map(|tx_hash| get_alt_transaction(tx_hash, tables)) + .collect::>()?; + + Ok(AltBlockInformation { + block, + block_blob, + txs, + block_hash: block_info.block_hash, + pow_hash: block_info.pow_hash, + height: block_info.height, + weight: block_info.weight, + long_term_weight: block_info.long_term_weight, + cumulative_difficulty: combine_low_high_bits_to_u128( + block_info.cumulative_difficulty_low, + block_info.cumulative_difficulty_high, + ), + chain_id: alt_block_height.chain_id.into(), + }) +} + +/// Retrieves the hash of the block at the given `block_height` on the alt chain with +/// the given [`ChainId`]. +/// +/// This function will get blocks from the whole chain, for example if you were to ask for height +/// `0` with any [`ChainId`] (as long that chain actually exists) you will get the main chain genesis. +/// +#[doc = doc_error!()] +pub fn get_alt_block_hash( + block_height: &BlockHeight, + alt_chain: ChainId, + tables: &impl Tables, +) -> Result { + let alt_chains = tables.alt_chain_infos(); + + // First find what [`ChainId`] this block would be stored under. + let original_chain = { + let mut chain = alt_chain.into(); + loop { + let chain_info = alt_chains.get(&chain)?; + + if chain_info.common_ancestor_height < *block_height { + break Chain::Alt(chain.into()); + } + + match chain_info.parent_chain.into() { + Chain::Main => break Chain::Main, + Chain::Alt(alt_chain_id) => { + chain = alt_chain_id.into(); + continue; + } + } + } + }; + + // Get the block hash. 
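+ // Main-chain heights are read via `tables.block_infos()`; alt-chain heights are looked up in `tables.alt_blocks_info()` keyed by `(ChainId, height)`.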
+ match original_chain { + Chain::Main => { + get_block_info(block_height, tables.block_infos()).map(|info| info.block_hash) + } + Chain::Alt(chain_id) => tables + .alt_blocks_info() + .get(&AltBlockHeight { + chain_id: chain_id.into(), + height: *block_height, + }) + .map(|info| info.block_hash), + } +} + +/// Retrieves the [`ExtendedBlockHeader`] of the alt-block with an exact [`AltBlockHeight`]. +/// +/// This function will look at only the blocks with the given [`AltBlockHeight::chain_id`], no others +/// even if they are technically part of this chain. +/// +#[doc = doc_error!()] +pub fn get_alt_block_extended_header_from_height( + height: &AltBlockHeight, + table: &impl Tables, +) -> Result { + let block_info = table.alt_blocks_info().get(height)?; + + let block_blob = table.alt_block_blobs().get(height)?.0; + + let block_header = BlockHeader::read(&mut block_blob.as_slice())?; + + Ok(ExtendedBlockHeader { + version: HardFork::from_version(block_header.hardfork_version) + .expect("Block in DB must have correct version"), + vote: block_header.hardfork_version, + timestamp: block_header.timestamp, + cumulative_difficulty: combine_low_high_bits_to_u128( + block_info.cumulative_difficulty_low, + block_info.cumulative_difficulty_high, + ), + block_weight: block_info.weight, + long_term_weight: block_info.long_term_weight, + }) +} + +#[cfg(test)] +mod tests { + use std::num::NonZero; + + use cuprate_database::{Env, EnvInner, TxRw}; + use cuprate_test_utils::data::{BLOCK_V16_TX0, BLOCK_V1_TX2, BLOCK_V9_TX3}; + use cuprate_types::{Chain, ChainId}; + + use crate::{ + ops::{ + alt_block::{ + add_alt_block, flush_alt_blocks, get_alt_block, + get_alt_block_extended_header_from_height, get_alt_block_hash, + get_alt_chain_history_ranges, + }, + block::{add_block, pop_block}, + }, + tables::{OpenTables, Tables}, + tests::{assert_all_tables_are_empty, map_verified_block_to_alt, tmp_concrete_env}, + types::AltBlockHeight, + }; + + #[expect(clippy::range_plus_one)] + #[test] + fn all_alt_blocks() { + let (env, _tmp) = tmp_concrete_env(); + let env_inner = env.env_inner(); + assert_all_tables_are_empty(&env); + + let chain_id = ChainId(NonZero::new(1).unwrap()); + + // Add initial block. 
+ { + let tx_rw = env_inner.tx_rw().unwrap(); + let mut tables = env_inner.open_tables_mut(&tx_rw).unwrap(); + + let mut initial_block = BLOCK_V1_TX2.clone(); + initial_block.height = 0; + + add_block(&initial_block, &mut tables).unwrap(); + + drop(tables); + TxRw::commit(tx_rw).unwrap(); + } + + let alt_blocks = [ + map_verified_block_to_alt(BLOCK_V9_TX3.clone(), chain_id), + map_verified_block_to_alt(BLOCK_V16_TX0.clone(), chain_id), + ]; + + // Add alt-blocks + { + let tx_rw = env_inner.tx_rw().unwrap(); + let mut tables = env_inner.open_tables_mut(&tx_rw).unwrap(); + + let mut prev_hash = BLOCK_V1_TX2.block_hash; + for (i, mut alt_block) in alt_blocks.into_iter().enumerate() { + let height = i + 1; + + alt_block.height = height; + alt_block.block.header.previous = prev_hash; + alt_block.block_blob = alt_block.block.serialize(); + + add_alt_block(&alt_block, &mut tables).unwrap(); + + let alt_height = AltBlockHeight { + chain_id: chain_id.into(), + height, + }; + + let alt_block_2 = get_alt_block(&alt_height, &tables).unwrap(); + assert_eq!(alt_block.block, alt_block_2.block); + + let headers = get_alt_chain_history_ranges( + 0..(height + 1), + chain_id, + tables.alt_chain_infos(), + ) + .unwrap(); + + assert_eq!(headers.len(), 2); + assert_eq!(headers[1], (Chain::Main, 0..1)); + assert_eq!(headers[0], (Chain::Alt(chain_id), 1..(height + 1))); + + prev_hash = alt_block.block_hash; + + let header = + get_alt_block_extended_header_from_height(&alt_height, &tables).unwrap(); + + assert_eq!(header.timestamp, alt_block.block.header.timestamp); + assert_eq!(header.block_weight, alt_block.weight); + assert_eq!(header.long_term_weight, alt_block.long_term_weight); + assert_eq!( + header.cumulative_difficulty, + alt_block.cumulative_difficulty + ); + assert_eq!( + header.version.as_u8(), + alt_block.block.header.hardfork_version + ); + assert_eq!(header.vote, alt_block.block.header.hardfork_signal); + + let block_hash = get_alt_block_hash(&height, chain_id, &tables).unwrap(); + + assert_eq!(block_hash, alt_block.block_hash); + } + + drop(tables); + TxRw::commit(tx_rw).unwrap(); + } + + { + let mut tx_rw = env_inner.tx_rw().unwrap(); + + flush_alt_blocks(&env_inner, &mut tx_rw).unwrap(); + + let mut tables = env_inner.open_tables_mut(&tx_rw).unwrap(); + pop_block(None, &mut tables).unwrap(); + + drop(tables); + TxRw::commit(tx_rw).unwrap(); + } + + assert_all_tables_are_empty(&env); + } +} diff --git a/storage/blockchain/src/ops/alt_block/chain.rs b/storage/blockchain/src/ops/alt_block/chain.rs new file mode 100644 index 0000000..5b5f3cb --- /dev/null +++ b/storage/blockchain/src/ops/alt_block/chain.rs @@ -0,0 +1,117 @@ +use std::cmp::{max, min}; + +use cuprate_database::{DatabaseRo, DatabaseRw, RuntimeError}; +use cuprate_types::{Chain, ChainId}; + +use crate::{ + ops::macros::{doc_add_alt_block_inner_invariant, doc_error}, + tables::{AltChainInfos, TablesMut}, + types::{AltBlockHeight, AltChainInfo, BlockHash, BlockHeight}, +}; + +/// Updates the [`AltChainInfo`] with information on a new alt-block. +/// +#[doc = doc_add_alt_block_inner_invariant!()] +#[doc = doc_error!()] +/// +/// # Panics +/// +/// This will panic if [`AltBlockHeight::height`] == `0`. 
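+/// (the common ancestor height is computed as `height - 1`, which does not exist when the height is `0`).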
+pub fn update_alt_chain_info( + alt_block_height: &AltBlockHeight, + prev_hash: &BlockHash, + tables: &mut impl TablesMut, +) -> Result<(), RuntimeError> { + let parent_chain = match tables.alt_block_heights().get(prev_hash) { + Ok(alt_parent_height) => Chain::Alt(alt_parent_height.chain_id.into()), + Err(RuntimeError::KeyNotFound) => Chain::Main, + Err(e) => return Err(e), + }; + + // Try to update the info if one already exists for this chain. + let update = tables + .alt_chain_infos_mut() + .update(&alt_block_height.chain_id, |mut info| { + if info.chain_height < alt_block_height.height + 1 { + // If the chain height is increasing we only need to update the chain height. + info.chain_height = alt_block_height.height + 1; + } else { + // If the chain height is not increasing we are popping blocks and need to update the + // split point. + info.common_ancestor_height = alt_block_height.height.checked_sub(1).unwrap(); + info.parent_chain = parent_chain.into(); + } + + info.chain_height = alt_block_height.height + 1; + Some(info) + }); + + match update { + Ok(()) => return Ok(()), + Err(RuntimeError::KeyNotFound) => (), + Err(e) => return Err(e), + } + + // If one doesn't already exist, add it. + + tables.alt_chain_infos_mut().put( + &alt_block_height.chain_id, + &AltChainInfo { + parent_chain: parent_chain.into(), + common_ancestor_height: alt_block_height.height.checked_sub(1).unwrap(), + chain_height: alt_block_height.height + 1, + }, + ) +} + +/// Get the height history of an alt-chain in reverse chronological order. +/// +/// Height history is a list of height ranges with the corresponding [`Chain`] they are stored under. +/// For example, if your range starts at height `0`, the last entry in the list will be [`Chain::Main`] +/// up to the height where the first split occurs. +#[doc = doc_error!()] +pub fn get_alt_chain_history_ranges( + range: std::ops::Range, + alt_chain: ChainId, + alt_chain_infos: &impl DatabaseRo, +) -> Result)>, RuntimeError> { + let mut ranges = Vec::with_capacity(5); + + let mut i = range.end; + let mut current_chain_id = alt_chain.into(); + while i > range.start { + let chain_info = alt_chain_infos.get(&current_chain_id)?; + + let start_height = max(range.start, chain_info.common_ancestor_height + 1); + let end_height = min(i, chain_info.chain_height); + + ranges.push(( + Chain::Alt(current_chain_id.into()), + start_height..end_height, + )); + i = chain_info.common_ancestor_height + 1; + + match chain_info.parent_chain.into() { + Chain::Main => { + ranges.push((Chain::Main, range.start..i)); + break; + } + Chain::Alt(alt_chain_id) => { + let alt_chain_id = alt_chain_id.into(); + + // This shouldn't be possible to hit; however, in a test with custom (invalid) block data + // this caused an infinite loop. + if alt_chain_id == current_chain_id { + return Err(RuntimeError::Io(std::io::Error::other( + "Loop detected in ChainIDs, invalid alt chain.", + ))); + } + + current_chain_id = alt_chain_id; + continue; + } + } + } + + Ok(ranges) +} diff --git a/storage/blockchain/src/ops/alt_block/mod.rs b/storage/blockchain/src/ops/alt_block/mod.rs new file mode 100644 index 0000000..1654d27 --- /dev/null +++ b/storage/blockchain/src/ops/alt_block/mod.rs @@ -0,0 +1,58 @@ +//! Alternative Block/Chain Ops +//! +//! Alternative chains are chains that potentially have more proof-of-work than the main-chain, +//! which we track in case we need to re-org to them. +//! +//! Cuprate uses an ID system for alt-chains. When a split is made from the main-chain we generate +//!
a random [`ChainID`](cuprate_types::ChainId) and assign it to the chain: +//! +//! ```text +//! | +//! | +//! | split +//! |------------- +//! | | +//! | | +//! \|/ \|/ +//! main-chain ChainID(X) +//! ``` +//! +//! In that example if we were to receive an alt-block which immediately follows the top block of `ChainID(X)` +//! then that block will also be stored under `ChainID(X)`. However, if it follows from another block from `ChainID(X)` +//! we will split into a chain with a different ID: +//! +//! ```text +//! | +//! | +//! | split +//! |------------- +//! | | split +//! | |-------------| +//! | | | +//! | | | +//! | | | +//! \|/ \|/ \|/ +//! main-chain ChainID(X) ChainID(Z) +//! ``` +//! +//! As you can see if we wanted to get all the alt-blocks in `ChainID(Z)` that now includes some blocks from `ChainID(X)` as well. +//! [`get_alt_chain_history_ranges`] covers this and is the method to get the ranges of heights needed from each [`ChainID`](cuprate_types::ChainId) +//! to get all the alt-blocks in a given [`ChainID`](cuprate_types::ChainId). +//! +//! Although this should be kept in mind as a possibility, because Cuprate's block downloader will only track a single chain it is +//! unlikely that we will be tracking [`ChainID`](cuprate_types::ChainId)s that don't immediately connect to the main-chain. +//! +//! ## Why not use the block's `previous` field? +//! +//! Although that would be easier, it makes getting a range of block extremely slow, as we have to build the weight cache to verify +//! blocks, roughly 100,000 block headers needed, this cost is too high. +mod block; +mod chain; +mod tx; + +pub use block::{ + add_alt_block, flush_alt_blocks, get_alt_block, get_alt_block_extended_header_from_height, + get_alt_block_hash, +}; +pub use chain::{get_alt_chain_history_ranges, update_alt_chain_info}; +pub use tx::{add_alt_transaction_blob, get_alt_transaction}; diff --git a/storage/blockchain/src/ops/alt_block/tx.rs b/storage/blockchain/src/ops/alt_block/tx.rs new file mode 100644 index 0000000..4185c6c --- /dev/null +++ b/storage/blockchain/src/ops/alt_block/tx.rs @@ -0,0 +1,76 @@ +use bytemuck::TransparentWrapper; +use monero_serai::transaction::Transaction; + +use cuprate_database::{DatabaseRo, DatabaseRw, RuntimeError, StorableVec}; +use cuprate_types::VerifiedTransactionInformation; + +use crate::{ + ops::macros::{doc_add_alt_block_inner_invariant, doc_error}, + tables::{Tables, TablesMut}, + types::{AltTransactionInfo, TxHash}, +}; + +/// Adds a [`VerifiedTransactionInformation`] from an alt-block +/// if it is not already in the DB. +/// +/// If the transaction is in the main-chain this function will still fill in the +/// [`AltTransactionInfos`](crate::tables::AltTransactionInfos) table, as that +/// table holds data which we don't keep around for main-chain txs. +/// +#[doc = doc_add_alt_block_inner_invariant!()] +#[doc = doc_error!()] +pub fn add_alt_transaction_blob( + tx: &VerifiedTransactionInformation, + tables: &mut impl TablesMut, +) -> Result<(), RuntimeError> { + tables.alt_transaction_infos_mut().put( + &tx.tx_hash, + &AltTransactionInfo { + tx_weight: tx.tx_weight, + fee: tx.fee, + tx_hash: tx.tx_hash, + }, + )?; + + if tables.tx_ids().get(&tx.tx_hash).is_ok() + || tables.alt_transaction_blobs().get(&tx.tx_hash).is_ok() + { + return Ok(()); + } + + tables + .alt_transaction_blobs_mut() + .put(&tx.tx_hash, StorableVec::wrap_ref(&tx.tx_blob))?; + + Ok(()) +} + +/// Retrieve a [`VerifiedTransactionInformation`] from the database. 
+/// +#[doc = doc_error!()] +pub fn get_alt_transaction( + tx_hash: &TxHash, + tables: &impl Tables, +) -> Result { + let tx_info = tables.alt_transaction_infos().get(tx_hash)?; + + let tx_blob = match tables.alt_transaction_blobs().get(tx_hash) { + Ok(blob) => blob.0, + Err(RuntimeError::KeyNotFound) => { + let tx_id = tables.tx_ids().get(tx_hash)?; + + let blob = tables.tx_blobs().get(&tx_id)?; + + blob.0 + } + Err(e) => return Err(e), + }; + + Ok(VerifiedTransactionInformation { + tx: Transaction::read(&mut tx_blob.as_slice()).unwrap(), + tx_blob, + tx_weight: tx_info.tx_weight, + fee: tx_info.fee, + tx_hash: tx_info.tx_hash, + }) +} diff --git a/storage/blockchain/src/ops/block.rs b/storage/blockchain/src/ops/block.rs index c56e770..d759ffd 100644 --- a/storage/blockchain/src/ops/block.rs +++ b/storage/blockchain/src/ops/block.rs @@ -2,16 +2,26 @@ //---------------------------------------------------------------------------------------------------- Import use bytemuck::TransparentWrapper; -use monero_serai::block::Block; +use monero_serai::{ + block::{Block, BlockHeader}, + transaction::Transaction, +}; use cuprate_database::{ RuntimeError, StorableVec, {DatabaseRo, DatabaseRw}, }; -use cuprate_helper::map::{combine_low_high_bits_to_u128, split_u128_into_low_high_bits}; -use cuprate_types::{ExtendedBlockHeader, HardFork, VerifiedBlockInformation}; +use cuprate_helper::{ + map::{combine_low_high_bits_to_u128, split_u128_into_low_high_bits}, + tx::tx_fee, +}; +use cuprate_types::{ + AltBlockInformation, ChainId, ExtendedBlockHeader, HardFork, VerifiedBlockInformation, + VerifiedTransactionInformation, +}; use crate::{ ops::{ + alt_block, blockchain::{chain_height, cumulative_generated_coins}, macros::doc_error, output::get_rct_num_outputs, @@ -35,11 +45,6 @@ use super::blockchain::top_block_height; /// This function will panic if: /// - `block.height > u32::MAX` (not normally possible) /// - `block.height` is not != [`chain_height`] -/// -/// # Already exists -/// This function will operate normally even if `block` already -/// exists, i.e., this function will not return `Err` even if you -/// call this function infinitely with the same block. // no inline, too big. pub fn add_block( block: &VerifiedBlockInformation, @@ -76,10 +81,10 @@ pub fn add_block( //------------------------------------------------------ Transaction / Outputs / Key Images // Add the miner transaction first. - { + let mining_tx_index = { let tx = &block.block.miner_transaction; - add_tx(tx, &tx.serialize(), &tx.hash(), &chain_height, tables)?; - } + add_tx(tx, &tx.serialize(), &tx.hash(), &chain_height, tables)? + }; for tx in &block.txs { add_tx(&tx.tx, &tx.tx_blob, &tx.tx_hash, &chain_height, tables)?; @@ -91,9 +96,10 @@ pub fn add_block( // RCT output count needs account for _this_ block's outputs. let cumulative_rct_outs = get_rct_num_outputs(tables.rct_outputs())?; + // `saturating_add` is used here as cumulative generated coins overflows due to tail emission. let cumulative_generated_coins = cumulative_generated_coins(&block.height.saturating_sub(1), tables.block_infos())? 
- + block.generated_coins; + .saturating_add(block.generated_coins); let (cumulative_difficulty_low, cumulative_difficulty_high) = split_u128_into_low_high_bits(block.cumulative_difficulty); @@ -108,16 +114,23 @@ pub fn add_block( cumulative_rct_outs, timestamp: block.block.header.timestamp, block_hash: block.block_hash, - // INVARIANT: #[cfg] @ lib.rs asserts `usize == u64` - weight: block.weight as u64, - long_term_weight: block.long_term_weight as u64, + weight: block.weight, + long_term_weight: block.long_term_weight, + mining_tx_index, }, )?; - // Block blobs. - tables - .block_blobs_mut() - .put(&block.height, StorableVec::wrap_ref(&block.block_blob))?; + // Block header blob. + tables.block_header_blobs_mut().put( + &block.height, + StorableVec::wrap_ref(&block.block.header.serialize()), + )?; + + // Block transaction hashes + tables.block_txs_hashes_mut().put( + &block.height, + StorableVec::wrap_ref(&block.block.transactions), + )?; // Block heights. tables @@ -131,37 +144,87 @@ pub fn add_block( /// Remove the top/latest block from the database. /// /// The removed block's data is returned. +/// +/// If a [`ChainId`] is specified the popped block will be added to the alt block tables under +/// that [`ChainId`]. Otherwise, the block will be completely removed from the DB. #[doc = doc_error!()] /// /// In `pop_block()`'s case, [`RuntimeError::KeyNotFound`] /// will be returned if there are no blocks left. // no inline, too big pub fn pop_block( + move_to_alt_chain: Option, tables: &mut impl TablesMut, ) -> Result<(BlockHeight, BlockHash, Block), RuntimeError> { //------------------------------------------------------ Block Info // Remove block data from tables. - let (block_height, block_hash) = { - let (block_height, block_info) = tables.block_infos_mut().pop_last()?; - (block_height, block_info.block_hash) - }; + let (block_height, block_info) = tables.block_infos_mut().pop_last()?; // Block heights. - tables.block_heights_mut().delete(&block_hash)?; + tables.block_heights_mut().delete(&block_info.block_hash)?; // Block blobs. - // We deserialize the block blob into a `Block`, such - // that we can remove the associated transactions later. - let block_blob = tables.block_blobs_mut().take(&block_height)?.0; - let block = Block::read(&mut block_blob.as_slice())?; + // + // We deserialize the block header blob and mining transaction blob + // to form a `Block`, such that we can remove the associated transactions + // later. 
+ let block_header = tables.block_header_blobs_mut().take(&block_height)?.0; + let block_txs_hashes = tables.block_txs_hashes_mut().take(&block_height)?.0; + let miner_transaction = tables.tx_blobs().get(&block_info.mining_tx_index)?.0; + let block = Block { + header: BlockHeader::read(&mut block_header.as_slice())?, + miner_transaction: Transaction::read(&mut miner_transaction.as_slice())?, + transactions: block_txs_hashes, + }; //------------------------------------------------------ Transaction / Outputs / Key Images remove_tx(&block.miner_transaction.hash(), tables)?; - for tx_hash in &block.transactions { - remove_tx(tx_hash, tables)?; + + let remove_tx_iter = block.transactions.iter().map(|tx_hash| { + let (_, tx) = remove_tx(tx_hash, tables)?; + Ok::<_, RuntimeError>(tx) + }); + + if let Some(chain_id) = move_to_alt_chain { + let txs = remove_tx_iter + .map(|result| { + let tx = result?; + Ok(VerifiedTransactionInformation { + tx_weight: tx.weight(), + tx_blob: tx.serialize(), + tx_hash: tx.hash(), + fee: tx_fee(&tx), + tx, + }) + }) + .collect::, RuntimeError>>()?; + + alt_block::add_alt_block( + &AltBlockInformation { + block: block.clone(), + block_blob: block.serialize(), + txs, + block_hash: block_info.block_hash, + // We know the PoW is valid for this block so just set it so it will always verify as valid. + pow_hash: [0; 32], + height: block_height, + weight: block_info.weight, + long_term_weight: block_info.long_term_weight, + cumulative_difficulty: combine_low_high_bits_to_u128( + block_info.cumulative_difficulty_low, + block_info.cumulative_difficulty_high, + ), + chain_id, + }, + tables, + )?; + } else { + for result in remove_tx_iter { + drop(result?); + } } - Ok((block_height, block_hash, block)) + Ok((block_height, block_info.block_hash, block)) } //---------------------------------------------------------------------------------------------------- `get_block_*` @@ -231,31 +294,32 @@ pub fn get_block_extended_header( /// Same as [`get_block_extended_header`] but with a [`BlockHeight`]. 
#[doc = doc_error!()] -#[allow(clippy::missing_panics_doc)] // The panic is only possible with a corrupt DB +#[expect( + clippy::missing_panics_doc, + reason = "The panic is only possible with a corrupt DB" +)] #[inline] pub fn get_block_extended_header_from_height( block_height: &BlockHeight, tables: &impl Tables, ) -> Result { let block_info = tables.block_infos().get(block_height)?; - let block_blob = tables.block_blobs().get(block_height)?.0; - let block = Block::read(&mut block_blob.as_slice())?; + let block_header_blob = tables.block_header_blobs().get(block_height)?.0; + let block_header = BlockHeader::read(&mut block_header_blob.as_slice())?; let cumulative_difficulty = combine_low_high_bits_to_u128( block_info.cumulative_difficulty_low, block_info.cumulative_difficulty_high, ); - // INVARIANT: #[cfg] @ lib.rs asserts `usize == u64` - #[allow(clippy::cast_possible_truncation)] Ok(ExtendedBlockHeader { cumulative_difficulty, - version: HardFork::from_version(block.header.hardfork_version) + version: HardFork::from_version(block_header.hardfork_version) .expect("Stored block must have a valid hard-fork"), vote: block.header.hardfork_signal, timestamp: block.header.timestamp, - block_weight: block_info.weight as usize, - long_term_weight: block_info.long_term_weight as usize, + block_weight: block_info.weight, + long_term_weight: block_info.long_term_weight, height: *block_height as u64, }) } @@ -309,25 +373,21 @@ pub fn block_exists( //---------------------------------------------------------------------------------------------------- Tests #[cfg(test)] -#[allow( - clippy::significant_drop_tightening, - clippy::cognitive_complexity, - clippy::too_many_lines -)] +#[expect(clippy::too_many_lines)] mod test { use pretty_assertions::assert_eq; use cuprate_database::{Env, EnvInner, TxRw}; use cuprate_test_utils::data::{BLOCK_V16_TX0, BLOCK_V1_TX2, BLOCK_V9_TX3}; - use super::*; - use crate::{ ops::tx::{get_tx, tx_exists}, tables::OpenTables, tests::{assert_all_tables_are_empty, tmp_concrete_env, AssertTableLen}, }; + use super::*; + /// Tests all above block functions. /// /// Note that this doesn't test the correctness of values added, as the @@ -379,7 +439,8 @@ mod test { // Assert only the proper tables were added to. AssertTableLen { block_infos: 3, - block_blobs: 3, + block_header_blobs: 3, + block_txs_hashes: 3, block_heights: 3, key_images: 69, num_outputs: 41, @@ -462,7 +523,8 @@ mod test { for block_hash in block_hashes.into_iter().rev() { println!("pop_block(): block_hash: {}", hex::encode(block_hash)); - let (_popped_height, popped_hash, _popped_block) = pop_block(&mut tables).unwrap(); + let (_popped_height, popped_hash, _popped_block) = + pop_block(None, &mut tables).unwrap(); assert_eq!(block_hash, popped_hash); diff --git a/storage/blockchain/src/ops/blockchain.rs b/storage/blockchain/src/ops/blockchain.rs index dcea6d4..5c5a27a 100644 --- a/storage/blockchain/src/ops/blockchain.rs +++ b/storage/blockchain/src/ops/blockchain.rs @@ -26,7 +26,7 @@ use crate::{ pub fn chain_height( table_block_heights: &impl DatabaseRo, ) -> Result { - #[allow(clippy::cast_possible_truncation)] // we enforce 64-bit + #[expect(clippy::cast_possible_truncation, reason = "we enforce 64-bit")] table_block_heights.len().map(|height| height as usize) } @@ -49,7 +49,7 @@ pub fn top_block_height( ) -> Result { match table_block_heights.len()? 
{ 0 => Err(RuntimeError::KeyNotFound), - #[allow(clippy::cast_possible_truncation)] // we enforce 64-bit + #[expect(clippy::cast_possible_truncation, reason = "we enforce 64-bit")] height => Ok(height as usize - 1), } } @@ -148,7 +148,8 @@ mod test { // Assert reads are correct. AssertTableLen { block_infos: 3, - block_blobs: 3, + block_header_blobs: 3, + block_txs_hashes: 3, block_heights: 3, key_images: 69, num_outputs: 41, diff --git a/storage/blockchain/src/ops/macros.rs b/storage/blockchain/src/ops/macros.rs index b7cdba4..18ec506 100644 --- a/storage/blockchain/src/ops/macros.rs +++ b/storage/blockchain/src/ops/macros.rs @@ -31,3 +31,25 @@ When calling this function, ensure that either: }; } pub(super) use doc_add_block_inner_invariant; + +/// Generate `# Invariant` documentation for internal alt block `fn`'s +/// that should be called directly with caution. +/// +/// This is pretty much the same as [`doc_add_block_inner_invariant`], +/// it's not worth the effort to reduce the duplication. +macro_rules! doc_add_alt_block_inner_invariant { + () => { + r#"# ⚠️ Invariant ⚠️ +This function mainly exists to be used internally by the parent function [`crate::ops::alt_block::add_alt_block`]. + +`add_alt_block()` makes sure all data related to the input is mutated, while +this function _does not_, it specifically mutates _particular_ tables. + +This is usually undesired - although this function is still available to call directly. + +When calling this function, ensure that either: +1. This effect (incomplete database mutation) is what is desired, or that... +2. ...the other tables will also be mutated to a correct state"# + }; +} +pub(super) use doc_add_alt_block_inner_invariant; diff --git a/storage/blockchain/src/ops/mod.rs b/storage/blockchain/src/ops/mod.rs index 4ff7dff..285aa24 100644 --- a/storage/blockchain/src/ops/mod.rs +++ b/storage/blockchain/src/ops/mod.rs @@ -94,7 +94,7 @@ //! // Read the data, assert it is correct. //! let tx_rw = env_inner.tx_rw()?; //! let mut tables = env_inner.open_tables_mut(&tx_rw)?; -//! let (height, hash, serai_block) = pop_block(&mut tables)?; +//! let (height, hash, serai_block) = pop_block(None, &mut tables)?; //! //! assert_eq!(height, 0); //! assert_eq!(serai_block, block.block); @@ -102,6 +102,7 @@ //! # Ok(()) } //! ``` +pub mod alt_block; pub mod block; pub mod blockchain; pub mod key_image; diff --git a/storage/blockchain/src/ops/output.rs b/storage/blockchain/src/ops/output.rs index f3453e4..1c7c1d7 100644 --- a/storage/blockchain/src/ops/output.rs +++ b/storage/blockchain/src/ops/output.rs @@ -316,7 +316,8 @@ mod test { // Assert proper tables were added to. AssertTableLen { block_infos: 0, - block_blobs: 0, + block_header_blobs: 0, + block_txs_hashes: 0, block_heights: 0, key_images: 0, num_outputs: 1, diff --git a/storage/blockchain/src/ops/tx.rs b/storage/blockchain/src/ops/tx.rs index e7dbdcf..c9799a2 100644 --- a/storage/blockchain/src/ops/tx.rs +++ b/storage/blockchain/src/ops/tx.rs @@ -366,7 +366,8 @@ mod test { // Assert only the proper tables were added to. 
AssertTableLen { block_infos: 0, - block_blobs: 0, + block_header_blobs: 0, + block_txs_hashes: 0, block_heights: 0, key_images: 4, // added to key images pruned_tx_blobs: 0, diff --git a/storage/blockchain/src/service/free.rs b/storage/blockchain/src/service/free.rs index 2e7c908..d8a878c 100644 --- a/storage/blockchain/src/service/free.rs +++ b/storage/blockchain/src/service/free.rs @@ -4,11 +4,14 @@ use std::sync::Arc; use cuprate_database::{ConcreteEnv, InitError}; +use cuprate_types::{AltBlockInformation, VerifiedBlockInformation}; -use crate::service::{init_read_service, init_write_service}; use crate::{ config::Config, - service::types::{BlockchainReadHandle, BlockchainWriteHandle}, + service::{ + init_read_service, init_write_service, + types::{BlockchainReadHandle, BlockchainWriteHandle}, + }, }; //---------------------------------------------------------------------------------------------------- Init @@ -81,6 +84,44 @@ pub(super) const fn compact_history_genesis_not_included INITIAL_BLOCKS && !(top_block_height - INITIAL_BLOCKS + 2).is_power_of_two() } +//---------------------------------------------------------------------------------------------------- Map Block +/// Maps [`AltBlockInformation`] to [`VerifiedBlockInformation`] +/// +/// # Panics +/// This will panic if the block is invalid, so should only be used on blocks that have been popped from +/// the main-chain. +pub(super) fn map_valid_alt_block_to_verified_block( + alt_block: AltBlockInformation, +) -> VerifiedBlockInformation { + let total_fees = alt_block.txs.iter().map(|tx| tx.fee).sum::(); + let total_miner_output = alt_block + .block + .miner_transaction + .prefix() + .outputs + .iter() + .map(|out| out.amount.unwrap_or(0)) + .sum::(); + + VerifiedBlockInformation { + block: alt_block.block, + block_blob: alt_block.block_blob, + txs: alt_block + .txs + .into_iter() + .map(TryInto::try_into) + .collect::>() + .unwrap(), + block_hash: alt_block.block_hash, + pow_hash: alt_block.pow_hash, + height: alt_block.height, + generated_coins: total_miner_output - total_fees, + weight: alt_block.weight, + long_term_weight: alt_block.long_term_weight, + cumulative_difficulty: alt_block.cumulative_difficulty, + } +} + //---------------------------------------------------------------------------------------------------- Tests #[cfg(test)] diff --git a/storage/blockchain/src/service/mod.rs b/storage/blockchain/src/service/mod.rs index 12357f3..fed0bd6 100644 --- a/storage/blockchain/src/service/mod.rs +++ b/storage/blockchain/src/service/mod.rs @@ -94,7 +94,7 @@ //! //! // Block write was OK. //! let response = response_channel.await?; -//! assert_eq!(response, BlockchainResponse::WriteBlockOk); +//! assert_eq!(response, BlockchainResponse::Ok); //! //! // Now, let's try getting the block hash //! // of the block we just wrote. 
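The `map_valid_alt_block_to_verified_block` helper above rebuilds `generated_coins` as the coinbase output total minus the fees collected in the block. A tiny standalone sketch of that arithmetic, using made-up amounts rather than real chain values:

```rust
fn main() {
    // Hypothetical amounts in atomic units, only to illustrate the arithmetic
    // used when mapping a popped alt-block back to a verified block.
    let miner_outputs: [u64; 2] = [600_000_000_000, 1_500_000_000];
    let tx_fees: [u64; 3] = [500_000_000, 400_000_000, 600_000_000];

    let total_miner_output: u64 = miner_outputs.iter().sum();
    let total_fees: u64 = tx_fees.iter().sum();

    // The coinbase pays out the newly generated coins plus all collected fees,
    // so subtracting the fees leaves the generated coins.
    let generated_coins = total_miner_output - total_fees;

    assert_eq!(generated_coins, 600_000_000_000);
}
```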
diff --git a/storage/blockchain/src/service/read.rs b/storage/blockchain/src/service/read.rs index 8f2d850..86d6559 100644 --- a/storage/blockchain/src/service/read.rs +++ b/storage/blockchain/src/service/read.rs @@ -9,6 +9,7 @@ use std::{ use monero_serai::block::Block; use rayon::{ iter::{IntoParallelIterator, ParallelIterator}, + prelude::*, ThreadPool, }; use thread_local::ThreadLocal; @@ -18,11 +19,15 @@ use cuprate_database_service::{init_thread_pool, DatabaseReadService, ReaderThre use cuprate_helper::map::combine_low_high_bits_to_u128; use cuprate_types::{ blockchain::{BlockchainReadRequest, BlockchainResponse}, - Chain, ExtendedBlockHeader, OutputOnChain, + Chain, ChainId, ExtendedBlockHeader, OutputOnChain, }; use crate::{ ops::{ + alt_block::{ + get_alt_block, get_alt_block_extended_header_from_height, get_alt_block_hash, + get_alt_chain_history_ranges, + }, block::{ block_exists, get_block, get_block_by_hash, get_block_extended_header, get_block_extended_header_from_height, get_block_extended_header_top, get_block_height, @@ -36,8 +41,10 @@ use crate::{ free::{compact_history_genesis_not_included, compact_history_index_to_height_offset}, types::{BlockchainReadHandle, ResponseResult}, }, - tables::{BlockBlobs, BlockHeights, BlockInfos, KeyImages, OpenTables, Tables}, - types::{Amount, AmountIndex, BlockHash, BlockHeight, KeyImage, PreRctOutputId}, + tables::{AltBlockHeights, BlockHeights, BlockInfos, OpenTables, Tables}, + types::{ + AltBlockHeight, Amount, AmountIndex, BlockHash, BlockHeight, KeyImage, PreRctOutputId, + }, }; //---------------------------------------------------------------------------------------------------- init_read_service @@ -97,7 +104,7 @@ fn map_request( R::TopBlockFull => top_block_full(env), R::CurrentHardFork => current_hard_fork(env), R::BlockHash(height, chain) => block_hash(env, height, chain), - R::FindBlock(_) => todo!("Add alt blocks to DB"), + R::FindBlock(block_hash) => find_block(env, block_hash), R::FilterUnknownHashes(hashes) => filter_unknown_hashes(env, hashes), R::BlockExtendedHeaderInRange(range, chain) => { block_extended_header_in_range(env, range, chain) @@ -111,6 +118,7 @@ fn map_request( R::CompactChainHistory => compact_chain_history(env), R::FindFirstUnknown(block_ids) => find_first_unknown(env, &block_ids), R::CumulativeBlockWeightLimit => cumulative_block_weight_limit(env), + R::AltBlocksInChain(chain_id) => alt_blocks_in_chain(env, chain_id), } /* SOMEDAY: post-request handling, run some code for each request? */ @@ -154,7 +162,6 @@ fn thread_local(env: &impl Env) -> ThreadLocal { macro_rules! get_tables { ($env_inner:ident, $tx_ro:ident, $tables:ident) => {{ $tables.get_or_try(|| { - #[allow(clippy::significant_drop_in_scrutinee)] match $env_inner.open_tables($tx_ro) { // SAFETY: see above macro doc comment. Ok(tables) => Ok(unsafe { crate::unsafe_sendable::UnsafeSendable::new(tables) }), @@ -190,46 +197,49 @@ macro_rules! get_tables { /// [`BlockchainReadRequest::Block`]. #[inline] fn block(env: &ConcreteEnv, block_height: BlockHeight) -> ResponseResult { - // Single-threaded, no `ThreadLocal` required. - let env_inner = env.env_inner(); - let tx_ro = env_inner.tx_ro()?; - let table_block_blobs = env_inner.open_db_ro::(&tx_ro)?; + Ok(todo!()) + // // Single-threaded, no `ThreadLocal` required. 
+ // let env_inner = env.env_inner(); + // let tx_ro = env_inner.tx_ro()?; + // let table_block_blobs = env_inner.open_db_ro::(&tx_ro)?; - Ok(BlockchainResponse::Block(get_block( - &block_height, - &table_block_blobs, - )?)) + // Ok(BlockchainResponse::Block(get_block( + // &block_height, + // &table_block_blobs, + // )?)) } /// [`BlockchainReadRequest::BlockByHash`]. #[inline] fn block_by_hash(env: &ConcreteEnv, block_hash: BlockHash) -> ResponseResult { - // Single-threaded, no `ThreadLocal` required. - let env_inner = env.env_inner(); - let tx_ro = env_inner.tx_ro()?; - let table_block_heights = env_inner.open_db_ro::(&tx_ro)?; - let table_block_blobs = env_inner.open_db_ro::(&tx_ro)?; + Ok(todo!()) + // // Single-threaded, no `ThreadLocal` required. + // let env_inner = env.env_inner(); + // let tx_ro = env_inner.tx_ro()?; + // let table_block_heights = env_inner.open_db_ro::(&tx_ro)?; + // let table_block_blobs = env_inner.open_db_ro::(&tx_ro)?; - Ok(BlockchainResponse::BlockByHash(get_block_by_hash( - &block_hash, - &table_block_heights, - &table_block_blobs, - )?)) + // Ok(BlockchainResponse::BlockByHash(get_block_by_hash( + // &block_hash, + // &table_block_heights, + // &table_block_blobs, + // )?)) } /// [`BlockchainReadRequest::TopBlock`]. #[inline] fn top_block(env: &ConcreteEnv) -> ResponseResult { - // Single-threaded, no `ThreadLocal` required. - let env_inner = env.env_inner(); - let tx_ro = env_inner.tx_ro()?; - let table_block_heights = env_inner.open_db_ro::(&tx_ro)?; - let table_block_blobs = env_inner.open_db_ro::(&tx_ro)?; + Ok(todo!()) + // // Single-threaded, no `ThreadLocal` required. + // let env_inner = env.env_inner(); + // let tx_ro = env_inner.tx_ro()?; + // let table_block_heights = env_inner.open_db_ro::(&tx_ro)?; + // let table_block_blobs = env_inner.open_db_ro::(&tx_ro)?; - Ok(BlockchainResponse::TopBlock(get_top_block( - &table_block_heights, - &table_block_blobs, - )?)) + // Ok(BlockchainResponse::TopBlock(get_top_block( + // &table_block_heights, + // &table_block_blobs, + // )?)) } /// [`BlockchainReadRequest::BlockExtendedHeader`]. @@ -307,12 +317,41 @@ fn block_hash(env: &ConcreteEnv, block_height: BlockHeight, chain: Chain) -> Res let block_hash = match chain { Chain::Main => get_block_info(&block_height, &table_block_infos)?.block_hash, - Chain::Alt(_) => todo!("Add alt blocks to DB"), + Chain::Alt(chain) => { + get_alt_block_hash(&block_height, chain, &env_inner.open_tables(&tx_ro)?)? + } }; Ok(BlockchainResponse::BlockHash(block_hash)) } +/// [`BlockchainReadRequest::FindBlock`] +fn find_block(env: &ConcreteEnv, block_hash: BlockHash) -> ResponseResult { + // Single-threaded, no `ThreadLocal` required. + let env_inner = env.env_inner(); + let tx_ro = env_inner.tx_ro()?; + + let table_block_heights = env_inner.open_db_ro::(&tx_ro)?; + + // Check the main chain first. + match table_block_heights.get(&block_hash) { + Ok(height) => return Ok(BlockchainResponse::FindBlock(Some((Chain::Main, height)))), + Err(RuntimeError::KeyNotFound) => (), + Err(e) => return Err(e), + } + + let table_alt_block_heights = env_inner.open_db_ro::(&tx_ro)?; + + match table_alt_block_heights.get(&block_hash) { + Ok(height) => Ok(BlockchainResponse::FindBlock(Some(( + Chain::Alt(height.chain_id.into()), + height.height, + )))), + Err(RuntimeError::KeyNotFound) => Ok(BlockchainResponse::FindBlock(None)), + Err(e) => Err(e), + } +} + /// [`BlockchainReadRequest::FilterUnknownHashes`]. 
#[inline] fn filter_unknown_hashes(env: &ConcreteEnv, mut hashes: HashSet) -> ResponseResult { @@ -363,7 +402,37 @@ fn block_extended_header_in_range( get_block_extended_header_from_height(&block_height, tables) }) .collect::, RuntimeError>>()?, - Chain::Alt(_) => todo!("Add alt blocks to DB"), + Chain::Alt(chain_id) => { + let ranges = { + let tx_ro = tx_ro.get_or_try(|| env_inner.tx_ro())?; + let tables = get_tables!(env_inner, tx_ro, tables)?.as_ref(); + let alt_chains = tables.alt_chain_infos(); + + get_alt_chain_history_ranges(range, chain_id, alt_chains)? + }; + + ranges + .par_iter() + .rev() + .flat_map(|(chain, range)| { + range.clone().into_par_iter().map(|height| { + let tx_ro = tx_ro.get_or_try(|| env_inner.tx_ro())?; + let tables = get_tables!(env_inner, tx_ro, tables)?.as_ref(); + + match *chain { + Chain::Main => get_block_extended_header_from_height(&height, tables), + Chain::Alt(chain_id) => get_alt_block_extended_header_from_height( + &AltBlockHeight { + chain_id: chain_id.into(), + height, + }, + tables, + ), + } + }) + }) + .collect::, _>>()? + } }; Ok(BlockchainResponse::BlockExtendedHeaderInRange(vec)) @@ -448,8 +517,10 @@ fn number_outputs_with_amount(env: &ConcreteEnv, amounts: Vec) -> Respon let tables = thread_local(env); // Cache the amount of RCT outputs once. - // INVARIANT: #[cfg] @ lib.rs asserts `usize == u64` - #[allow(clippy::cast_possible_truncation)] + #[expect( + clippy::cast_possible_truncation, + reason = "INVARIANT: #[cfg] @ lib.rs asserts `usize == u64`" + )] let num_rct_outputs = { let tx_ro = env_inner.tx_ro()?; let tables = env_inner.open_tables(&tx_ro)?; @@ -469,8 +540,10 @@ fn number_outputs_with_amount(env: &ConcreteEnv, amounts: Vec) -> Respon } else { // v1 transactions. match tables.num_outputs().get(&amount) { - // INVARIANT: #[cfg] @ lib.rs asserts `usize == u64` - #[allow(clippy::cast_possible_truncation)] + #[expect( + clippy::cast_possible_truncation, + reason = "INVARIANT: #[cfg] @ lib.rs asserts `usize == u64`" + )] Ok(count) => Ok((amount, count as usize)), // If we get a request for an `amount` that doesn't exist, // we return `0` instead of an error. @@ -487,16 +560,18 @@ fn number_outputs_with_amount(env: &ConcreteEnv, amounts: Vec) -> Respon /// [`BlockchainReadRequest::KeyImageSpent`]. #[inline] fn key_image_spent(env: &ConcreteEnv, key_image: KeyImage) -> ResponseResult { - // Single-threaded, no `ThreadLocal` required. - let env_inner = env.env_inner(); - let tx_ro = env_inner.tx_ro()?; - let table_key_images = env_inner.open_db_ro::(&tx_ro)?; + Ok(todo!()) - match key_image_exists(&key_image, &table_key_images) { - Ok(false) => Ok(BlockchainResponse::KeyImagesSpent(false)), // Key image was NOT found. - Ok(true) => Ok(BlockchainResponse::KeyImagesSpent(true)), // Key image was found. - Err(e) => Err(e), // A database error occurred. - } + // // Single-threaded, no `ThreadLocal` required. + // let env_inner = env.env_inner(); + // let tx_ro = env_inner.tx_ro()?; + // let table_key_images = env_inner.open_db_ro::(&tx_ro)?; + + // match key_image_exists(&key_image, &table_key_images) { + // Ok(false) => Ok(BlockchainResponse::KeyImagesSpent(false)), // Key image was NOT found. + // Ok(true) => Ok(BlockchainResponse::KeyImagesSpent(true)), // Key image was found. + // Err(e) => Err(e), // A database error occurred. + // } } /// [`BlockchainReadRequest::KeyImagesSpent`]. 
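Both the `Chain::Alt` branch of `block_extended_header_in_range` above and `alt_blocks_in_chain` below lean on `get_alt_chain_history_ranges` handing back, per ancestor chain, the range of heights that belong to the requested alt-chain. A minimal sketch of that idea with toy types and hypothetical data (not the crate's real API):

```rust
use std::ops::Range;

// Toy stand-ins for the crate's `Chain`/`ChainId` types.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Chain {
    Main,
    Alt(u64),
}

fn main() {
    // Hypothetical history for the `ChainID(Z)` example from the module docs:
    // heights 0..100 sit on the main chain, 100..150 were stored under
    // `ChainID(X)` before the second split, and 150..160 under `ChainID(Z)`.
    let history: Vec<(Chain, Range<usize>)> = vec![
        (Chain::Alt(2), 150..160),
        (Chain::Alt(1), 100..150),
        (Chain::Main, 0..100),
    ];

    // Walk the ranges oldest-first and record every (chain, height) lookup
    // needed to read the full alt-chain.
    let mut lookups = Vec::new();
    for (chain, range) in history.iter().rev() {
        for height in range.clone() {
            lookups.push((*chain, height));
        }
    }

    assert_eq!(lookups.len(), 160);
    assert_eq!(lookups[0], (Chain::Main, 0));
    assert_eq!(lookups[159], (Chain::Alt(2), 159));
}
```

In the real code each `(chain, range)` pair is resolved against that chain's own tables, which is how `ChainID(Z)` from the `alt_block` module docs can be assembled even though some of its blocks are physically stored under `ChainID(X)`.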
@@ -625,3 +700,45 @@ fn cumulative_block_weight_limit(env: &ConcreteEnv) -> ResponseResult { Ok(BlockchainResponse::CumulativeBlockWeightLimit(limit)) } + +/// [`BlockchainReadRequest::AltBlocksInChain`] +fn alt_blocks_in_chain(env: &ConcreteEnv, chain_id: ChainId) -> ResponseResult { + // Prepare tx/tables in `ThreadLocal`. + let env_inner = env.env_inner(); + let tx_ro = thread_local(env); + let tables = thread_local(env); + + // Get the history of this alt-chain. + let history = { + let tx_ro = tx_ro.get_or_try(|| env_inner.tx_ro())?; + let tables = get_tables!(env_inner, tx_ro, tables)?.as_ref(); + get_alt_chain_history_ranges(0..usize::MAX, chain_id, tables.alt_chain_infos())? + }; + + // Get all the blocks until we join the main-chain. + let blocks = history + .par_iter() + .rev() + .skip(1) + .flat_map(|(chain_id, range)| { + let Chain::Alt(chain_id) = chain_id else { + panic!("Should not have main chain blocks here we skipped last range"); + }; + + range.clone().into_par_iter().map(|height| { + let tx_ro = tx_ro.get_or_try(|| env_inner.tx_ro())?; + let tables = get_tables!(env_inner, tx_ro, tables)?.as_ref(); + + get_alt_block( + &AltBlockHeight { + chain_id: (*chain_id).into(), + height, + }, + tables, + ) + }) + }) + .collect::>()?; + + Ok(BlockchainResponse::AltBlocksInChain(blocks)) +} diff --git a/storage/blockchain/src/service/tests.rs b/storage/blockchain/src/service/tests.rs index 8e165f2..3ba4638 100644 --- a/storage/blockchain/src/service/tests.rs +++ b/storage/blockchain/src/service/tests.rs @@ -13,13 +13,14 @@ use std::{ }; use pretty_assertions::assert_eq; +use rand::Rng; use tower::{Service, ServiceExt}; use cuprate_database::{ConcreteEnv, DatabaseIter, DatabaseRo, Env, EnvInner, RuntimeError}; use cuprate_test_utils::data::{BLOCK_V16_TX0, BLOCK_V1_TX2, BLOCK_V9_TX3}; use cuprate_types::{ blockchain::{BlockchainReadRequest, BlockchainResponse, BlockchainWriteRequest}, - Chain, OutputOnChain, VerifiedBlockInformation, + Chain, ChainId, OutputOnChain, VerifiedBlockInformation, }; use crate::{ @@ -31,7 +32,7 @@ use crate::{ }, service::{init, BlockchainReadHandle, BlockchainWriteHandle}, tables::{OpenTables, Tables, TablesIter}, - tests::AssertTableLen, + tests::{map_verified_block_to_alt, AssertTableLen}, types::{Amount, AmountIndex, PreRctOutputId}, }; @@ -58,7 +59,10 @@ fn init_service() -> ( /// - Receive response(s) /// - Assert proper tables were mutated /// - Assert read requests lead to expected responses -#[allow(clippy::future_not_send)] // INVARIANT: tests are using a single threaded runtime +#[expect( + clippy::future_not_send, + reason = "INVARIANT: tests are using a single threaded runtime" +)] async fn test_template( // Which block(s) to add? 
blocks: &[&VerifiedBlockInformation], @@ -84,7 +88,7 @@ async fn test_template( let request = BlockchainWriteRequest::WriteBlock(block); let response_channel = writer.call(request); let response = response_channel.await.unwrap(); - assert_eq!(response, BlockchainResponse::WriteBlock); + assert_eq!(response, BlockchainResponse::Ok); } //----------------------------------------------------------------------- Reset the transaction @@ -164,8 +168,10 @@ async fn test_template( num_req .iter() .map(|amount| match tables.num_outputs().get(amount) { - // INVARIANT: #[cfg] @ lib.rs asserts `usize == u64` - #[allow(clippy::cast_possible_truncation)] + #[expect( + clippy::cast_possible_truncation, + reason = "INVARIANT: #[cfg] @ lib.rs asserts `usize == u64`" + )] Ok(count) => (*amount, count as usize), Err(RuntimeError::KeyNotFound) => (*amount, 0), Err(e) => panic!("{e:?}"), @@ -235,42 +241,38 @@ async fn test_template( //----------------------------------------------------------------------- Output checks // Create the map of amounts and amount indices. - // - // FIXME: There's definitely a better way to map - // `Vec` -> `HashMap>` let (map, output_count) = { - let mut ids = tables - .outputs_iter() - .keys() - .unwrap() - .map(Result::unwrap) - .collect::>(); - - ids.extend( - tables - .rct_outputs_iter() - .keys() - .unwrap() - .map(Result::unwrap) - .map(|amount_index| PreRctOutputId { - amount: 0, - amount_index, - }), - ); + let mut map = HashMap::>::new(); // Used later to compare the amount of Outputs // returned in the Response is equal to the amount // we asked for. - let output_count = ids.len(); + let mut output_count: usize = 0; - let mut map = HashMap::>::new(); - for id in ids { - map.entry(id.amount) - .and_modify(|set| { - set.insert(id.amount_index); - }) - .or_insert_with(|| HashSet::from([id.amount_index])); - } + tables + .outputs_iter() + .keys() + .unwrap() + .map(Result::unwrap) + .chain( + tables + .rct_outputs_iter() + .keys() + .unwrap() + .map(Result::unwrap) + .map(|amount_index| PreRctOutputId { + amount: 0, + amount_index, + }), + ) + .for_each(|id| { + output_count += 1; + map.entry(id.amount) + .and_modify(|set| { + set.insert(id.amount_index); + }) + .or_insert_with(|| HashSet::from([id.amount_index])); + }); (map, output_count) }; @@ -304,7 +306,10 @@ async fn test_template( // Assert we get back the same map of // `Amount`'s and `AmountIndex`'s. let mut response_output_count = 0; - #[allow(clippy::iter_over_hash_type)] // order doesn't matter in this test + #[expect( + clippy::iter_over_hash_type, + reason = "order doesn't matter in this test" + )] for (amount, output_map) in response { let amount_index_set = &map[&amount]; @@ -338,7 +343,8 @@ async fn v1_tx2() { 14_535_350_982_449, AssertTableLen { block_infos: 1, - block_blobs: 1, + block_header_blobs: 1, + block_txs_hashes: 1, block_heights: 1, key_images: 65, num_outputs: 41, @@ -364,7 +370,8 @@ async fn v9_tx3() { 3_403_774_022_163, AssertTableLen { block_infos: 1, - block_blobs: 1, + block_header_blobs: 1, + block_txs_hashes: 1, block_heights: 1, key_images: 4, num_outputs: 0, @@ -390,7 +397,8 @@ async fn v16_tx0() { 600_000_000_000, AssertTableLen { block_infos: 1, - block_blobs: 1, + block_header_blobs: 1, + block_txs_hashes: 1, block_heights: 1, key_images: 0, num_outputs: 0, @@ -407,3 +415,92 @@ async fn v16_tx0() { ) .await; } + +/// Tests the alt-chain requests and responses. 
+#[tokio::test] +async fn alt_chain_requests() { + let (reader, mut writer, _, _tempdir) = init_service(); + + // Set up the test by adding blocks to the main-chain. + for (i, mut block) in [BLOCK_V9_TX3.clone(), BLOCK_V16_TX0.clone()] + .into_iter() + .enumerate() + { + block.height = i; + + let request = BlockchainWriteRequest::WriteBlock(block); + writer.call(request).await.unwrap(); + } + + // Generate the alt-blocks. + let mut prev_hash = BLOCK_V9_TX3.block_hash; + let mut chain_id = 1; + let alt_blocks = [&BLOCK_V16_TX0, &BLOCK_V9_TX3, &BLOCK_V1_TX2] + .into_iter() + .enumerate() + .map(|(i, block)| { + let mut block = (**block).clone(); + block.height = i + 1; + block.block.header.previous = prev_hash; + block.block_blob = block.block.serialize(); + + prev_hash = block.block_hash; + // Randomly either keep the [`ChainId`] the same or change it to a new value. + chain_id += rand::thread_rng().gen_range(0..=1); + + map_verified_block_to_alt(block, ChainId(chain_id.try_into().unwrap())) + }) + .collect::>(); + + for block in &alt_blocks { + // Request a block to be written, assert it was written. + let request = BlockchainWriteRequest::WriteAltBlock(block.clone()); + let response_channel = writer.call(request); + let response = response_channel.await.unwrap(); + assert_eq!(response, BlockchainResponse::Ok); + } + + // Get the full alt-chain + let request = BlockchainReadRequest::AltBlocksInChain(ChainId(chain_id.try_into().unwrap())); + let response = reader.clone().oneshot(request).await.unwrap(); + + let BlockchainResponse::AltBlocksInChain(blocks) = response else { + panic!("Wrong response type was returned"); + }; + + assert_eq!(blocks.len(), alt_blocks.len()); + for (got_block, alt_block) in blocks.into_iter().zip(alt_blocks) { + assert_eq!(got_block.block_blob, alt_block.block_blob); + assert_eq!(got_block.block_hash, alt_block.block_hash); + assert_eq!(got_block.chain_id, alt_block.chain_id); + assert_eq!(got_block.txs, alt_block.txs); + } + + // Flush all alt blocks. + let request = BlockchainWriteRequest::FlushAltBlocks; + let response = writer.ready().await.unwrap().call(request).await.unwrap(); + assert_eq!(response, BlockchainResponse::Ok); + + // Pop blocks from the main chain + let request = BlockchainWriteRequest::PopBlocks(1); + let response = writer.ready().await.unwrap().call(request).await.unwrap(); + + let BlockchainResponse::PopBlocks(_, old_main_chain_id) = response else { + panic!("Wrong response type was returned"); + }; + + // Check we have popped the top block. + let request = BlockchainReadRequest::ChainHeight; + let response = reader.clone().oneshot(request).await.unwrap(); + assert!(matches!(response, BlockchainResponse::ChainHeight(1, _))); + + // Attempt to add the popped block back. + let request = BlockchainWriteRequest::ReverseReorg(old_main_chain_id); + let response = writer.ready().await.unwrap().call(request).await.unwrap(); + assert_eq!(response, BlockchainResponse::Ok); + + // Check we have the popped block back. + let request = BlockchainReadRequest::ChainHeight; + let response = reader.clone().oneshot(request).await.unwrap(); + assert!(matches!(response, BlockchainResponse::ChainHeight(2, _))); +} diff --git a/storage/blockchain/src/service/write.rs b/storage/blockchain/src/service/write.rs index 91292aa..1b06a66 100644 --- a/storage/blockchain/src/service/write.rs +++ b/storage/blockchain/src/service/write.rs @@ -1,21 +1,31 @@ //! Database writer thread definitions and logic. 
- //---------------------------------------------------------------------------------------------------- Import use std::sync::Arc; -use cuprate_database::{ConcreteEnv, Env, EnvInner, RuntimeError, TxRw}; +use cuprate_database::{ConcreteEnv, DatabaseRo, Env, EnvInner, RuntimeError, TxRw}; use cuprate_database_service::DatabaseWriteHandle; use cuprate_types::{ blockchain::{BlockchainResponse, BlockchainWriteRequest}, - VerifiedBlockInformation, + AltBlockInformation, Chain, ChainId, VerifiedBlockInformation, }; use crate::{ - ops, - service::types::{BlockchainWriteHandle, ResponseResult}, - tables::OpenTables, + ops::{alt_block, block, blockchain}, + service::{ + free::map_valid_alt_block_to_verified_block, + types::{BlockchainWriteHandle, ResponseResult}, + }, + tables::{OpenTables, Tables}, + types::AltBlockHeight, }; +/// Write functions within this module abort if the write transaction +/// could not be aborted successfully to maintain atomicity. +/// +/// This is the panic message if the `abort()` fails. +const TX_RW_ABORT_FAIL: &str = + "Could not maintain blockchain database atomicity by aborting write transaction"; + //---------------------------------------------------------------------------------------------------- init_write_service /// Initialize the blockchain write service from a [`ConcreteEnv`]. pub fn init_write_service(env: Arc) -> BlockchainWriteHandle { @@ -30,7 +40,12 @@ fn handle_blockchain_request( ) -> Result { match req { BlockchainWriteRequest::WriteBlock(block) => write_block(env, block), - BlockchainWriteRequest::PopBlocks(nblocks) => pop_blocks(env, *nblocks), + BlockchainWriteRequest::WriteAltBlock(alt_block) => write_alt_block(env, alt_block), + BlockchainWriteRequest::PopBlocks(numb_blocks) => pop_blocks(env, *numb_blocks), + BlockchainWriteRequest::ReverseReorg(old_main_chain_id) => { + reverse_reorg(env, *old_main_chain_id) + } + BlockchainWriteRequest::FlushAltBlocks => flush_alt_blocks(env), } } @@ -51,51 +66,145 @@ fn write_block(env: &ConcreteEnv, block: &VerifiedBlockInformation) -> ResponseR let result = { let mut tables_mut = env_inner.open_tables_mut(&tx_rw)?; - ops::block::add_block(block, &mut tables_mut) + block::add_block(block, &mut tables_mut) }; match result { Ok(()) => { TxRw::commit(tx_rw)?; - Ok(BlockchainResponse::WriteBlock) + Ok(BlockchainResponse::Ok) } Err(e) => { - // INVARIANT: ensure database atomicity by aborting - // the transaction on `add_block()` failures. - TxRw::abort(tx_rw) - .expect("could not maintain database atomicity by aborting write transaction"); + TxRw::abort(tx_rw).expect(TX_RW_ABORT_FAIL); + Err(e) + } + } +} + +/// [`BlockchainWriteRequest::WriteAltBlock`]. +#[inline] +fn write_alt_block(env: &ConcreteEnv, block: &AltBlockInformation) -> ResponseResult { + let env_inner = env.env_inner(); + let tx_rw = env_inner.tx_rw()?; + + let result = { + let mut tables_mut = env_inner.open_tables_mut(&tx_rw)?; + alt_block::add_alt_block(block, &mut tables_mut) + }; + + match result { + Ok(()) => { + TxRw::commit(tx_rw)?; + Ok(BlockchainResponse::Ok) + } + Err(e) => { + TxRw::abort(tx_rw).expect(TX_RW_ABORT_FAIL); Err(e) } } } /// [`BlockchainWriteRequest::PopBlocks`]. -#[inline] -fn pop_blocks(env: &ConcreteEnv, nblocks: u64) -> ResponseResult { +fn pop_blocks(env: &ConcreteEnv, numb_blocks: usize) -> ResponseResult { let env_inner = env.env_inner(); - let tx_rw = env_inner.tx_rw()?; + let mut tx_rw = env_inner.tx_rw()?; + + // FIXME: turn this function into a try block once stable. 
+ let mut result = || { + // flush all the current alt blocks as they may reference blocks to be popped. + alt_block::flush_alt_blocks(&env_inner, &mut tx_rw)?; - let result = || { let mut tables_mut = env_inner.open_tables_mut(&tx_rw)?; - let mut height = 0; + // generate a `ChainId` for the popped blocks. + let old_main_chain_id = ChainId(rand::random()); - for _ in 0..nblocks { - (height, _, _) = ops::block::pop_block(&mut tables_mut)?; + // pop the blocks + for _ in 0..numb_blocks { + block::pop_block(Some(old_main_chain_id), &mut tables_mut)?; } - Ok(height) + Ok(old_main_chain_id) }; match result() { - Ok(height) => { + Ok(old_main_chain_id) => { TxRw::commit(tx_rw)?; - Ok(BlockchainResponse::PopBlocks(height)) + Ok(BlockchainResponse::PopBlocks(todo!(), old_main_chain_id)) } Err(e) => { - // INVARIANT: ensure database atomicity by aborting - // the transaction on `add_block()` failures. - TxRw::abort(tx_rw) - .expect("could not maintain database atomicity by aborting write transaction"); + TxRw::abort(tx_rw).expect(TX_RW_ABORT_FAIL); + Err(e) + } + } +} + +/// [`BlockchainWriteRequest::ReverseReorg`]. +fn reverse_reorg(env: &ConcreteEnv, chain_id: ChainId) -> ResponseResult { + let env_inner = env.env_inner(); + let mut tx_rw = env_inner.tx_rw()?; + + // FIXME: turn this function into a try block once stable. + let mut result = || { + let mut tables_mut = env_inner.open_tables_mut(&tx_rw)?; + + let chain_info = tables_mut.alt_chain_infos().get(&chain_id.into())?; + // Although this doesn't guarantee the chain was popped from the main-chain, it's an easy + // thing for us to check. + assert_eq!(Chain::from(chain_info.parent_chain), Chain::Main); + + let top_block_height = blockchain::top_block_height(tables_mut.block_heights())?; + + // pop any blocks that were added as part of a re-org. + for _ in chain_info.common_ancestor_height..top_block_height { + block::pop_block(None, &mut tables_mut)?; + } + + // Add the old main chain blocks back to the main chain. + for height in (chain_info.common_ancestor_height + 1)..chain_info.chain_height { + let alt_block = alt_block::get_alt_block( + &AltBlockHeight { + chain_id: chain_id.into(), + height, + }, + &tables_mut, + )?; + let verified_block = map_valid_alt_block_to_verified_block(alt_block); + block::add_block(&verified_block, &mut tables_mut)?; + } + + drop(tables_mut); + alt_block::flush_alt_blocks(&env_inner, &mut tx_rw)?; + + Ok(()) + }; + + match result() { + Ok(()) => { + TxRw::commit(tx_rw)?; + Ok(BlockchainResponse::Ok) + } + Err(e) => { + TxRw::abort(tx_rw).expect(TX_RW_ABORT_FAIL); + Err(e) + } + } +} + +/// [`BlockchainWriteRequest::FlushAltBlocks`]. +#[inline] +fn flush_alt_blocks(env: &ConcreteEnv) -> ResponseResult { + let env_inner = env.env_inner(); + let mut tx_rw = env_inner.tx_rw()?; + + let result = alt_block::flush_alt_blocks(&env_inner, &mut tx_rw); + + match result { + Ok(()) => { + TxRw::commit(tx_rw)?; + Ok(BlockchainResponse::Ok) + } + Err(e) => { + TxRw::abort(tx_rw).expect(TX_RW_ABORT_FAIL); Err(e) } } diff --git a/storage/blockchain/src/tables.rs b/storage/blockchain/src/tables.rs index 122ac31..b9fc5ed 100644 --- a/storage/blockchain/src/tables.rs +++ b/storage/blockchain/src/tables.rs @@ -9,7 +9,7 @@ //! Table structs are `CamelCase`, and their static string //! names used by the actual database backend are `snake_case`. //! -//! For example: [`BlockBlobs`] -> `block_blobs`. +//! For example: [`BlockHeaderBlobs`] -> `block_header_blobs`. //! //! # Traits //! 
This module also contains a set of traits for @@ -17,9 +17,10 @@ //---------------------------------------------------------------------------------------------------- Import use crate::types::{ - Amount, AmountIndex, AmountIndices, BlockBlob, BlockHash, BlockHeight, BlockInfo, KeyImage, - Output, PreRctOutputId, PrunableBlob, PrunableHash, PrunedBlob, RctOutput, TxBlob, TxHash, - TxId, UnlockTime, + AltBlockHeight, AltChainInfo, AltTransactionInfo, Amount, AmountIndex, AmountIndices, + BlockBlob, BlockHash, BlockHeaderBlob, BlockHeight, BlockInfo, BlockTxHashes, + CompactAltBlockInfo, KeyImage, Output, PreRctOutputId, PrunableBlob, PrunableHash, PrunedBlob, + RawChainId, RctOutput, TxBlob, TxHash, TxId, UnlockTime, }; //---------------------------------------------------------------------------------------------------- Tables @@ -29,22 +30,28 @@ use crate::types::{ // - If adding/changing a table also edit: // - the tests in `src/backend/tests.rs` cuprate_database::define_tables! { - /// Serialized block blobs (bytes). + /// Serialized block header blobs (bytes). /// - /// Contains the serialized version of all blocks. - 0 => BlockBlobs, - BlockHeight => BlockBlob, + /// Contains the serialized version of all blocks headers. + 0 => BlockHeaderBlobs, + BlockHeight => BlockHeaderBlob, + + /// Block transactions hashes + /// + /// Contains all the transaction hashes of all blocks. + 1 => BlockTxsHashes, + BlockHeight => BlockTxHashes, /// Block heights. /// /// Contains the height of all blocks. - 1 => BlockHeights, + 2 => BlockHeights, BlockHash => BlockHeight, /// Block information. /// /// Contains metadata of all blocks. - 2 => BlockInfos, + 3 => BlockInfos, BlockHeight => BlockInfo, /// Set of key images. @@ -53,38 +60,38 @@ cuprate_database::define_tables! { /// /// This table has `()` as the value type, as in, /// it is a set of key images. - 3 => KeyImages, + 4 => KeyImages, KeyImage => (), /// Maps an output's amount to the number of outputs with that amount. /// /// For example, if there are 5 outputs with `amount = 123` /// then calling `get(123)` on this table will return 5. - 4 => NumOutputs, + 5 => NumOutputs, Amount => u64, /// Pre-RCT output data. - 5 => Outputs, + 6 => Outputs, PreRctOutputId => Output, /// Pruned transaction blobs (bytes). /// /// Contains the pruned portion of serialized transaction data. - 6 => PrunedTxBlobs, + 7 => PrunedTxBlobs, TxId => PrunedBlob, /// Prunable transaction blobs (bytes). /// /// Contains the prunable portion of serialized transaction data. // SOMEDAY: impl when `monero-serai` supports pruning - 7 => PrunableTxBlobs, + 8 => PrunableTxBlobs, TxId => PrunableBlob, /// Prunable transaction hashes. /// /// Contains the prunable portion of transaction hashes. // SOMEDAY: impl when `monero-serai` supports pruning - 8 => PrunableHashes, + 9 => PrunableHashes, TxId => PrunableHash, // SOMEDAY: impl a properties table: @@ -94,41 +101,75 @@ cuprate_database::define_tables! { // StorableString => StorableVec, /// RCT output data. - 9 => RctOutputs, + 10 => RctOutputs, AmountIndex => RctOutput, /// Transaction blobs (bytes). /// /// Contains the serialized version of all transactions. // SOMEDAY: remove when `monero-serai` supports pruning - 10 => TxBlobs, + 11 => TxBlobs, TxId => TxBlob, /// Transaction indices. /// /// Contains the indices all transactions. - 11 => TxIds, + 12 => TxIds, TxHash => TxId, /// Transaction heights. /// /// Contains the block height associated with all transactions. 
- 12 => TxHeights, + 13 => TxHeights, TxId => BlockHeight, /// Transaction outputs. /// /// Contains the list of `AmountIndex`'s of the /// outputs associated with all transactions. - 13 => TxOutputs, + 14 => TxOutputs, TxId => AmountIndices, /// Transaction unlock time. /// /// Contains the unlock time of transactions IF they have one. /// Transactions without unlock times will not exist in this table. - 14 => TxUnlockTime, + 15 => TxUnlockTime, TxId => UnlockTime, + + /// Information on alt-chains. + 16 => AltChainInfos, + RawChainId => AltChainInfo, + + /// Alt-block heights. + /// + /// Contains the height of all alt-blocks. + 17 => AltBlockHeights, + BlockHash => AltBlockHeight, + + /// Alt-block information. + /// + /// Contains information on all alt-blocks. + 18 => AltBlocksInfo, + AltBlockHeight => CompactAltBlockInfo, + + /// Alt-block blobs. + /// + /// Contains the raw bytes of all alt-blocks. + 19 => AltBlockBlobs, + AltBlockHeight => BlockBlob, + + /// Alt-block transaction blobs. + /// + /// Contains the raw bytes of alt transactions, if those transactions are not in the main-chain. + 20 => AltTransactionBlobs, + TxHash => TxBlob, + + /// Alt-block transaction information. + /// + /// Contains information on all alt transactions, even if they are in the main-chain. + 21 => AltTransactionInfos, + TxHash => AltTransactionInfo, } //---------------------------------------------------------------------------------------------------- Tests diff --git a/storage/blockchain/src/tests.rs b/storage/blockchain/src/tests.rs index 65527e1..1fe2063 100644 --- a/storage/blockchain/src/tests.rs +++ b/storage/blockchain/src/tests.rs @@ -9,7 +9,8 @@ use std::{borrow::Cow, fmt::Debug}; use pretty_assertions::assert_eq; -use cuprate_database::{ConcreteEnv, DatabaseRo, Env, EnvInner}; +use cuprate_database::{DatabaseRo, Env, EnvInner}; +use cuprate_types::{AltBlockInformation, ChainId, VerifiedBlockInformation}; use crate::{ config::ConfigBuilder, @@ -25,7 +26,8 @@ use crate::{ #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub(crate) struct AssertTableLen { pub(crate) block_infos: u64, - pub(crate) block_blobs: u64, + pub(crate) block_header_blobs: u64, + pub(crate) block_txs_hashes: u64, pub(crate) block_heights: u64, pub(crate) key_images: u64, pub(crate) num_outputs: u64, @@ -45,7 +47,8 @@ impl AssertTableLen { pub(crate) fn assert(self, tables: &impl Tables) { let other = Self { block_infos: tables.block_infos().len().unwrap(), - block_blobs: tables.block_blobs().len().unwrap(), + block_header_blobs: tables.block_header_blobs().len().unwrap(), + block_txs_hashes: tables.block_txs_hashes().len().unwrap(), block_heights: tables.block_heights().len().unwrap(), key_images: tables.key_images().len().unwrap(), num_outputs: tables.num_outputs().len().unwrap(), @@ -68,8 +71,7 @@ impl AssertTableLen { /// Create an `Env` in a temporarily directory. /// The directory is automatically removed after the `TempDir` is dropped. /// -/// FIXME: changing this to `-> impl Env` causes lifetime errors... -pub(crate) fn tmp_concrete_env() -> (ConcreteEnv, tempfile::TempDir) { +pub(crate) fn tmp_concrete_env() -> (impl Env, tempfile::TempDir) { let tempdir = tempfile::tempdir().unwrap(); let config = ConfigBuilder::new() .db_directory(Cow::Owned(tempdir.path().into())) @@ -81,10 +83,28 @@ pub(crate) fn tmp_concrete_env() -> (ConcreteEnv, tempfile::TempDir) { } /// Assert all the tables in the environment are empty. 
-pub(crate) fn assert_all_tables_are_empty(env: &ConcreteEnv) { +pub(crate) fn assert_all_tables_are_empty(env: &impl Env) { let env_inner = env.env_inner(); let tx_ro = env_inner.tx_ro().unwrap(); let tables = env_inner.open_tables(&tx_ro).unwrap(); assert!(tables.all_tables_empty().unwrap()); assert_eq!(crate::ops::tx::get_num_tx(tables.tx_ids()).unwrap(), 0); } + +pub(crate) fn map_verified_block_to_alt( + verified_block: VerifiedBlockInformation, + chain_id: ChainId, +) -> AltBlockInformation { + AltBlockInformation { + block: verified_block.block, + block_blob: verified_block.block_blob, + txs: verified_block.txs, + block_hash: verified_block.block_hash, + pow_hash: verified_block.pow_hash, + height: verified_block.height, + weight: verified_block.weight, + long_term_weight: verified_block.long_term_weight, + cumulative_difficulty: verified_block.cumulative_difficulty, + chain_id, + } +} diff --git a/storage/blockchain/src/types.rs b/storage/blockchain/src/types.rs index eb1dc64..86ef91c 100644 --- a/storage/blockchain/src/types.rs +++ b/storage/blockchain/src/types.rs @@ -41,12 +41,14 @@ #![forbid(unsafe_code)] // if you remove this line i will steal your monero //---------------------------------------------------------------------------------------------------- Import -use bytemuck::{Pod, Zeroable}; +use std::num::NonZero; +use bytemuck::{Pod, Zeroable}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use cuprate_database::{Key, StorableVec}; +use cuprate_types::{Chain, ChainId}; //---------------------------------------------------------------------------------------------------- Aliases // These type aliases exist as many Monero-related types are the exact same. @@ -64,6 +66,12 @@ pub type AmountIndices = StorableVec; /// A serialized block. pub type BlockBlob = StorableVec; +/// A serialized block header +pub type BlockHeaderBlob = StorableVec; + +/// A block transaction hashes +pub type BlockTxHashes = StorableVec<[u8; 32]>; + /// A block's hash. pub type BlockHash = [u8; 32]; @@ -164,6 +172,7 @@ impl Key for PreRctOutputId {} /// block_hash: [54; 32], /// cumulative_rct_outs: 2389, /// long_term_weight: 2389, +/// mining_tx_index: 23 /// }; /// let b = Storable::as_bytes(&a); /// let c: BlockInfo = Storable::from_bytes(b); @@ -173,7 +182,7 @@ impl Key for PreRctOutputId {} /// # Size & Alignment /// ```rust /// # use cuprate_blockchain::types::*; -/// assert_eq!(size_of::(), 88); +/// assert_eq!(size_of::(), 96); /// assert_eq!(align_of::(), 8); /// ``` #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -187,7 +196,7 @@ pub struct BlockInfo { /// The adjusted block size, in bytes. /// /// See [`block_weight`](https://monero-book.cuprate.org/consensus_rules/blocks/weights.html#blocks-weight). - pub weight: u64, + pub weight: usize, /// Least-significant 64 bits of the 128-bit cumulative difficulty. pub cumulative_difficulty_low: u64, /// Most-significant 64 bits of the 128-bit cumulative difficulty. @@ -199,7 +208,9 @@ pub struct BlockInfo { /// The long term block weight, based on the median weight of the preceding `100_000` blocks. /// /// See [`long_term_weight`](https://monero-book.cuprate.org/consensus_rules/blocks/weights.html#long-term-block-weight). - pub long_term_weight: u64, + pub long_term_weight: usize, + /// [`TxId`] (u64) of the block coinbase transaction. 
+ pub mining_tx_index: TxId, } //---------------------------------------------------------------------------------------------------- OutputFlags @@ -324,6 +335,259 @@ pub struct RctOutput { } // TODO: local_index? +//---------------------------------------------------------------------------------------------------- RawChain +/// [`Chain`] in a format which can be stored in the DB. +/// +/// Implements [`Into`] and [`From`] for [`Chain`]. +/// +/// ```rust +/// # use std::borrow::*; +/// # use cuprate_blockchain::{*, types::*}; +/// use cuprate_database::Storable; +/// use cuprate_types::Chain; +/// +/// // Assert Storable is correct. +/// let a: RawChain = Chain::Main.into(); +/// let b = Storable::as_bytes(&a); +/// let c: RawChain = Storable::from_bytes(b); +/// assert_eq!(a, c); +/// ``` +/// +/// # Size & Alignment +/// ```rust +/// # use cuprate_blockchain::types::*; +/// assert_eq!(size_of::(), 8); +/// assert_eq!(align_of::(), 8); +/// ``` +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Pod, Zeroable)] +#[repr(transparent)] +pub struct RawChain(u64); + +impl From for RawChain { + fn from(value: Chain) -> Self { + match value { + Chain::Main => Self(0), + Chain::Alt(chain_id) => Self(chain_id.0.get()), + } + } +} + +impl From for Chain { + fn from(value: RawChain) -> Self { + NonZero::new(value.0).map_or(Self::Main, |id| Self::Alt(ChainId(id))) + } +} + +impl From for RawChain { + fn from(value: RawChainId) -> Self { + // A [`ChainID`] with an inner value of `0` is invalid. + assert_ne!(value.0, 0); + + Self(value.0) + } +} + +//---------------------------------------------------------------------------------------------------- RawChainId +/// [`ChainId`] in a format which can be stored in the DB. +/// +/// Implements [`Into`] and [`From`] for [`ChainId`]. +/// +/// ```rust +/// # use std::borrow::*; +/// # use cuprate_blockchain::{*, types::*}; +/// use cuprate_database::Storable; +/// use cuprate_types::ChainId; +/// +/// // Assert Storable is correct. +/// let a: RawChainId = ChainId(10.try_into().unwrap()).into(); +/// let b = Storable::as_bytes(&a); +/// let c: RawChainId = Storable::from_bytes(b); +/// assert_eq!(a, c); +/// ``` +/// +/// # Size & Alignment +/// ```rust +/// # use cuprate_blockchain::types::*; +/// assert_eq!(size_of::(), 8); +/// assert_eq!(align_of::(), 8); +/// ``` +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Pod, Zeroable)] +#[repr(transparent)] +pub struct RawChainId(u64); + +impl From for RawChainId { + fn from(value: ChainId) -> Self { + Self(value.0.get()) + } +} + +impl From for ChainId { + fn from(value: RawChainId) -> Self { + Self(NonZero::new(value.0).expect("RawChainId cannot have a value of `0`")) + } +} + +impl Key for RawChainId {} + +//---------------------------------------------------------------------------------------------------- AltChainInfo +/// Information on an alternative chain. +/// +/// ```rust +/// # use std::borrow::*; +/// # use cuprate_blockchain::{*, types::*}; +/// use cuprate_database::Storable; +/// use cuprate_types::Chain; +/// +/// // Assert Storable is correct. 
+/// let a: AltChainInfo = AltChainInfo { +/// parent_chain: Chain::Main.into(), +/// common_ancestor_height: 0, +/// chain_height: 1, +/// }; +/// let b = Storable::as_bytes(&a); +/// let c: AltChainInfo = Storable::from_bytes(b); +/// assert_eq!(a, c); +/// ``` +/// +/// # Size & Alignment +/// ```rust +/// # use cuprate_blockchain::types::*; +/// assert_eq!(size_of::(), 24); +/// assert_eq!(align_of::(), 8); +/// ``` +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Pod, Zeroable)] +#[repr(C)] +pub struct AltChainInfo { + /// The chain this alt chain forks from. + pub parent_chain: RawChain, + /// The height of the first block we share with the parent chain. + pub common_ancestor_height: usize, + /// The chain height of the blocks in this alt chain. + pub chain_height: usize, +} + +//---------------------------------------------------------------------------------------------------- AltBlockHeight +/// Represents the height of a block on an alt-chain. +/// +/// ```rust +/// # use std::borrow::*; +/// # use cuprate_blockchain::{*, types::*}; +/// use cuprate_database::Storable; +/// use cuprate_types::ChainId; +/// +/// // Assert Storable is correct. +/// let a: AltBlockHeight = AltBlockHeight { +/// chain_id: ChainId(1.try_into().unwrap()).into(), +/// height: 1, +/// }; +/// let b = Storable::as_bytes(&a); +/// let c: AltBlockHeight = Storable::from_bytes(b); +/// assert_eq!(a, c); +/// ``` +/// +/// # Size & Alignment +/// ```rust +/// # use cuprate_blockchain::types::*; +/// assert_eq!(size_of::(), 16); +/// assert_eq!(align_of::(), 8); +/// ``` +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Pod, Zeroable)] +#[repr(C)] +pub struct AltBlockHeight { + /// The [`ChainId`] of the chain this alt block is on, in raw form. + pub chain_id: RawChainId, + /// The height of this alt-block. + pub height: usize, +} + +impl Key for AltBlockHeight {} + +//---------------------------------------------------------------------------------------------------- CompactAltBlockInfo +/// Represents information on an alt-chain. +/// +/// ```rust +/// # use std::borrow::*; +/// # use cuprate_blockchain::{*, types::*}; +/// use cuprate_database::Storable; +/// +/// // Assert Storable is correct. +/// let a: CompactAltBlockInfo = CompactAltBlockInfo { +/// block_hash: [1; 32], +/// pow_hash: [2; 32], +/// height: 10, +/// weight: 20, +/// long_term_weight: 30, +/// cumulative_difficulty_low: 40, +/// cumulative_difficulty_high: 50, +/// }; +/// +/// let b = Storable::as_bytes(&a); +/// let c: CompactAltBlockInfo = Storable::from_bytes(b); +/// assert_eq!(a, c); +/// ``` +/// +/// # Size & Alignment +/// ```rust +/// # use cuprate_blockchain::types::*; +/// assert_eq!(size_of::(), 104); +/// assert_eq!(align_of::(), 8); +/// ``` +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Pod, Zeroable)] +#[repr(C)] +pub struct CompactAltBlockInfo { + /// The block's hash. + pub block_hash: [u8; 32], + /// The block's proof-of-work hash. + pub pow_hash: [u8; 32], + /// The block's height. + pub height: usize, + /// The adjusted block size, in bytes. + pub weight: usize, + /// The long term block weight, which is the weight factored in with previous block weights. + pub long_term_weight: usize, + /// The low 64 bits of the cumulative difficulty. + pub cumulative_difficulty_low: u64, + /// The high 64 bits of the cumulative difficulty. 
+    pub cumulative_difficulty_high: u64,
+}
+
+//---------------------------------------------------------------------------------------------------- AltTransactionInfo
+/// Represents information on an alt transaction.
+///
+/// ```rust
+/// # use std::borrow::*;
+/// # use cuprate_blockchain::{*, types::*};
+/// use cuprate_database::Storable;
+///
+/// // Assert Storable is correct.
+/// let a: AltTransactionInfo = AltTransactionInfo {
+///     tx_weight: 1,
+///     fee: 6,
+///     tx_hash: [6; 32],
+/// };
+///
+/// let b = Storable::as_bytes(&a);
+/// let c: AltTransactionInfo = Storable::from_bytes(b);
+/// assert_eq!(a, c);
+/// ```
+///
+/// # Size & Alignment
+/// ```rust
+/// # use cuprate_blockchain::types::*;
+/// assert_eq!(size_of::<AltTransactionInfo>(), 48);
+/// assert_eq!(align_of::<AltTransactionInfo>(), 8);
+/// ```
+#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Pod, Zeroable)]
+#[repr(C)]
+pub struct AltTransactionInfo {
+    /// The transaction's weight.
+    pub tx_weight: usize,
+    /// The transaction's total fees.
+    pub fee: u64,
+    /// The transaction's hash.
+    pub tx_hash: [u8; 32],
+}
+
 //---------------------------------------------------------------------------------------------------- Tests
 #[cfg(test)]
 mod test {
diff --git a/storage/blockchain/src/unsafe_sendable.rs b/storage/blockchain/src/unsafe_sendable.rs
index 9447293..76c7899 100644
--- a/storage/blockchain/src/unsafe_sendable.rs
+++ b/storage/blockchain/src/unsafe_sendable.rs
@@ -26,7 +26,7 @@ use bytemuck::TransparentWrapper;
 /// Notably, `heed`'s table type uses this inside `service`.
 pub(crate) struct UnsafeSendable<T>(T);
 
-#[allow(clippy::non_send_fields_in_send_ty)]
+#[expect(clippy::non_send_fields_in_send_ty)]
 // SAFETY: Users ensure that their usage of this type is safe.
 unsafe impl<T> Send for UnsafeSendable<T> {}
 
@@ -41,7 +41,7 @@ impl<T> UnsafeSendable<T> {
     }
 
     /// Extract the inner `T`.
-    #[allow(dead_code)]
+    #[expect(dead_code)]
     pub(crate) fn into_inner(self) -> T {
         self.0
     }
diff --git a/storage/database/src/backend/heed/env.rs b/storage/database/src/backend/heed/env.rs
index 8c71e61..568379e 100644
--- a/storage/database/src/backend/heed/env.rs
+++ b/storage/database/src/backend/heed/env.rs
@@ -144,7 +144,7 @@ impl Env for ConcreteEnv {
         // (current disk size) + (a bit of leeway)
         // to account for empty databases where we
         // need to write same tables.
-        #[allow(clippy::cast_possible_truncation)] // only 64-bit targets
+        #[expect(clippy::cast_possible_truncation, reason = "only 64-bit targets")]
         let disk_size_bytes = match std::fs::File::open(&config.db_file) {
             Ok(file) => file.metadata()?.len() as usize,
             // The database file doesn't exist, 0 bytes.
diff --git a/storage/database/src/backend/heed/error.rs b/storage/database/src/backend/heed/error.rs
index bbaeaf0..fdeab70 100644
--- a/storage/database/src/backend/heed/error.rs
+++ b/storage/database/src/backend/heed/error.rs
@@ -57,7 +57,10 @@ impl From<heed::Error> for crate::InitError {
 }
 
 //---------------------------------------------------------------------------------------------------- RuntimeError
-#[allow(clippy::fallible_impl_from)] // We need to panic sometimes.
+#[expect(
+    clippy::fallible_impl_from,
+    reason = "We need to panic sometimes for safety"
+)]
 impl From<heed::Error> for crate::RuntimeError {
     /// # Panics
     /// This will panic on unrecoverable errors for safety.
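Many of the hunks in this section swap `#[allow(...)]` for `#[expect(...)]`. As a quick illustration of why (a standalone sketch, not code from this repository): `expect` raises the `unfulfilled_lint_expectations` warning when the suppressed lint stops firing, so stale suppressions surface on their own, whereas `allow` stays silent forever.

```rust
// Standalone sketch: `expect` vs. `allow` for lint suppression (Rust 1.81+).
// If `clippy::needless_return` ever stops firing here (e.g. the `return` is
// removed), the compiler reports `unfulfilled_lint_expectations`;
// an `#[allow]` in the same spot would simply linger unnoticed.
#[expect(clippy::needless_return, reason = "kept explicit for illustration")]
fn answer() -> u32 {
    return 42;
}

fn main() {
    println!("{}", answer());
}
```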
diff --git a/storage/database/src/backend/tests.rs b/storage/database/src/backend/tests.rs
index e219c42..0c0fe05 100644
--- a/storage/database/src/backend/tests.rs
+++ b/storage/database/src/backend/tests.rs
@@ -194,7 +194,7 @@ fn db_read_write() {
 
     // Insert keys.
     let mut key = KEY;
-    #[allow(clippy::explicit_counter_loop)] // we need the +1 side effect
+    #[expect(clippy::explicit_counter_loop, reason = "we need the +1 side effect")]
     for _ in 0..N {
         table.put(&key, &VALUE).unwrap();
         key += 1;
@@ -269,7 +269,7 @@ fn db_read_write() {
 
     assert_ne!(table.get(&KEY).unwrap(), NEW_VALUE);
 
-    #[allow(unused_assignments)]
+    #[expect(unused_assignments)]
     table
         .update(&KEY, |mut value| {
             value = NEW_VALUE;
diff --git a/storage/database/src/config/mod.rs b/storage/database/src/config/mod.rs
index c6ed0c0..7d65233 100644
--- a/storage/database/src/config/mod.rs
+++ b/storage/database/src/config/mod.rs
@@ -33,7 +33,7 @@
 //! # Ok(()) }
 //! ```
 
-#[allow(clippy::module_inception)]
+#[expect(clippy::module_inception)]
 mod config;
 pub use config::{Config, ConfigBuilder, READER_THREADS_DEFAULT};
 
diff --git a/storage/database/src/database.rs b/storage/database/src/database.rs
index 4a45f7c..6fbb7aa 100644
--- a/storage/database/src/database.rs
+++ b/storage/database/src/database.rs
@@ -54,7 +54,7 @@ pub trait DatabaseIter<T: Table> {
     /// Get an [`Iterator`] that returns the `(key, value)` types for this database.
     #[doc = doc_iter!()]
-    #[allow(clippy::iter_not_returning_iterator)]
+    #[expect(clippy::iter_not_returning_iterator)]
     fn iter(
         &self,
     ) -> Result<impl Iterator<Item = Result<(T::Key, T::Value), RuntimeError>> + '_, RuntimeError>;
diff --git a/storage/database/src/env.rs b/storage/database/src/env.rs
index 8294443..1ae6aa1 100644
--- a/storage/database/src/env.rs
+++ b/storage/database/src/env.rs
@@ -122,7 +122,7 @@ pub trait Env: Sized {
     /// This function _must_ be re-implemented if [`Env::MANUAL_RESIZE`] is `true`.
     ///
     /// Otherwise, this function will panic with `unreachable!()`.
-    #[allow(unused_variables)]
+    #[expect(unused_variables)]
     fn resize_map(&self, resize_algorithm: Option<ResizeAlgorithm>) -> NonZeroUsize {
         unreachable!()
     }
diff --git a/storage/database/src/resize.rs b/storage/database/src/resize.rs
index 6ef9974..b217478 100644
--- a/storage/database/src/resize.rs
+++ b/storage/database/src/resize.rs
@@ -261,7 +261,7 @@ pub fn percent(current_size_bytes: usize, percent: f32) -> NonZeroUsize {
     let page_size = *PAGE_SIZE;
 
     // INVARIANT: Allow `f32` <-> `usize` casting, we handle all cases.
-    #[allow(
+    #[expect(
         clippy::cast_possible_truncation,
         clippy::cast_sign_loss,
         clippy::cast_precision_loss
diff --git a/storage/service/src/reader_threads.rs b/storage/service/src/reader_threads.rs
index 72f619a..a182e48 100644
--- a/storage/service/src/reader_threads.rs
+++ b/storage/service/src/reader_threads.rs
@@ -153,7 +153,7 @@ impl ReaderThreads {
             },
 
             // We handle the casting loss.
-            #[allow(
+            #[expect(
                 clippy::cast_precision_loss,
                 clippy::cast_possible_truncation,
                 clippy::cast_sign_loss
diff --git a/storage/txpool/src/service/interface.rs b/storage/txpool/src/service/interface.rs
index 1708939..c1883bf 100644
--- a/storage/txpool/src/service/interface.rs
+++ b/storage/txpool/src/service/interface.rs
@@ -18,8 +18,8 @@ pub enum TxpoolReadRequest {
 
 //---------------------------------------------------------------------------------------------------- TxpoolReadResponse
 /// The transaction pool [`tower::Service`] read response type.
-#[allow(clippy::large_enum_variant)]
+#[expect(clippy::large_enum_variant)]
 #[derive(Debug)]
 pub enum TxpoolReadResponse {
     /// A response containing the raw bytes of a transaction.
     // TODO: use bytes::Bytes.
diff --git a/storage/txpool/src/service/read.rs b/storage/txpool/src/service/read.rs
index 5654164..f006813 100644
--- a/storage/txpool/src/service/read.rs
+++ b/storage/txpool/src/service/read.rs
@@ -50,7 +50,7 @@ fn init_read_service_with_pool(env: Arc<ConcreteEnv>, pool: Arc<ThreadPool>) ->
 /// 1. `Request` is mapped to a handler function
 /// 2. Handler function is called
 /// 3. [`TxpoolReadResponse`] is returned
-#[allow(clippy::needless_pass_by_value)]
+#[expect(clippy::needless_pass_by_value)]
 fn map_request(
     env: &ConcreteEnv,          // Access to the database
     request: TxpoolReadRequest, // The request we must fulfill
diff --git a/storage/txpool/src/types.rs b/storage/txpool/src/types.rs
index 09b0ce0..4da2d0f 100644
--- a/storage/txpool/src/types.rs
+++ b/storage/txpool/src/types.rs
@@ -39,7 +39,7 @@ pub struct TransactionInfo {
     pub weight: usize,
     /// [`TxStateFlags`] of this transaction.
     pub flags: TxStateFlags,
-    #[allow(clippy::pub_underscore_fields)]
+    #[expect(clippy::pub_underscore_fields)]
     /// Explicit padding so that we have no implicit padding bytes in `repr(C)`.
     ///
    /// Allows potential future expansion of this type.
@@ -92,7 +92,7 @@ impl From<RawCachedVerificationState> for CachedVerificationState {
     }
 }
 
-#[allow(clippy::fallible_impl_from)] // only panics in invalid states
+#[expect(clippy::fallible_impl_from, reason = "only panics in invalid states")]
 impl From<CachedVerificationState> for RawCachedVerificationState {
     fn from(value: CachedVerificationState) -> Self {
         match value {
diff --git a/test-utils/Cargo.toml b/test-utils/Cargo.toml
index a96a9cf..abf7ee4 100644
--- a/test-utils/Cargo.toml
+++ b/test-utils/Cargo.toml
@@ -7,7 +7,7 @@ authors = ["Boog900", "hinto-janai"]
 
 [dependencies]
 cuprate-types = { path = "../types" }
-cuprate-helper = { path = "../helper", features = ["map"] }
+cuprate-helper = { path = "../helper", features = ["map", "tx"] }
 cuprate-wire = { path = "../net/wire" }
 cuprate-p2p-core = { path = "../p2p/p2p-core", features = ["borsh"] }
 
@@ -22,11 +22,13 @@ tokio = { workspace = true, features = ["full"] }
 tokio-util = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
-bytes = { workspace = true, features = ["std"] }
 tempfile = { workspace = true }
 paste = { workspace = true }
 borsh = { workspace = true, features = ["derive"]}
 
 [dev-dependencies]
 hex = { workspace = true }
-pretty_assertions = { workspace = true }
\ No newline at end of file
+pretty_assertions = { workspace = true }
+
+[lints]
+workspace = true
\ No newline at end of file
diff --git a/test-utils/src/data/mod.rs b/test-utils/src/data/mod.rs
index b9d42fb..3be409f 100644
--- a/test-utils/src/data/mod.rs
+++ b/test-utils/src/data/mod.rs
@@ -25,13 +25,11 @@
 //! let tx: VerifiedTransactionInformation = TX_V1_SIG0.clone();
 //!
``` -mod constants; pub use constants::{ BLOCK_43BD1F, BLOCK_5ECB7E, BLOCK_BBD604, BLOCK_F91043, TX_2180A8, TX_3BC7FF, TX_84D48D, TX_9E3F73, TX_B6B439, TX_D7FEBD, TX_E2D393, TX_E57440, }; +pub use statics::{BLOCK_V16_TX0, BLOCK_V1_TX2, BLOCK_V9_TX3, TX_V1_SIG0, TX_V1_SIG2, TX_V2_RCT3}; +mod constants; mod statics; -pub use statics::{ - tx_fee, BLOCK_V16_TX0, BLOCK_V1_TX2, BLOCK_V9_TX3, TX_V1_SIG0, TX_V1_SIG2, TX_V2_RCT3, -}; diff --git a/test-utils/src/data/statics.rs b/test-utils/src/data/statics.rs index 8b98171..c67c7eb 100644 --- a/test-utils/src/data/statics.rs +++ b/test-utils/src/data/statics.rs @@ -8,12 +8,12 @@ //---------------------------------------------------------------------------------------------------- Import use std::sync::LazyLock; -use cuprate_helper::map::combine_low_high_bits_to_u128; -use cuprate_types::{VerifiedBlockInformation, VerifiedTransactionInformation}; use hex_literal::hex; -use monero_serai::transaction::Input; use monero_serai::{block::Block, transaction::Transaction}; +use cuprate_helper::{map::combine_low_high_bits_to_u128, tx::tx_fee}; +use cuprate_types::{VerifiedBlockInformation, VerifiedTransactionInformation}; + use crate::data::constants::{ BLOCK_43BD1F, BLOCK_5ECB7E, BLOCK_F91043, TX_2180A8, TX_3BC7FF, TX_84D48D, TX_9E3F73, TX_B6B439, TX_D7FEBD, TX_E2D393, TX_E57440, @@ -110,36 +110,6 @@ fn to_tx_verification_data(tx_blob: impl AsRef<[u8]>) -> VerifiedTransactionInfo } } -/// Calculates the fee of the [`Transaction`]. -/// -/// # Panics -/// This will panic if the inputs overflow or the transaction outputs too much. -pub fn tx_fee(tx: &Transaction) -> u64 { - let mut fee = 0_u64; - - match &tx { - Transaction::V1 { prefix, .. } => { - for input in &prefix.inputs { - match input { - Input::Gen(_) => return 0, - Input::ToKey { amount, .. } => { - fee = fee.checked_add(amount.unwrap_or(0)).unwrap(); - } - } - } - - for output in &prefix.outputs { - fee.checked_sub(output.amount.unwrap_or(0)).unwrap(); - } - } - Transaction::V2 { proofs, .. } => { - fee = proofs.as_ref().unwrap().base.fee; - } - }; - - fee -} - //---------------------------------------------------------------------------------------------------- Blocks /// Generate a `static LazyLock`. /// @@ -148,8 +118,8 @@ pub fn tx_fee(tx: &Transaction) -> u64 { /// /// This requires some static block/tx input (from data) and some fields. /// This data can be accessed more easily via: -/// - A block explorer (https://xmrchain.net) -/// - Monero RPC (see cuprate_test_utils::rpc for this) +/// - A block explorer () +/// - Monero RPC (see `cuprate_test_utils::rpc` for this) /// /// See below for actual usage. macro_rules! verified_block_information { @@ -311,12 +281,12 @@ transaction_verification_data! { //---------------------------------------------------------------------------------------------------- TESTS #[cfg(test)] mod tests { - use super::*; - use pretty_assertions::assert_eq; use crate::rpc::client::HttpRpcClient; + use super::*; + /// Assert the defined blocks are the same compared to ones received from a local RPC call. 
#[ignore] // FIXME: doesn't work in CI, we need a real unrestricted node #[tokio::test] diff --git a/test-utils/src/monerod.rs b/test-utils/src/monerod.rs index 9ffa08d..abad4c9 100644 --- a/test-utils/src/monerod.rs +++ b/test-utils/src/monerod.rs @@ -178,6 +178,7 @@ impl Drop for SpawnedMoneroD { println!("------END-MONEROD-LOGS------"); } + #[expect(clippy::manual_assert, reason = "`if` is more clear")] if error && !panicking() { // `println` only outputs in a test when panicking so if there is an error while // dropping monerod but not an error in the test then we need to panic to make sure diff --git a/test-utils/src/rpc/client.rs b/test-utils/src/rpc/client.rs index fbe6fb9..ce7fb09 100644 --- a/test-utils/src/rpc/client.rs +++ b/test-utils/src/rpc/client.rs @@ -1,18 +1,16 @@ //! HTTP RPC client. //---------------------------------------------------------------------------------------------------- Use +use monero_rpc::Rpc; +use monero_serai::block::Block; +use monero_simple_request_rpc::SimpleRequestRpc; use serde::Deserialize; use serde_json::json; use tokio::task::spawn_blocking; -use monero_rpc::Rpc; -use monero_serai::block::Block; -use monero_simple_request_rpc::SimpleRequestRpc; - +use cuprate_helper::tx::tx_fee; use cuprate_types::{VerifiedBlockInformation, VerifiedTransactionInformation}; -use crate::data::tx_fee; - //---------------------------------------------------------------------------------------------------- Constants /// The default URL used for Monero RPC connections. pub const LOCALHOST_RPC_URL: &str = "http://127.0.0.1:18081"; @@ -47,13 +45,13 @@ impl HttpRpcClient { } /// The address used for this [`HttpRpcClient`]. - #[allow(dead_code)] + #[allow(clippy::allow_attributes, dead_code, reason = "expect doesn't work")] const fn address(&self) -> &String { &self.address } /// Access to the inner RPC client for other usage. - #[allow(dead_code)] + #[expect(dead_code)] const fn rpc(&self) -> &SimpleRequestRpc { &self.rpc } @@ -184,9 +182,10 @@ impl HttpRpcClient { //---------------------------------------------------------------------------------------------------- TESTS #[cfg(test)] mod tests { - use super::*; use hex_literal::hex; + use super::*; + /// Assert the default address is localhost. #[tokio::test] async fn localhost() { @@ -197,7 +196,7 @@ mod tests { #[ignore] // FIXME: doesn't work in CI, we need a real unrestricted node #[tokio::test] async fn get() { - #[allow(clippy::too_many_arguments)] + #[expect(clippy::too_many_arguments)] async fn assert_eq( rpc: &HttpRpcClient, height: usize, diff --git a/test-utils/src/rpc/data/macros.rs b/test-utils/src/rpc/data/macros.rs index 632917a..63a214c 100644 --- a/test-utils/src/rpc/data/macros.rs +++ b/test-utils/src/rpc/data/macros.rs @@ -156,13 +156,5 @@ macro_rules! define_request_and_response_doc_test { "```\n", ) }; - - // No doc test. - ( - $name:ident, - $test:ident, - ) => { - "" - }; } pub(super) use define_request_and_response_doc_test; diff --git a/test-utils/src/rpc/data/other.rs b/test-utils/src/rpc/data/other.rs index 80a48ab..9af6d8b 100644 --- a/test-utils/src/rpc/data/other.rs +++ b/test-utils/src/rpc/data/other.rs @@ -8,8 +8,7 @@ define_request_and_response! { // `(other)` adds a JSON sanity-check test. get_height (other), GET_HEIGHT: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "hash": "68bb1a1cff8e2a44c3221e8e1aff80bc6ca45d06fa8eff4d2a3a7ac31d4efe3f", @@ -53,8 +52,7 @@ r#"{ define_request_and_response! 
{ get_alt_blocks_hashes (other), GET_ALT_BLOCKS_HASHES: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "blks_hashes": ["8ee10db35b1baf943f201b303890a29e7d45437bd76c2bd4df0d2f2ee34be109"], @@ -134,8 +132,7 @@ r#"{ define_request_and_response! { stop_mining (other), STOP_MINING: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "status": "OK", @@ -146,8 +143,7 @@ r#"{ define_request_and_response! { mining_status (other), MINING_STATUS: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "active": false, @@ -173,8 +169,7 @@ r#"{ define_request_and_response! { save_bc (other), SAVE_BC: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "status": "OK", @@ -185,8 +180,7 @@ r#"{ define_request_and_response! { get_peer_list (other), GET_PEER_LIST: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "gray_list": [{ @@ -291,8 +285,7 @@ r#"{ define_request_and_response! { get_transaction_pool (other), GET_TRANSACTION_POOL: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "credits": 0, @@ -598,8 +591,7 @@ r#"{ define_request_and_response! { get_transaction_pool_stats (other), GET_TRANSACTION_POOL_STATS: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "credits": 0, @@ -657,8 +649,7 @@ r#"{ define_request_and_response! { stop_daemon (other), STOP_DAEMON: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "status": "OK" @@ -668,8 +659,7 @@ r#"{ define_request_and_response! { get_limit (other), GET_LIMIT: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "limit_down": 1280000, @@ -713,8 +703,7 @@ r#"{ define_request_and_response! { get_net_stats (other), GET_NET_STATS: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "start_time": 1721251858, @@ -801,8 +790,7 @@ r#"{ define_request_and_response! { UNDOCUMENTED_ENDPOINT (other), GET_TRANSACTION_POOL_HASHES: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "credits": 0, @@ -835,8 +823,7 @@ r#"{ define_request_and_response! { UNDOCUMENTED_ENDPOINT (other), GET_PUBLIC_NODES: &str, - Request = -r#"{}"#; + Request = "{}"; Response = r#"{ "status": "OK", diff --git a/test-utils/src/test_netzone.rs b/test-utils/src/test_netzone.rs index f1f7582..791533c 100644 --- a/test-utils/src/test_netzone.rs +++ b/test-utils/src/test_netzone.rs @@ -86,9 +86,8 @@ impl, MoneroWireCodec>; type Listener = Pin< Box< - dyn Stream< - Item = Result<(Option, Self::Stream, Self::Sink), std::io::Error>, - > + Send + dyn Stream, Self::Stream, Self::Sink), Error>> + + Send + 'static, >, >; diff --git a/types/src/blockchain.rs b/types/src/blockchain.rs index a38acc6..b6af04f 100644 --- a/types/src/blockchain.rs +++ b/types/src/blockchain.rs @@ -2,7 +2,6 @@ //! //! Tests that assert particular requests lead to particular //! responses are also tested in Cuprate's blockchain database crate. - //---------------------------------------------------------------------------------------------------- Import use std::{ collections::{HashMap, HashSet}, @@ -13,7 +12,7 @@ use monero_serai::block::Block; use crate::{ types::{Chain, ExtendedBlockHeader, OutputOnChain, VerifiedBlockInformation}, - HardFork, + AltBlockInformation, ChainId, HardFork, }; //---------------------------------------------------------------------------------------------------- ReadRequest @@ -129,33 +128,52 @@ pub enum BlockchainReadRequest { CompactChainHistory, /// A request to find the first unknown block ID in a list of block IDs. 
-    ////
+    ///
     /// # Invariant
     /// The [`Vec`] containing the block IDs must be sorted in chronological block
     /// order, or else the returned response is unspecified and meaningless,
     /// as this request performs a binary search.
     FindFirstUnknown(Vec<[u8; 32]>),
 
+    /// A request for all alt blocks in the chain with the given [`ChainId`].
+    AltBlocksInChain(ChainId),
+
     /// TODO
     CumulativeBlockWeightLimit,
 }
 
 //---------------------------------------------------------------------------------------------------- WriteRequest
 /// A write request to the blockchain database.
-///
-/// There is currently only 1 write request to the database,
-/// as such, the only valid [`BlockchainResponse`] to this request is
-/// the proper response for a [`BlockchainResponse::WriteBlock`].
 #[derive(Debug, Clone, PartialEq, Eq)]
-#[allow(clippy::large_enum_variant)] // TODO
 pub enum BlockchainWriteRequest {
     /// Request that a block be written to the database.
     ///
     /// Input is an already verified block.
     WriteBlock(VerifiedBlockInformation),
 
-    /// TODO
-    PopBlocks(u64),
+    /// Write an alternative block to the database.
+    ///
+    /// Input is the alternative block.
+    WriteAltBlock(AltBlockInformation),
+
+    /// A request to pop some blocks from the top of the main chain.
+    ///
+    /// Input is the amount of blocks to pop.
+    ///
+    /// This request flushes all alt-chains from the cache before adding the popped blocks to the
+    /// alt cache.
+    PopBlocks(usize),
+
+    /// A request to reverse the re-org process.
+    ///
+    /// The inner value is the [`ChainId`] of the old main chain.
+    ///
+    /// # Invariant
+    /// It is invalid to call this with a [`ChainId`] that was not returned from [`BlockchainWriteRequest::PopBlocks`].
+    ReverseReorg(ChainId),
+
+    /// A request to flush all alternative blocks.
+    FlushAltBlocks,
 }
 
 //---------------------------------------------------------------------------------------------------- Response
@@ -286,15 +304,24 @@ pub enum BlockchainResponse {
     /// TODO
     CumulativeBlockWeightLimit(usize),
 
-    //------------------------------------------------------ Writes
-    /// Response to [`BlockchainWriteRequest::WriteBlock`].
-    ///
-    /// This response indicates that the requested block has
-    /// successfully been written to the database without error.
-    WriteBlock,
+    /// The response for [`BlockchainReadRequest::AltBlocksInChain`].
+    /// Contains all the alt blocks in the alt-chain in chronological order.
+    AltBlocksInChain(Vec<AltBlockInformation>),
 
-    /// TODO
-    PopBlocks(usize),
+    //------------------------------------------------------ Writes
+    /// A generic Ok response to indicate a request was successfully handled.
+    ///
+    /// Currently the response for:
+    /// - [`BlockchainWriteRequest::WriteBlock`]
+    /// - [`BlockchainWriteRequest::WriteAltBlock`]
+    /// - [`BlockchainWriteRequest::ReverseReorg`]
+    /// - [`BlockchainWriteRequest::FlushAltBlocks`]
+    Ok,
+
+    /// The response for [`BlockchainWriteRequest::PopBlocks`].
+    ///
+    /// The inner [`ChainId`] is the alt-chain ID given to the old main chain blocks.
+    PopBlocks(usize, ChainId),
 }
 
 //---------------------------------------------------------------------------------------------------- Tests
diff --git a/types/src/hard_fork.rs b/types/src/hard_fork.rs
index d3e0b1c..f958fc0 100644
--- a/types/src/hard_fork.rs
+++ b/types/src/hard_fork.rs
@@ -27,7 +27,6 @@ pub enum HardForkError {
 }
 
 /// An identifier for every hard-fork Monero has had.
-#[allow(missing_docs)]
 #[derive(Default, Debug, PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Hash)]
 #[cfg_attr(any(feature = "proptest"), derive(proptest_derive::Arbitrary))]
 #[repr(u8)]
diff --git a/types/src/types.rs b/types/src/types.rs
index 30baf10..79984d4 100644
--- a/types/src/types.rs
+++ b/types/src/types.rs
@@ -1,6 +1,8 @@
 //! Various shared data types in Cuprate.
 
 //---------------------------------------------------------------------------------------------------- Import
+use std::num::NonZero;
+
 use curve25519_dalek::edwards::EdwardsPoint;
 use monero_serai::{
     block::Block,
@@ -40,8 +42,7 @@ pub struct ExtendedBlockHeader {
 //---------------------------------------------------------------------------------------------------- VerifiedTransactionInformation
 /// Verified information of a transaction.
 ///
-/// - If this is in a [`VerifiedBlockInformation`] this represents a valid transaction
-/// - If this is in an [`AltBlockInformation`] this represents a potentially valid transaction
+/// This represents a valid transaction.
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct VerifiedTransactionInformation {
     /// The transaction itself.
@@ -81,6 +82,7 @@ pub struct VerifiedBlockInformation {
     /// [`Block::hash`].
     pub block_hash: [u8; 32],
     /// The block's proof-of-work hash.
+    // TODO: make this an option.
     pub pow_hash: [u8; 32],
     /// The block's height.
     pub height: usize,
@@ -99,7 +101,7 @@ pub struct VerifiedBlockInformation {
 ///
 /// The inner value is meaningless.
 #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
-pub struct ChainId(pub u64);
+pub struct ChainId(pub NonZero<u64>);
 
 //---------------------------------------------------------------------------------------------------- Chain
 /// An identifier for a chain.
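The switch to `NonZero<u64>` above is what lets the blockchain DB reserve the all-zero bit pattern for the main chain in its raw encoding. A minimal standalone round-trip sketch of that niche (stand-in types mirroring the ones added in this diff, not the real `cuprate_types`/`cuprate_blockchain` items):

```rust
use std::num::NonZero;

// Stand-ins mirroring this diff's types (illustrative only).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ChainId(NonZero<u64>);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Chain {
    Main,
    Alt(ChainId),
}

/// DB form: `0` encodes the main chain, any non-zero value is an alt-chain ID.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct RawChain(u64);

impl From<Chain> for RawChain {
    fn from(value: Chain) -> Self {
        match value {
            Chain::Main => Self(0),
            Chain::Alt(id) => Self(id.0.get()),
        }
    }
}

impl From<RawChain> for Chain {
    fn from(value: RawChain) -> Self {
        // `NonZero::new` returns `None` for `0`, which maps back to the main chain.
        NonZero::new(value.0).map_or(Self::Main, |id| Self::Alt(ChainId(id)))
    }
}

fn main() {
    let alt = Chain::Alt(ChainId(NonZero::new(7).unwrap()));

    // Round-trip through the raw DB representation.
    assert_eq!(Chain::from(RawChain::from(alt)), alt);
    assert_eq!(Chain::from(RawChain::from(Chain::Main)), Chain::Main);

    // Because `ChainId` is `NonZero`, `RawChain(0)` can never collide with an alt-chain ID.
    assert_eq!(Chain::from(RawChain(0)), Chain::Main);
}
```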