diff --git a/Cargo.lock b/Cargo.lock index bd1bfd1a..b5b9a0cc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -142,9 +142,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.82" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" +checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" [[package]] name = "arc-swap" @@ -189,9 +189,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e9eabd7a98fe442131a17c316bd9349c43695e49e730c3c8e12cfb5f4da2693" +checksum = "9c90a406b4495d129f00461241616194cb8a032c8d1c53c657f0961d5f8e0498" dependencies = [ "brotli", "flate2", @@ -218,9 +218,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -229,9 +229,9 @@ version = "0.1.80" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -361,9 +361,9 @@ dependencies = [ [[package]] name = "brotli" -version = "5.0.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19483b140a7ac7174d34b5a581b406c64f84da5409d3e09cf4fff604f9270e67" +checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -414,9 +414,9 @@ checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" [[package]] name = "bytemuck" -version = "1.15.0" +version = "1.16.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" +checksum = "78834c15cb5d5efe3452d58b1e8ba890dd62d21907f867f383358198e56ebca5" dependencies = [ "bytemuck_derive", ] @@ -427,9 +427,9 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4da9a32f3fed317401fa3c862968128267c3106685286e15d5aaa3d7389c2f60" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/casper-network/casper-node?branch=feat-2.0#835523fb6ac996335fe5d3c445fcb9b32682c187" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#39dcb74d97879321a9008e238cb11bb4b5276c68" dependencies = [ "bincode", "bytes", @@ -670,7 +670,7 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = "git+https://github.com/casper-network/casper-node?branch=feat-2.0#835523fb6ac996335fe5d3c445fcb9b32682c187" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#39dcb74d97879321a9008e238cb11bb4b5276c68" dependencies = [ "base16", "base64 0.13.1", @@ -713,9 +713,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.96" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "065a29261d53ba54260972629f9ca6bffa69bac13cd1fed61420f7fa68b9f8bd" +checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" dependencies = [ "jobserver", "libc", @@ -767,9 +767,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -943,9 
+943,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -971,7 +971,7 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613e4ee15899913285b7612004bbd490abd605be7b11d35afada5902fb6b91d5" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -1002,7 +1002,7 @@ version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3418329ca0ad70234b9735dc4ceed10af4df60eff9c8e7b06cb5e520d92c3535" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -1013,9 +1013,9 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d150dea618e920167e5973d70ae6ece4385b7164e0d799fe7c122dd0a5d912ad" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -1025,7 +1025,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rustc_version", "syn 1.0.109", @@ -1216,9 +1216,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", @@ -1292,9 +1292,9 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.8" +version = 
"0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38793c55593b33412e3ae40c2c9781ffaa6f438f6f8c10f24e71846fbd7ae01e" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "filetime" @@ -1446,9 +1446,9 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -1494,9 +1494,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", @@ -1636,9 +1636,9 @@ dependencies = [ [[package]] name = "gix-date" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180b130a4a41870edfbd36ce4169c7090bca70e195da783dea088dd973daa59c" +checksum = "367ee9093b0c2b04fd04c5c7c8b6a1082713534eab537597ae343663a518fa99" dependencies = [ "bstr", "itoa", @@ -1775,9 +1775,9 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dff438f14e67e7713ab9332f5fd18c8f20eb7eb249494f6c2bf170522224032" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -2286,9 +2286,9 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -2604,9 +2604,9 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "af7cbce79ec385a1d4f54baa90a76401eb15d9cab93685f62e7e9f942aa00ae2" dependencies = [ "cfg-if", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -2686,9 +2686,9 @@ dependencies = [ [[package]] name = "num" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135b08af27d103b0a51f2ae0f8632117b7b185ccf931445affa8df530576a41" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" dependencies = [ "num-bigint", "num-complex", @@ -2700,11 +2700,10 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -2734,9 +2733,9 @@ checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa" [[package]] name = "num-complex" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23c6602fda94a57c990fe0df199a035d83576b496aa29f4e634a8ac6004e68a6" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" dependencies = [ "num-traits", ] @@ -2753,7 +2752,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -2780,11 +2779,10 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" dependencies = [ - "autocfg", "num-bigint", "num-integer", "num-traits", @@ -2862,9 +2860,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -2946,9 +2944,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pbkdf2" @@ -3022,9 +3020,9 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -3136,7 +3134,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", "version_check", @@ -3148,7 +3146,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "version_check", ] @@ -3164,9 +3162,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.81" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" +checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" dependencies = [ "unicode-ident", ] @@ -3275,7 +3273,7 @@ version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", ] [[package]] @@ -3488,9 +3486,9 @@ dependencies = [ [[package]] name = "rust-embed" -version = "8.3.0" +version = "8.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb78f46d0066053d16d4ca7b898e9343bc3530f71c61d5ad84cd404ada068745" +checksum = "19549741604902eb99a7ed0ee177a0663ee1eda51a29f71401f166e47e77806a" dependencies = [ "rust-embed-impl", "rust-embed-utils", @@ -3499,22 +3497,22 @@ dependencies = [ [[package]] name = "rust-embed-impl" -version = "8.3.0" +version = "8.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8" +checksum = "cb9f96e283ec64401f30d3df8ee2aaeb2561f34c824381efa24a35f79bf40ee4" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rust-embed-utils", - "syn 2.0.60", + "syn 2.0.63", "walkdir", ] [[package]] name = "rust-embed-utils" -version = "8.3.0" +version = "8.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86f69089032567ffff4eada41c573fc43ff466c7db7c5688b2e7969584345581" +checksum = "38c74a686185620830701348de757fd36bef4aa9680fd23c49fc539ddcc1af32" dependencies = [ "sha2", "walkdir", @@ -3522,9 +3520,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" 
[[package]] name = "rustc_version" @@ -3580,9 +3578,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" +checksum = "092474d1a01ea8278f69e6a358998405fae5b8b963ddaeb2b0b04a128bf1dfb0" [[package]] name = "rusty-fork" @@ -3598,9 +3596,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" @@ -3622,9 +3620,9 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.17" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f55c82c700538496bdc329bb4918a81f87cc8888811bd123cf325a0f2f8d309" +checksum = "fc6e7ed6919cb46507fb01ff1654309219f62b4d603822501b0b80d42f6f21ef" dependencies = [ "dyn-clone", "indexmap 1.9.3", @@ -3635,14 +3633,14 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.17" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83263746fe5e32097f06356968a077f96089739c927a61450efa069905eec108" +checksum = "185f2b7aa7e02d418e453790dde16890256bbd2bcd04b7dc5348811052b53f49" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "serde_derive_internals", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -3684,9 +3682,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25a82fcb49253abcb45cdcb2adf92956060ec0928635eb21b4f7a6d8f25ab0bc" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", "thiserror", ] @@ -3728,15 +3726,15 @@ dependencies = [ [[package]] name 
= "semver" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.200" +version = "1.0.201" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddc6f9cc94d67c0e21aaf7eda3a010fd3af78ebf6e096aa6e2e13c79749cce4f" +checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" dependencies = [ "serde_derive", ] @@ -3762,13 +3760,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.200" +version = "1.0.201" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "856f046b9400cee3c8c94ed572ecdb752444c24528c035cd35882aad6f492bcb" +checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -3777,16 +3775,16 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] name = "serde_json" -version = "1.0.116" +version = "1.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" dependencies = [ "indexmap 2.2.6", "itoa", @@ -4001,7 +3999,7 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "sqlx-core", 
"sqlx-macros-core", @@ -4019,7 +4017,7 @@ dependencies = [ "heck 0.4.1", "hex", "once_cell", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "serde", "serde_json", @@ -4185,7 +4183,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rustversion", "syn 1.0.109", @@ -4198,10 +4196,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rustversion", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -4227,18 +4225,18 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.60" +version = "2.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" +checksum = "bf5be731623ca1a1fb7d8be6f261a3be6d3e2337b8a1f97be944d020c8fcb704" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "unicode-ident", ] @@ -4290,7 +4288,7 @@ checksum = "beca1b4eaceb4f2755df858b88d9b9315b7ccfd1ffd0d7a48a52602301f01a57" dependencies = [ "heck 0.4.1", "proc-macro-error", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -4326,22 +4324,22 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "thiserror" -version = "1.0.59" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" +checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.59" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" +checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -4447,9 +4445,9 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -4568,9 +4566,9 @@ version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -4777,9 +4775,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "utoipa" -version = "4.2.1" +version = "4.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e95b8d4503ee98939fb7024f6da083f7c48ff033cc3cba7521360e1bc6c1470b" +checksum = "c5afb1a60e207dca502682537fefcfd9921e71d0b83e9576060f09abc6efab23" dependencies = [ "indexmap 2.2.6", "serde", @@ -4794,9 +4792,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7bf0e16c02bc4bf5322ab65f10ab1149bdbcaa782cba66dc7057370a3f8190be" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] @@ -4868,7 +4866,7 @@ 
version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", ] @@ -4961,9 +4959,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", "wasm-bindgen-shared", ] @@ -4995,9 +4993,9 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5270,22 +5268,22 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" [[package]] name = "zerocopy" -version = "0.7.33" +version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "087eca3c1eaf8c47b94d02790dd086cd594b912d2043d4de4bfdd466b3befb7c" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.33" +version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f4b6c273f496d8fd4eaf18853e6b448760225dc030ff2c485a786859aea6393" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.63", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 7395daf7..4a8f6c46 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ members = [ anyhow = "1" async-stream = "0.3.4" async-trait = "0.1.77" -casper-types = { git = "https://github.com/casper-network/casper-node", branch = "feat-2.0" } -casper-binary-port = { git = 
"https://github.com/casper-network/casper-node", branch = "feat-2.0" } +casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } +casper-binary-port = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-event-types = { path = "./types", version = "1.0.0" } casper-rpc-sidecar = { path = "./rpc_sidecar", version = "1.0.0" } diff --git a/LEGACY_SSE_EMULATION.md b/LEGACY_SSE_EMULATION.md new file mode 100644 index 00000000..74a3b4bd --- /dev/null +++ b/LEGACY_SSE_EMULATION.md @@ -0,0 +1,566 @@ +# Rationale + +The casper node 2.x produces a different set of SSE events than the 1.x ones. Also, 1.x nodes used 3 sse endpoints (`/events/sigs`, `/events/deploys`, `/events/main`), while 2.x node exposes all SSE events under one firehose endpoint (`/events`). + +Generally the changes in 2.x regarding SSE are backwards incompatible to some extent. To harness all the details and collect all the data clients should adapt the new SSE API. However if some clients are not ready or have no need to adapt to the new SSE API, they can use the legacy SSE emulation. + +SSE emulation is by default turned off, the instruction on how to enable it is in the [main README.md](./README.md) file. + +**BEFORE YOU ENABLE LEGACY SSE EMULATION** please consider the following: + +- The legacy SSE emulation is a temporary solution and can be removed in a future major release. +- The legacy SSE emulation is not a 1:1 mapping of the 2.x events to 1.x events. Some events will be omitted, some will be transformed, some will be passed as is. More details on the limitations of the emulation are explained below. +- The legacy SSE emulation is an additional drain on resources. It will consume more resources than the "native" 2.x SSE API. + +# Premises of legacy SSE emulation + +Currently the only possible emulation is the V1 SSE API. 
Enabling V1 SSE api emulation requires setting `emulate_legacy_sse_apis` to `["V1"]`, like: + +``` +[sse_server] +(...) +emulate_legacy_sse_apis = ["V1"] +(...) +``` + +This will expose three additional sse endpoints: + +- `/events/sigs` -> publishes `ApiVersion`, `FinalitySignature` and `Shutdown` +- `/events/deploys` -> publishes `ApiVersion`, `TransactionAccepted` and `Shutdown` +- `/events/main` -> publishes `ApiVersion`, `BlockAdded`, `DeployProcessed`, `DeployExpired`, `Fault` and `Shutdown` events + +Those endpoints will emit events in the same format as the V1 SSE API of the casper node. There are limitations to what Casper Sidecar can and will do, here is a list of mapping assumptions: + +## Translating `ApiVersion` event + +Legacy SSE event will be the same + +## Translating `BlockAdded` event + +- When the 2.x event emits a V1 block it will be unwrapped and passed as a legacy BlockAdded, for instance a 2.x event like this: + + ```json + { + "BlockAdded": { + "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "block": { + "Version1": { + "hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "header": { + "parent_hash": "90ca56a697f8b1b19cba08c642fd7f04669b8cd49bb9d652fca989f8a9f8bcea", + "state_root_hash": "9cce223fdbeab41dbbcf0b62f3fd857373131378d51776de26bb9f4fefe1e849", + "body_hash": "5f37be399c15b2394af48243ce10a62a7d12769dc5f7740b18ad3bf55bde5271", + "random_bit": true, + "accumulated_seed": "b3e1930565a80a874a443eaadefa1a340927fb8b347729bbd93e93935a47a9e4", + "era_end": { + "era_report": { + "equivocators": [ + "0203c9da857cfeccf001ce00720ae2e0d083629858b60ac05dd285ce0edae55f0c8e", + "02026fb7b629a2ec0132505cdf036f6ffb946d03a1c9b5da57245af522b842f145be" + ], + "rewards": [ + { + "validator": "01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25", + "amount": 129457537 + } + ], + "inactive_validators": [] + }, + "next_era_validator_weights": [ + { + "validator": 
"0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b", + "weight": "1" + }, + { + "validator": "02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c", + "weight": "2" + } + ] + }, + "timestamp": "2024-04-25T20:00:35.640Z", + "era_id": 601701, + "height": 6017012, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "0203426736da2554ebf1f8ee1d2ce4ab11b1e33419d7dfc1ce2fe1945faf00bacc9e", + "deploy_hashes": [ + "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", + "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" + ], + "transfer_hashes": [ + "3e30b6c1c5dbca9277425846b42dc832cd3d8ce889c38d6bfc8bd95b3e1c403e", + "c990ba47146270655eaacc53d4115cbd980697f3d4e9c76bccfdfce82af6ce08" + ] + } + } + } + } + } + ``` + + will be translated to 1.x emulated event: + + ```json + { + "BlockAdded": { + "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "block": { + "hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "header": { + "parent_hash": "90ca56a697f8b1b19cba08c642fd7f04669b8cd49bb9d652fca989f8a9f8bcea", + "state_root_hash": "9cce223fdbeab41dbbcf0b62f3fd857373131378d51776de26bb9f4fefe1e849", + "body_hash": "5f37be399c15b2394af48243ce10a62a7d12769dc5f7740b18ad3bf55bde5271", + "random_bit": true, + "accumulated_seed": "b3e1930565a80a874a443eaadefa1a340927fb8b347729bbd93e93935a47a9e4", + "era_end": { + "era_report": { + "equivocators": [ + "0203c9da857cfeccf001ce00720ae2e0d083629858b60ac05dd285ce0edae55f0c8e", + "02026fb7b629a2ec0132505cdf036f6ffb946d03a1c9b5da57245af522b842f145be" + ], + "rewards": [ + { + "validator": "01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25", + "amount": 129457537 + } + ], + "inactive_validators": [] + }, + "next_era_validator_weights": [ + { + "validator": "0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b", + "weight": "1" + }, + { + "validator": 
"02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c", + "weight": "2" + } + ] + }, + "timestamp": "2024-04-25T20:00:35.640Z", + "era_id": 601701, + "height": 6017012, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "0203426736da2554ebf1f8ee1d2ce4ab11b1e33419d7dfc1ce2fe1945faf00bacc9e", + "deploy_hashes": [ + "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", + "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" + ], + "transfer_hashes": [ + "3e30b6c1c5dbca9277425846b42dc832cd3d8ce889c38d6bfc8bd95b3e1c403e", + "c990ba47146270655eaacc53d4115cbd980697f3d4e9c76bccfdfce82af6ce08" + ] + } + } + } + } + ``` + +- When the 2.x event emits a V2 block the following rules apply: + + - `block_hash` will be copied from V2 to V1 + - `block.block_hash` will be copied from V2 to V1 + - `block.header.era_end`: + - if the era_end is a V1 variety - it will be copied + - if the era_end is a V2 variety: + - V2 `next_era_validator_weights` will be copied from V2 `next_era_validator_weights` + - V1 `era_report` will be assembled from V2 `era_end.equivocators`, `era_end.rewards` and `era_end.inactive_validators` fields + - IF one of the `rewards` contains a reward that doesn't fit in a u64 (because V2 has U512 type in rewards values) - the whole `era_end` **WILL BE OMITTED** from the legacy V1 block (value None) + - V2 field `next_era_gas_price` has no equivalent in V1 and will be omitted + - `block.header.current_gas_price` this field only exists in V2 and will be omitted from the V1 block header + - `block.header.proposer` will be copied from V2 to V1 `block.body.proposer` + - other `block.header.*` fields will be copied from V2 to V1 + - `block.body.deploy_hashes` will be based on V2 `block.body.standard` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.deploy_hashes` array + - `block.body.transfer_hashes` will be based on V2 `block.body.mint` transactions. 
Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.transfer_hashes` array. + + An example of the above rules. + Input V2 BlockAdded: + + ```json + { + "BlockAdded": { + "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "block": { + "Version2": { + "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "header": { + "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", + "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", + "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", + "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", + "random_bit": false, + "accumulated_seed": "a0e424710f4fba036ba450b40f2bd7a842b176cf136f3af1952a2a13eb02616c", + "era_end": { + "equivocators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", + "0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", + "020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" + ], + "inactive_validators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56" + ], + "next_era_validator_weights": [ + { + "validator": "02038b238d774c3c4228a0430e3a078e1a2533f9c87cccbcf695637502d8d6057a63", + "weight": "1" + }, + { + "validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", + "weight": "2" + } + ], + "rewards": { + "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc": "749546792", + "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2": "788342677", + "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec": "86241635", + "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c": "941794198" + }, + "next_era_gas_price": 1 + }, + "timestamp": 
"2024-04-25T20:31:39.895Z", + "era_id": 419571, + "height": 4195710, + "protocol_version": "1.0.0", + "current_gas_price": 1 + }, + "body": { + "transactions": { + "0": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80" + }, + { + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e82" + }], + "1": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e83" + }, + { + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e84" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e85" + }], + "2": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e86" + }, + { + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e87" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e88" + }], + "3": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89" + }, + { + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e91" + }] + } + "rewarded_signatures": [[240], [0], [0]] + } + } + } + } + } + ``` + + Output legacy BlockAdded: + + ```json + { + "BlockAdded": { + "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "block": { + "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "header": { + "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", + "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", + "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", + "random_bit": false, + "accumulated_seed": "a0e424710f4fba036ba450b40f2bd7a842b176cf136f3af1952a2a13eb02616c", + "era_end": { + "era_report": { + "equivocators": 
[ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", + "0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", + "020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" + ], + "rewards": [ + { + "validator": "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c", + "amount": 941794198 + }, + { + "validator": "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2", + "amount": 788342677 + }, + { + "validator": "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc", + "amount": 749546792 + }, + { + "validator": "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec", + "amount": 86241635 + } + ], + "inactive_validators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56" + ] + }, + "next_era_validator_weights": [ + { + "validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", + "weight": "2" + }, + { + "validator": "02038b238d774c3c4228a0430e3a078e1a2533f9c87cccbcf695637502d8d6057a63", + "weight": "1" + } + ] + }, + "timestamp": "2024-04-25T20:31:39.895Z", + "era_id": 419571, + "height": 4195710, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", + "deploy_hashes": [ + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89", + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90" + ], + "transfer_hashes": [ + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80", + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81" + ] + } + } + } + } + ``` + +## Translating `TransactionAccepted` event + +- If the event is a V1 variant - it will be unwrapped and passed, so a 2.x event: + ```json + { + "TransactionAccepted": { + "Deploy": { + "hash": "5a7709969c210db93d3c21bf49f8bf705d7c75a01609f606d04b0211af171d43", + 
"header": { + "account": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "timestamp": "2020-08-07T01:28:27.360Z", + "ttl": "4m 22s", + "gas_price": 72, + "body_hash": "aa2a111c086628a161001160756c5884e32fde0356bb85f484a3e55682ad089f", + "dependencies": [], + "chain_name": "casper-example" + }, + "payment": { + "StoredContractByName": { + "name": "casper-example", + "entry_point": "example-entry-point", + "args": [ + [ + "amount", + { + "cl_type": "U512", + "bytes": "0400f90295", + "parsed": "2500000000" + } + ] + ] + } + }, + "session": { + "StoredContractByHash": { + "hash": "dfb621e7012df48fe1d40fd8015b5e2396c477c9587e996678551148a06d3a89", + "entry_point": "8sY9fUUCwoiFZmxKo8kj", + "args": [ + [ + "YbZWtEuL4D6oMTJmUWvj", + { + "cl_type": { + "List": "U8" + }, + "bytes": "5a000000909ffe7807b03a5db0c3c183648710db16d408d8425a4e373fc0422a4efed1ab0040bc08786553fcac4521528c9fafca0b0fb86f4c6e9fb9db7a1454dda8ed612c4ea4c9a6378b230ae1e3c236e37d6ebee94339a56cb4be582a", + "parsed": [144, 159, 254, 120, 7] + } + ] + ] + } + }, + "approvals": [ + { + "signer": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "signature": "025d0a7ba37bebe6774681ca5adecb70fa4eef56821eb344bf0f6867e171a899a87edb2b8bf70f2cb47a1670a6baf2cded1fad535ee53a2f65da91c82ebf30945b" + } + ] + } + } + } + ``` + will be translated to legacy `DeployAccepted`: + ```json + { + "DeployAccepted": { + "hash": "5a7709969c210db93d3c21bf49f8bf705d7c75a01609f606d04b0211af171d43", + "header": { + "account": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "timestamp": "2020-08-07T01:28:27.360Z", + "ttl": "4m 22s", + "gas_price": 72, + "body_hash": "aa2a111c086628a161001160756c5884e32fde0356bb85f484a3e55682ad089f", + "dependencies": [], + "chain_name": "casper-example" + }, + "payment": { + "StoredContractByName": { + "name": "casper-example", + "entry_point": "example-entry-point", + "args": [ + [ + "amount", + { + "cl_type": "U512", + "bytes": 
"0400f90295", + "parsed": "2500000000" + } + ] + ] + } + }, + "session": { + "StoredContractByHash": { + "hash": "dfb621e7012df48fe1d40fd8015b5e2396c477c9587e996678551148a06d3a89", + "entry_point": "8sY9fUUCwoiFZmxKo8kj", + "args": [ + [ + "YbZWtEuL4D6oMTJmUWvj", + { + "cl_type": { + "List": "U8" + }, + "bytes": "5a000000909ffe7807b03a5db0c3c183648710db16d408d8425a4e373fc0422a4efed1ab0040bc08786553fcac4521528c9fafca0b0fb86f4c6e9fb9db7a1454dda8ed612c4ea4c9a6378b230ae1e3c236e37d6ebee94339a56cb4be582a", + "parsed": [144, 159, 254, 120, 7] + } + ] + ] + } + }, + "approvals": [ + { + "signer": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "signature": "025d0a7ba37bebe6774681ca5adecb70fa4eef56821eb344bf0f6867e171a899a87edb2b8bf70f2cb47a1670a6baf2cded1fad535ee53a2f65da91c82ebf30945b" + } + ] + } + } + ``` + +* If the event is a V2 variant - it will be omitted so a 2.x event like: + ``` + { + "TransactionAccepted": { + "Version1": { + ... + } + } + } + ``` + will be omitted from the legacy SSE streams + +## Translating `TransactionExpired` event + +- If it's a Deploy variety it will be unpacked and sent. So a 2.x `TransactionExpired` event: + + ```json + { + "TransactionExpired": { + "transaction_hash": { + "Deploy": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" + } + } + } + ``` + + will be sent as a legacy `DeployExpired` event: + + ```json + { + "DeployExpired": { + "deploy_hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" + } + } + ``` + +* If it's a Version1 variant it will be omitted from legacy SSE streams. So a 2.x `TransactionExpired` event: + + ```json + { + "TransactionExpired": { + "Version1": { + "hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" + } + } + } + ``` + + will be omitted + +## Translating `TransactionProcessed` event. + +- If `transaction_hash` field is a `Version1`, the event will be ignored. 
+- If `transaction_hash` field is a `Deploy`, its value will be used as `DeployProcessed.deploy_hash` + - If `initiator_addr` field is not a `PublicKey` type, the event will be omitted. + - If `initiator_addr` field is a `PublicKey` type, its value will be used as `DeployProcessed.account` + - `timestamp`, `ttl`, `block_hash` will be filled from analogous fields in the `TransactionProcessed` event + - If `execution_result` is a `Version1` type, its value will be copied as-is to the `DeployProcessed.execution_result` field. + - If `execution_result` is a `Version2` type, please see [this paragraph](#translating-executionresultv2) + +### Translating `ExecutionResultV2`. + +- When translating `ExecutionResultV2` (later in this paragraph called `ex_v2`) to legacy `ExecutionResult` (later in this paragraph called `ex_v1`) the following rules apply: + - if `ex_v2.error_message` is not empty, the `ExecutionResult` will be of type `Failure` and `ex_v1.error_message` will be set to that value. Otherwise `ex_v1` will be of type `Success` + - `ex_v1.cost` will be set to `ex_v2.cost` + - `ex_v1.transfers` will always be an empty list since 2.x node doesn't use the notion of `TransferAddr` anymore + - `ex_v1.effect` will be populated based on `ex_v2.effects` field applying rules from paragraph [Translating Effects from V2](#translating-effects-from-v2) + +### Translating `Effects` from V2 + +- Output `operations` field will always be an empty list, since 2.x node no longer uses this concept for execution results +- For `transforms` the objects will be constructed based on `ex_v2.effects` with the following exceptions: + - V2 `AddKeys` transform will be translated to V1 `NamedKeys` transform. + - V2 `Write` transform will be translated applying rules from paragraph [Translating Write transform from V2](#translating-write-transform-from-v2). 
If at least one `Write` transform is not translatable (in the paragraph it will be denoted as yielding a `None` value) - the whole `transforms` array will be empty. + +### Translating `Write` transform from V2 + +- When translating `Write` transforms from V2 to V1 the following rules apply: + - For `CLValue`, it will be copied to output as `WriteCLValue` transform + - For `Account` it will be copied to output as `WriteAccount` transform, taking the v2 `account_hash` as value for `WriteAccount`. + - For `ContractWasm` a `WriteContractWasm` transform will be created. Please note that `WriteContractWasm` has no data, so details from V2 will be omitted. + - For `Contract` a `WriteContract` transform will be created. Please note that `WriteContract` has no data, so details from V2 will be omitted. + - For `ContractPackage` a `WriteContractPackage` transform will be created. Please note that `WriteContractPackage` has no data, so details from V2 will be omitted. + - For `LegacyTransfer` a `WriteTransfer` transform will be created. Data will be copied. + - For `DeployInfo` a `WriteDeployInfo` transform will be created. Data will be copied. + - For `EraInfo` a `WriteEraInfo` transform will be created. Data will be copied. + - For `Bid` a `WriteBid` transform will be created. Data will be copied. + - For `Withdraw` a `WriteWithdraw` transform will be created. Data will be copied. + - For `NamedKey` an `AddKeys` transform will be created. Data will be copied. + - For `AddressableEntity` no value will be produced (a `None` value will be yielded). + - For `BidKind` no value will be produced (a `None` value will be yielded). + - For `Package` no value will be produced (a `None` value will be yielded). + - For `ByteCode` no value will be produced (a `None` value will be yielded). + - For `MessageTopic` no value will be produced (a `None` value will be yielded). + - For `Message` no value will be produced (a `None` value will be yielded). 
diff --git a/README.md b/README.md index 088b13c0..2a011248 100644 --- a/README.md +++ b/README.md @@ -260,7 +260,7 @@ emulate_legacy_sse_apis = ["V1"] ``` * `sse_server.enable_server` - If set to true, the SSE server will be enabled. -* `sse_server.emulate_legacy_sse_apis` - A list of legacy Casper node SSE APIs to emulate. The Sidecar will expose SSE endpoints that are compatible with specified versions. Please bear in mind that this feature is an emulation and should be used only for transition periods. In most scenarios, having a 1-to-1 mapping of new messages into old formats is impossible, so this can be a process that loses some data and/or doesn't emit all messages that come from the Casper node. +* `sse_server.emulate_legacy_sse_apis` - A list of legacy Casper node SSE APIs to emulate. The Sidecar will expose SSE endpoints that are compatible with specified versions. Please bear in mind that this feature is an emulation and should be used only for transition periods. In most scenarios, having a 1-to-1 mapping of new messages into old formats is impossible, so this can be a process that loses some data and/or doesn't emit all messages that come from the Casper node. See the [Legacy SSE Emulation](./LEGACY_SSE_EMULATION.md) page for more details. #### Configuring SSE node connections @@ -335,9 +335,7 @@ This setting will expose three legacy SSE endpoints with the following events st * `/events/deploys` - DeployAccepted events * `/events/main` - All other legacy events, including BlockAdded, DeployProcessed, DeployExpired, Fault, Step, and Shutdown events - +See the [Legacy SSE Emulation](./LEGACY_SSE_EMULATION.md) page for more details. 
#### Configuring the event stream diff --git a/event_sidecar/src/event_stream_server.rs b/event_sidecar/src/event_stream_server.rs index 3e92ac3f..8efcab0b 100644 --- a/event_sidecar/src/event_stream_server.rs +++ b/event_sidecar/src/event_stream_server.rs @@ -50,12 +50,7 @@ use warp::Filter; /// that a new client can retrieve the entire set of buffered events if desired. const ADDITIONAL_PERCENT_FOR_BROADCAST_CHANNEL_SIZE: u32 = 20; -pub type OutboundSender = UnboundedSender<( - Option, - SseData, - Option, - Option, -)>; +pub type OutboundSender = UnboundedSender<(Option, SseData, Option)>; #[derive(Debug)] pub(crate) struct EventStreamServer { @@ -115,19 +110,14 @@ impl EventStreamServer { } /// Broadcasts the SSE data to all clients connected to the event stream. - pub(crate) fn broadcast( - &mut self, - sse_data: SseData, - inbound_filter: Option, - maybe_json_data: Option, - ) { + pub(crate) fn broadcast(&mut self, sse_data: SseData, inbound_filter: Option) { let event_index = match sse_data { SseData::ApiVersion(..) => None, _ => Some(self.event_indexer.next_index()), }; let _ = self .sse_data_sender - .send((event_index, sse_data, inbound_filter, maybe_json_data)); + .send((event_index, sse_data, inbound_filter)); } } diff --git a/event_sidecar/src/event_stream_server/http_server.rs b/event_sidecar/src/event_stream_server/http_server.rs index 4d964c25..b5f2e580 100644 --- a/event_sidecar/src/event_stream_server/http_server.rs +++ b/event_sidecar/src/event_stream_server/http_server.rs @@ -17,9 +17,8 @@ use tokio::{ }; use tracing::{error, info, trace}; use wheelbuf::WheelBuf; -pub type InboundData = (Option, SseData, Option, Option); -pub type OutboundReceiver = - mpsc::UnboundedReceiver<(Option, SseData, Option, Option)>; +pub type InboundData = (Option, SseData, Option); +pub type OutboundReceiver = mpsc::UnboundedReceiver<(Option, SseData, Option)>; /// Run the HTTP server. 
/// /// * `server_with_shutdown` is the actual server as a future which can be gracefully shut down. @@ -109,13 +108,12 @@ async fn handle_incoming_data( broadcaster: &broadcast::Sender, ) -> Result<(), ()> { match maybe_data { - Some((maybe_event_index, data, inbound_filter, maybe_json_data)) => { + Some((maybe_event_index, data, inbound_filter)) => { // Buffer the data and broadcast it to subscribed clients. trace!("Event stream server received {:?}", data); let event = ServerSentEvent { id: maybe_event_index, data: data.clone(), - json_data: maybe_json_data, inbound_filter, }; match data { diff --git a/event_sidecar/src/event_stream_server/sse_server.rs b/event_sidecar/src/event_stream_server/sse_server.rs index 9da7a2d4..5d5cc496 100644 --- a/event_sidecar/src/event_stream_server/sse_server.rs +++ b/event_sidecar/src/event_stream_server/sse_server.rs @@ -107,10 +107,6 @@ pub(super) struct ServerSentEvent { pub(super) id: Option, /// Payload of the event pub(super) data: SseData, - #[allow(dead_code)] - /// TODO remove this field in another PR. - /// Optional raw input for the edge-case scenario in which the output needs to receive exactly the same text as we got from inbound. 
- pub(super) json_data: Option, /// Information which endpoint we got the event from pub(super) inbound_filter: Option, } @@ -121,7 +117,6 @@ impl ServerSentEvent { ServerSentEvent { id: None, data: SseData::ApiVersion(client_api_version), - json_data: None, inbound_filter: None, } } @@ -129,7 +124,6 @@ impl ServerSentEvent { ServerSentEvent { id: None, data: SseData::SidecarVersion(version), - json_data: None, inbound_filter: None, } } @@ -672,20 +666,17 @@ mod tests { let api_version = ServerSentEvent { id: None, data: SseData::random_api_version(&mut rng), - json_data: None, inbound_filter: None, }; let block_added = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_block_added(&mut rng), - json_data: None, inbound_filter: None, }; let (sse_data, transaction) = SseData::random_transaction_accepted(&mut rng); let transaction_accepted = ServerSentEvent { id: Some(rng.gen()), data: sse_data, - json_data: None, inbound_filter: None, }; let mut transactions = HashMap::new(); @@ -693,43 +684,36 @@ mod tests { let transaction_processed = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_transaction_processed(&mut rng), - json_data: None, inbound_filter: None, }; let transaction_expired = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_transaction_expired(&mut rng), - json_data: None, inbound_filter: None, }; let fault = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_fault(&mut rng), - json_data: None, inbound_filter: None, }; let finality_signature = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_finality_signature(&mut rng), - json_data: None, inbound_filter: None, }; let step = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_step(&mut rng), - json_data: None, inbound_filter: None, }; let shutdown = ServerSentEvent { id: Some(rng.gen()), data: SseData::Shutdown, - json_data: None, inbound_filter: Some(SseFilter::Events), }; let sidecar_api_version = ServerSentEvent { id: Some(rng.gen()), data: 
SseData::random_sidecar_version(&mut rng), - json_data: None, inbound_filter: None, }; @@ -801,20 +785,17 @@ mod tests { let malformed_api_version = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_api_version(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_block_added = ServerSentEvent { id: None, data: SseData::random_block_added(&mut rng), - json_data: None, inbound_filter: None, }; let (sse_data, transaction) = SseData::random_transaction_accepted(&mut rng); let malformed_transaction_accepted = ServerSentEvent { id: None, data: sse_data, - json_data: None, inbound_filter: None, }; let mut transactions = HashMap::new(); @@ -822,37 +803,31 @@ mod tests { let malformed_transaction_processed = ServerSentEvent { id: None, data: SseData::random_transaction_processed(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_transaction_expired = ServerSentEvent { id: None, data: SseData::random_transaction_expired(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_fault = ServerSentEvent { id: None, data: SseData::random_fault(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_finality_signature = ServerSentEvent { id: None, data: SseData::random_finality_signature(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_step = ServerSentEvent { id: None, data: SseData::random_step(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_shutdown = ServerSentEvent { id: None, data: SseData::Shutdown, - json_data: None, inbound_filter: None, }; @@ -876,7 +851,7 @@ mod tests { } #[allow(clippy::too_many_lines)] - async fn should_filter_duplicate_events(path_filter: &str) { + async fn should_filter_duplicate_events(path_filter: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut transactions = HashMap::new(); @@ -972,19 +947,46 @@ mod tests { received_event_str = starts_with_data .replace_all(received_event_str.as_str(), "") .into_owned(); - let 
received_data = - serde_json::from_str::(received_event_str.as_str()).unwrap(); - let expected_data = serde_json::to_value(&expected_data).unwrap(); - assert_eq!(expected_data, received_data); + if is_legacy_endpoint { + let maybe_legacy = LegacySseData::from(&expected_data); + assert!(maybe_legacy.is_some()); + let input_legacy = maybe_legacy.unwrap(); + let got_legacy = + serde_json::from_str::(received_event_str.as_str()).unwrap(); + assert_eq!(got_legacy, input_legacy); + } else { + let received_data = + serde_json::from_str::(received_event_str.as_str()).unwrap(); + let expected_data = serde_json::to_value(&expected_data).unwrap(); + assert_eq!(expected_data, received_data); + } } } } + #[tokio::test] + async fn should_filter_duplicate_main_events() { + should_filter_duplicate_events(SSE_API_MAIN_PATH, true).await + } + /// This test checks that deploy-accepted events from the initial stream which are duplicated in + /// the ongoing stream are filtered out. + #[tokio::test] + async fn should_filter_duplicate_deploys_events() { + should_filter_duplicate_events(SSE_API_DEPLOYS_PATH, true).await + } + + /// This test checks that signature events from the initial stream which are duplicated in the + /// ongoing stream are filtered out. + #[tokio::test] + async fn should_filter_duplicate_signature_events() { + should_filter_duplicate_events(SSE_API_SIGNATURES_PATH, true).await + } + /// This test checks that main events from the initial stream which are duplicated in the /// ongoing stream are filtered out. #[tokio::test] async fn should_filter_duplicate_firehose_events() { - should_filter_duplicate_events(SSE_API_ROOT_PATH).await + should_filter_duplicate_events(SSE_API_ROOT_PATH, false).await } // Returns `count` random SSE events. 
The events will have sequential IDs starting from `start_id`, and if the path filter @@ -1000,9 +1002,9 @@ mod tests { (start_id..(start_id + count as u32)) .map(|id| { let data = match path_filter { - SSE_API_MAIN_PATH => SseData::random_block_added(rng), + SSE_API_MAIN_PATH => make_legacy_compliant_random_block(rng), SSE_API_DEPLOYS_PATH => { - let (event, transaction) = SseData::random_transaction_accepted(rng); + let (event, transaction) = make_legacy_compliant_random_transaction(rng); assert!(transactions .insert(transaction.hash(), transaction) .is_none()); @@ -1030,13 +1032,32 @@ mod tests { ServerSentEvent { id: Some(id), data, - json_data: None, inbound_filter: None, } }) .collect() } + fn make_legacy_compliant_random_transaction(rng: &mut TestRng) -> (SseData, Transaction) { + loop { + let (event, transaction) = SseData::random_transaction_accepted(rng); + let legacy = LegacySseData::from(&event); + if legacy.is_some() { + return (event, transaction); + } + } + } + + fn make_legacy_compliant_random_block(rng: &mut TestRng) -> SseData { + loop { + let block = SseData::random_block_added(rng); + let legacy = LegacySseData::from(&block); + if legacy.is_some() { + return block; + } + } + } + // Returns `NUM_ONGOING_EVENTS` random SSE events for the ongoing stream containing // duplicates taken from the end of the initial stream. Allows for the full initial stream // to be duplicated except for its first event (the `ApiVersion` one) which has no ID. 
diff --git a/event_sidecar/src/event_stream_server/tests.rs b/event_sidecar/src/event_stream_server/tests.rs index 7485354b..2e248975 100644 --- a/event_sidecar/src/event_stream_server/tests.rs +++ b/event_sidecar/src/event_stream_server/tests.rs @@ -1,11 +1,16 @@ use super::*; +use casper_event_types::legacy_sse_data::LegacySseData; use casper_types::{testing::TestRng, ProtocolVersion}; use futures::{join, Stream, StreamExt}; use http::StatusCode; use pretty_assertions::assert_eq; use reqwest::Response; use serde_json::Value; -use sse_server::{Id, TransactionAccepted, QUERY_FIELD, SSE_API_ROOT_PATH as ROOT_PATH}; +use sse_server::{ + Id, TransactionAccepted, QUERY_FIELD, SSE_API_DEPLOYS_PATH as DEPLOYS_PATH, + SSE_API_MAIN_PATH as MAIN_PATH, SSE_API_ROOT_PATH as ROOT_PATH, + SSE_API_SIGNATURES_PATH as SIGS_PATH, +}; use std::{ collections::HashMap, error::Error, @@ -190,7 +195,7 @@ impl Drop for ServerStopper { struct TestFixture { storage_dir: TempDir, protocol_version: ProtocolVersion, - events: Vec<(SseData, Option)>, + events: Vec, first_event_id: Id, server_join_handle: Option>, server_stopper: ServerStopper, @@ -206,7 +211,7 @@ impl TestFixture { let protocol_version = ProtocolVersion::from_parts(1, 2, 3); let mut transactions = HashMap::new(); - let events: Vec<(SseData, Option)> = (0..EVENT_COUNT) + let events: Vec = (0..EVENT_COUNT) .map(|i| match i % DISTINCT_EVENTS_COUNT { 0 => SseData::random_block_added(rng), 1 => { @@ -223,7 +228,6 @@ impl TestFixture { 6 => SseData::random_finality_signature(rng), _ => unreachable!(), }) - .map(|x| (x, None)) .collect(); TestFixture { storage_dir, @@ -284,10 +288,8 @@ impl TestFixture { }; let api_version_event = SseData::ApiVersion(protocol_version); - server.broadcast(api_version_event.clone(), Some(SseFilter::Events), None); - for (id, (event, maybe_json_data)) in - events.iter().cycle().enumerate().take(event_count as usize) - { + server.broadcast(api_version_event.clone(), Some(SseFilter::Events)); + for (id, 
event) in events.iter().cycle().enumerate().take(event_count as usize) { if server_stopper.should_stop() { debug!("stopping server early"); return; @@ -295,13 +297,7 @@ impl TestFixture { server_behavior .wait_for_clients((id as Id).wrapping_add(first_event_id)) .await; - server.broadcast( - event.clone(), - Some(SseFilter::Events), - maybe_json_data - .as_ref() - .map(|el| serde_json::from_str(el.as_str()).unwrap()), - ); + server.broadcast(event.clone(), Some(SseFilter::Events)); server_behavior.sleep_if_required().await; } @@ -361,12 +357,12 @@ impl TestFixture { .chain( self.events .iter() - .filter(|(event, _)| !matches!(event, SseData::ApiVersion(..))) + .filter(|event| !matches!(event, SseData::ApiVersion(..))) .enumerate() .filter_map(|(id, event)| { let id = id as u128 + self.first_event_id as u128; - if event.0.should_include(filter) { - id_filter(id, &event.0) + if event.should_include(filter) { + id_filter(id, event) } else { None } @@ -661,7 +657,7 @@ fn parse_response(response_text: String, client_id: &str) -> Vec /// * connected before first event /// /// Expected to receive all main, transaction-accepted or signature events depending on `filter`. 
-async fn should_serve_events_with_no_query(path: &str) { +async fn should_serve_events_with_no_query(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -671,15 +667,83 @@ async fn should_serve_events_with_no_query(path: &str) { let url = url(server_address, path, None); let (expected_events, final_id) = fixture.all_filtered_events(path); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; + compare_received_events_for_legacy_endpoints( + is_legacy_endpoint, + expected_events, + received_events, + ); +} - assert_eq!(received_events, expected_events); +/// In legacy endpoints not all input events will be re-emitted to output. If an input (2.x) event is not translatable +/// to 1.x it will be muffled. So we need to adjust the final id to the last event that was 1.x translatable. +fn adjust_final_id( + is_legacy_endpoint: bool, + expected_events: Vec, + final_id: u32, +) -> (Vec, u32) { + let (expected_events, final_id) = if is_legacy_endpoint { + let legacy_compliant_events: Vec = expected_events + .iter() + .filter_map(|event| { + let sse_data = serde_json::from_str::(&event.data).unwrap(); + LegacySseData::from(&sse_data).map(|_| event.clone()) + }) + .collect(); + let id = legacy_compliant_events.last().and_then(|el| el.id).unwrap(); + (legacy_compliant_events, id) + } else { + (expected_events, final_id) + }; + (expected_events, final_id) +} + +/// In legacy endpoints the node produces 2.x compliant sse events, but the node transforms them into legacy format. +/// So to compare we need to apply the translation logic to input 2.x events. 
+fn compare_received_events_for_legacy_endpoints( + is_legacy_endpoint: bool, + expected_events: Vec, + received_events: Vec, +) { + if is_legacy_endpoint { + let expected_legacy_events: Vec = expected_events + .iter() + .filter_map(|event| { + let sse_data = serde_json::from_str::(&event.data).unwrap(); + LegacySseData::from(&sse_data) + }) + .collect(); + let received_legacy_events: Vec = received_events + .iter() + .map(|event| serde_json::from_str::(&event.data).unwrap()) + .collect(); + assert_eq!(received_legacy_events, expected_legacy_events); + } else { + assert_eq!(received_events, expected_events); + } +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_main_events_with_no_query() { + should_serve_events_with_no_query(MAIN_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_deploy_accepted_events_with_no_query() { + should_serve_events_with_no_query(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_signature_events_with_no_query() { + should_serve_events_with_no_query(SIGS_PATH, true).await; } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_firehose_events_with_no_query() { - should_serve_events_with_no_query(ROOT_PATH).await; + should_serve_events_with_no_query(ROOT_PATH, false).await; } /// Client setup: @@ -688,7 +752,7 @@ async fn should_serve_firehose_events_with_no_query() { /// /// Expected to receive main, transaction-accepted or signature events (depending on `path`) from ID 25 /// onwards, as events 25 to 49 should still be in the server buffer. 
-async fn should_serve_events_with_query(path: &str) { +async fn should_serve_events_with_query(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -701,15 +765,36 @@ async fn should_serve_events_with_query(path: &str) { let url = url(server_address, path, Some(start_from_event_id)); let (expected_events, final_id) = fixture.filtered_events(path, start_from_event_id); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; - assert_eq!(received_events, expected_events); + compare_received_events_for_legacy_endpoints( + is_legacy_endpoint, + expected_events, + received_events, + ); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_main_events_with_query() { + should_serve_events_with_query(MAIN_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_deploy_accepted_events_with_query() { + should_serve_events_with_query(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_signature_events_with_query() { + should_serve_events_with_query(SIGS_PATH, true).await; } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_firehose_events_with_query() { - should_serve_events_with_query(ROOT_PATH).await; + should_serve_events_with_query(ROOT_PATH, false).await; } /// Client setup: @@ -718,7 +803,7 @@ async fn should_serve_firehose_events_with_query() { /// /// Expected to receive main, transaction-accepted or signature events (depending on `path`) from ID 25 /// onwards, as events 0 to 24 should have been purged from the server buffer. 
-async fn should_serve_remaining_events_with_query(path: &str) { +async fn should_serve_remaining_events_with_query(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -732,15 +817,36 @@ async fn should_serve_remaining_events_with_query(path: &str) { let url = url(server_address, path, Some(start_from_event_id)); let expected_first_event = connect_at_event_id - BUFFER_LENGTH; let (expected_events, final_id) = fixture.filtered_events(path, expected_first_event); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; - assert_eq!(received_events, expected_events); + compare_received_events_for_legacy_endpoints( + is_legacy_endpoint, + expected_events, + received_events, + ); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_remaining_main_events_with_query() { + should_serve_remaining_events_with_query(MAIN_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_remaining_deploy_accepted_events_with_query() { + should_serve_remaining_events_with_query(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_remaining_signature_events_with_query() { + should_serve_remaining_events_with_query(SIGS_PATH, true).await; } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_remaining_firehose_events_with_query() { - should_serve_remaining_events_with_query(ROOT_PATH).await; + should_serve_remaining_events_with_query(ROOT_PATH, false).await; } /// Client setup: @@ -749,7 +855,7 @@ async fn should_serve_remaining_firehose_events_with_query() { /// /// Expected to receive all main, transaction-accepted or signature events (depending on `path`), as /// event 25 hasn't been added 
to the server buffer yet. -async fn should_serve_events_with_query_for_future_event(path: &str) { +async fn should_serve_events_with_query_for_future_event(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -759,15 +865,36 @@ async fn should_serve_events_with_query_for_future_event(path: &str) { let url = url(server_address, path, Some(25)); let (expected_events, final_id) = fixture.all_filtered_events(path); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; - assert_eq!(received_events, expected_events); + compare_received_events_for_legacy_endpoints( + is_legacy_endpoint, + expected_events, + received_events, + ); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_main_events_with_query_for_future_event() { + should_serve_events_with_query_for_future_event(MAIN_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_deploy_accepted_events_with_query_for_future_event() { + should_serve_events_with_query_for_future_event(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_signature_events_with_query_for_future_event() { + should_serve_events_with_query_for_future_event(SIGS_PATH, true).await; } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_firehose_events_with_query_for_future_event() { - should_serve_events_with_query_for_future_event(ROOT_PATH).await; + should_serve_events_with_query_for_future_event(ROOT_PATH, false).await; } /// Checks that when a server is shut down (e.g. 
for a node upgrade), connected clients don't have @@ -917,8 +1044,9 @@ async fn should_handle_bad_url_query() { fixture.stop_server().await; } +#[allow(clippy::too_many_lines)] /// Check that a server which restarts continues from the previous numbering of event IDs. -async fn should_persist_event_ids(path: &str) { +async fn should_persist_event_ids(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -930,7 +1058,9 @@ async fn should_persist_event_ids(path: &str) { // Consume these and stop the server. let url = url(server_address, path, None); - let (_expected_events, final_id) = fixture.all_filtered_events(path); + let (expected_events, final_id) = fixture.all_filtered_events(path); + let (_expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let _ = subscribe(&url, barrier, final_id, "client 1") .await .unwrap(); @@ -939,7 +1069,6 @@ async fn should_persist_event_ids(path: &str) { }; assert!(first_run_final_id > 0); - { // Start a new server with a client barrier set for just before event ID 100 + 1 (the extra // event being the `Shutdown`). @@ -954,22 +1083,37 @@ async fn should_persist_event_ids(path: &str) { // Consume the events and assert their IDs are all >= `first_run_final_id`. 
let url = url(server_address, path, None); let (expected_events, final_id) = fixture.filtered_events(path, EVENT_COUNT + 1); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client 2") .await .unwrap(); fixture.stop_server().await; - - assert_eq!(received_events, expected_events); assert!(received_events .iter() .skip(1) .all(|event| event.id.unwrap() >= first_run_final_id)); + compare_received_events_for_legacy_endpoints( + is_legacy_endpoint, + expected_events, + received_events, + ); } } +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_persist_deploy_accepted_event_ids() { + should_persist_event_ids(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_persist_signature_event_ids() { + should_persist_event_ids(SIGS_PATH, true).await; +} + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_persist_firehose_event_ids() { - should_persist_event_ids(ROOT_PATH).await; + should_persist_event_ids(ROOT_PATH, false).await; } /// Check that a server handles wrapping round past the maximum value for event IDs. 
diff --git a/event_sidecar/src/lib.rs b/event_sidecar/src/lib.rs index 7be0221f..b623833f 100644 --- a/event_sidecar/src/lib.rs +++ b/event_sidecar/src/lib.rs @@ -104,7 +104,7 @@ pub async fn run( fn start_event_broadcasting( config: &SseEventServerConfig, storage_path: String, - mut outbound_sse_data_receiver: Receiver<(SseData, Option, Option)>, + mut outbound_sse_data_receiver: Receiver<(SseData, Option)>, enable_legacy_filters: bool, ) -> JoinHandle> { let event_stream_server_port = config.event_stream_server.port; @@ -122,10 +122,8 @@ fn start_event_broadcasting( enable_legacy_filters, ) .context("Error starting EventStreamServer")?; - while let Some((sse_data, inbound_filter, maybe_json_data)) = - outbound_sse_data_receiver.recv().await - { - event_stream_server.broadcast(sse_data, inbound_filter, maybe_json_data); + while let Some((sse_data, inbound_filter)) = outbound_sse_data_receiver.recv().await { + event_stream_server.broadcast(sse_data, inbound_filter); } Err::<(), Error>(Error::msg("Event broadcasting finished")) }) @@ -136,7 +134,7 @@ fn start_sse_processors( event_listeners: Vec, sse_data_receivers: Vec>, database: Database, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, ) -> JoinHandle> { tokio::spawn(async move { let mut join_handles = Vec::with_capacity(event_listeners.len()); @@ -167,7 +165,7 @@ fn start_sse_processors( let _ = join_all(join_handles).await; //Send Shutdown to the sidecar sse endpoint let _ = outbound_sse_data_sender - .send((SseData::Shutdown, None, None)) + .send((SseData::Shutdown, None)) .await; // Below sleep is a workaround to allow the above Shutdown to propagate. 
// If we don't do this there is a race condition between handling of the message and dropping of the outbound server @@ -183,7 +181,7 @@ fn start_sse_processors( fn spawn_sse_processor( database: &Database, sse_data_receiver: Receiver, - outbound_sse_data_sender: &Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: &Sender<(SseData, Option)>, connection_config: Connection, api_version_manager: &std::sync::Arc>, ) -> JoinHandle> { @@ -290,9 +288,8 @@ async fn handle_database_save_result( entity_name: &str, entity_identifier: &str, res: Result, - outbound_sse_data_sender: &Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: &Sender<(SseData, Option)>, inbound_filter: Filter, - json_data: Option, build_sse_data: F, ) where F: FnOnce() -> SseData, @@ -300,7 +297,7 @@ async fn handle_database_save_result( match res { Ok(_) => { if let Err(error) = outbound_sse_data_sender - .send((build_sse_data(), Some(inbound_filter), json_data)) + .send((build_sse_data(), Some(inbound_filter))) .await { debug!( @@ -331,7 +328,7 @@ async fn handle_single_event, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, api_version_manager: GuardedApiVersionManager, ) { match sse_event.data { @@ -369,7 +366,6 @@ async fn handle_single_event( sse_event: SseEvent, sqlite_database: Db, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, ) { warn!("Node ({}) is unavailable", sse_event.source.to_string()); let res = sqlite_database @@ -601,11 +591,7 @@ async fn handle_shutdown>, version: ProtocolVersion, - outbound_sse_data_sender: &Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: &Sender<(SseData, Option)>, filter: Filter, enable_event_logging: bool, ) { @@ -632,7 +618,7 @@ async fn handle_api_version( let changed_newest_version = manager_guard.store_version(version); if changed_newest_version { if let Err(error) = outbound_sse_data_sender - .send((SseData::ApiVersion(version), 
Some(filter), None)) + .send((SseData::ApiVersion(version), Some(filter))) .await { debug!( @@ -649,7 +635,7 @@ async fn handle_api_version( async fn sse_processor( inbound_sse_data_receiver: Receiver, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, database: Db, database_supports_multithreaded_processing: bool, enable_event_logging: bool, @@ -687,7 +673,7 @@ async fn sse_processor( mut queue_rx: Receiver, database: Db, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, api_version_manager: GuardedApiVersionManager, enable_event_logging: bool, #[cfg(feature = "additional-metrics")] metrics_sender: Sender<()>, @@ -718,7 +704,7 @@ async fn start_multi_threaded_events_consumer< Db: DatabaseReader + DatabaseWriter + Clone + Send + Sync + 'static, >( mut inbound_sse_data_receiver: Receiver, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, database: Db, enable_event_logging: bool, api_version_manager: GuardedApiVersionManager, @@ -756,7 +742,7 @@ async fn start_single_threaded_events_consumer< Db: DatabaseReader + DatabaseWriter + Clone + Send + Sync, >( mut inbound_sse_data_receiver: Receiver, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, database: Db, enable_event_logging: bool, api_version_manager: GuardedApiVersionManager, diff --git a/event_sidecar/src/testing/fake_event_stream.rs b/event_sidecar/src/testing/fake_event_stream.rs index 28c4c3f3..f3a303ef 100644 --- a/event_sidecar/src/testing/fake_event_stream.rs +++ b/event_sidecar/src/testing/fake_event_stream.rs @@ -194,7 +194,7 @@ async fn do_spam_testing( let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + 
event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); @@ -237,7 +237,7 @@ async fn do_load_testing_transaction( let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); @@ -279,7 +279,7 @@ async fn do_load_testing_step( }); let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); let (test_rng, _) = tokio::join!(scenario_task, broadcasting_task); @@ -314,7 +314,7 @@ async fn handle_realistic_scenario( }); let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); let (test_rng, _) = tokio::join!(scenario_task, broadcasting_task); diff --git a/event_sidecar/src/testing/raw_sse_events_utils.rs b/event_sidecar/src/testing/raw_sse_events_utils.rs index fa02e656..20b8fc5f 100644 --- a/event_sidecar/src/testing/raw_sse_events_utils.rs +++ b/event_sidecar/src/testing/raw_sse_events_utils.rs @@ -15,7 +15,7 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"2.0.1\"}".to_string()), ( Some("0".to_string()), - example_block_added_2_0_0(BLOCK_HASH_3, "3"), + example_block_added_2_0_0(BLOCK_HASH_3, 3u64), ), ] } @@ -26,7 +26,7 @@ pub(crate) mod tests { (Some("0".to_string()), shutdown()), ( Some("1".to_string()), - example_block_added_2_0_0(BLOCK_HASH_1, "1"), + example_block_added_2_0_0(BLOCK_HASH_1, 1u64), ), ] } @@ -50,7 +50,7 @@ pub(crate) mod tests { (None, format!("{{\"ApiVersion\":\"{version}\"}}")), ( Some("1".to_string()), - example_block_added_2_0_0(BLOCK_HASH_2, "2"), + 
example_block_added_2_0_0(BLOCK_HASH_2, 2u64), ), ] } @@ -60,7 +60,7 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"2.0.0\"}".to_string()), ( Some("1".to_string()), - example_block_added_2_0_0(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_2, 2u64), ), ] } @@ -80,7 +80,7 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"2.0.0\"}".to_string()), ( Some("3".to_string()), - example_block_added_2_0_0(BLOCK_HASH_3, "3"), + example_block_added_2_0_0(BLOCK_HASH_3, 3u64), ), ] } @@ -90,11 +90,11 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"2.0.0\"}".to_string()), ( Some("1".to_string()), - example_block_added_2_0_0(BLOCK_HASH_3, "3"), + example_block_added_2_0_0(BLOCK_HASH_3, 3u64), ), ( Some("1".to_string()), - example_block_added_2_0_0(BLOCK_HASH_4, "4"), + example_block_added_2_0_0(BLOCK_HASH_4, 4u64), ), ] } @@ -135,7 +135,7 @@ pub(crate) mod tests { if let SseData::BlockAdded { block_hash, .. } = block_added { let encoded_hash = HexFmt(block_hash.inner()).to_string(); let block_added_raw = - example_block_added_2_0_0(encoded_hash.as_str(), index.as_str()); + example_block_added_2_0_0(encoded_hash.as_str(), (i + start_index) as u64); blocks_added.push((Some(index), block_added_raw)); } else { panic!("random_block_added didn't return SseData::BlockAdded"); diff --git a/event_sidecar/src/tests/integration_tests.rs b/event_sidecar/src/tests/integration_tests.rs index 53255438..b9f8e64d 100644 --- a/event_sidecar/src/tests/integration_tests.rs +++ b/event_sidecar/src/tests/integration_tests.rs @@ -495,7 +495,7 @@ async fn sidecar_should_use_start_from_if_database_is_empty() { ) = build_test_config(); let data_of_node = vec![( Some("2".to_string()), - example_block_added_2_0_0(BLOCK_HASH_3, "3"), + example_block_added_2_0_0(BLOCK_HASH_3, 3u64), )]; let mut node_mock = MockNodeBuilder { version: "2.0.0".to_string(), diff --git a/listener/src/connection_manager.rs b/listener/src/connection_manager.rs index 4da20ad0..8b63a2cd 100644 --- 
a/listener/src/connection_manager.rs +++ b/listener/src/connection_manager.rs @@ -236,12 +236,8 @@ impl DefaultConnectionManager { error!(error_message); return Err(Error::msg(error_message)); } - Ok((sse_data, needs_raw_json)) => { + Ok(sse_data) => { let payload_size = event.data.len(); - let mut raw_json_data = None; - if needs_raw_json { - raw_json_data = Some(event.data); - } self.observe_bytes(sse_data.type_label(), payload_size); let api_version = self.api_version.ok_or(anyhow!( "Expected ApiVersion to be present when handling messages." @@ -250,7 +246,6 @@ impl DefaultConnectionManager { event.id.parse().unwrap_or(0), sse_data, self.bind_address.clone(), - raw_json_data, self.filter.clone(), api_version.to_string(), self.network_name.clone(), @@ -293,7 +288,7 @@ impl DefaultConnectionManager { match deserialize(&event.data) { //at this point we // are assuming that it's an ApiVersion and ApiVersion is the same across all semvers - Ok((SseData::ApiVersion(semver), _)) => { + Ok(SseData::ApiVersion(semver)) => { let payload_size = event.data.len(); self.observe_bytes("ApiVersion", payload_size); self.api_version = Some(semver); @@ -301,7 +296,6 @@ impl DefaultConnectionManager { 0, SseData::ApiVersion(semver), self.bind_address.clone(), - None, self.filter.clone(), semver.to_string(), self.network_name.clone(), @@ -413,8 +407,8 @@ pub mod tests { #[tokio::test] async fn given_data_without_api_version_should_fail() { let data = vec![ - example_block_added_2_0_0(BLOCK_HASH_1, "1"), - example_block_added_2_0_0(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_1, 1u64), + example_block_added_2_0_0(BLOCK_HASH_2, 2u64), ]; let connector = Box::new(MockSseConnection::build_with_data(data)); let (mut connection_manager, _, _) = build_manager(connector, "test".to_string()); @@ -432,8 +426,8 @@ pub mod tests { async fn given_data_should_pass_data() { let data = vec![ example_api_version(), - example_block_added_2_0_0(BLOCK_HASH_1, "1"), - 
example_block_added_2_0_0(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_1, 1u64), + example_block_added_2_0_0(BLOCK_HASH_2, 2u64), ]; let connector = Box::new(MockSseConnection::build_with_data(data)); let (mut connection_manager, data_tx, event_ids) = @@ -452,7 +446,7 @@ pub mod tests { let data = vec![ example_api_version(), "XYZ".to_string(), - example_block_added_2_0_0(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_2, 2u64), ]; let connector = Box::new(MockSseConnection::build_with_data(data)); let (mut connection_manager, data_tx, _event_ids) = diff --git a/listener/src/types.rs b/listener/src/types.rs index db1b361a..85609f3f 100644 --- a/listener/src/types.rs +++ b/listener/src/types.rs @@ -32,9 +32,6 @@ pub struct SseEvent { pub data: SseData, /// Source from which we got the message pub source: Url, - /// In some cases it is required to emit the data exactly as we got it from the node. - /// For those situations we store the exact text of the raw payload in this field. - pub json_data: Option, /// Info from which filter we received the message. For some events (Shutdown in particularly) we want to push only to the same outbound as we received them from so we don't duplicate. pub inbound_filter: Filter, /// Api version which was reported for the node from which the event was received. 
@@ -48,7 +45,6 @@ impl SseEvent { id: u32, data: SseData, mut source: Url, - json_data: Option, inbound_filter: Filter, api_version: String, network_name: String, @@ -60,7 +56,6 @@ impl SseEvent { id, data, source, - json_data, inbound_filter, api_version, network_name, diff --git a/resources/test/rpc_schema.json b/resources/test/rpc_schema.json index 326deb2e..29e6c91b 100644 --- a/resources/test/rpc_schema.json +++ b/resources/test/rpc_schema.json @@ -369,7 +369,7 @@ ] }, "execution_info": { - "block_hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd", + "block_hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884", "block_height": 10, "execution_result": { "Version2": { @@ -567,7 +567,7 @@ } }, "execution_info": { - "block_hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd", + "block_hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884", "block_height": 10, "execution_result": { "Version2": { @@ -800,28 +800,48 @@ "api_version": "2.0.0", "entity": { "AddressableEntity": { - "protocol_version": "2.0.0", - "entity_kind": { - "Account": "account-hash-e94daaff79c2ab8d9c31d9c3058d7d0a0dd31204a5638dc1451fa67b2e3fb88c" + "entity": { + "protocol_version": "2.0.0", + "entity_kind": { + "Account": "account-hash-e94daaff79c2ab8d9c31d9c3058d7d0a0dd31204a5638dc1451fa67b2e3fb88c" + }, + "package_hash": "package-0000000000000000000000000000000000000000000000000000000000000000", + "byte_code_hash": "byte-code-0000000000000000000000000000000000000000000000000000000000000000", + "main_purse": "uref-09480c3248ef76b603d386f3f4f8a5f87f597d4eaffd475433f861af187ab5db-007", + "associated_keys": [ + { + "account_hash": "account-hash-e94daaff79c2ab8d9c31d9c3058d7d0a0dd31204a5638dc1451fa67b2e3fb88c", + "weight": 1 + } + ], + "action_thresholds": { + "deployment": 1, + "upgrade_management": 1, + "key_management": 1 + }, + "message_topics": [ + { + "topic_name": "topic", + "topic_name_hash": 
"0000000000000000000000000000000000000000000000000000000000000000" + } + ] }, - "package_hash": "package-0000000000000000000000000000000000000000000000000000000000000000", - "byte_code_hash": "byte-code-0000000000000000000000000000000000000000000000000000000000000000", - "main_purse": "uref-09480c3248ef76b603d386f3f4f8a5f87f597d4eaffd475433f861af187ab5db-007", - "associated_keys": [ + "named_keys": [ { - "account_hash": "account-hash-e94daaff79c2ab8d9c31d9c3058d7d0a0dd31204a5638dc1451fa67b2e3fb88c", - "weight": 1 + "name": "key", + "key": "hash-0000000000000000000000000000000000000000000000000000000000000000" } ], - "action_thresholds": { - "deployment": 1, - "upgrade_management": 1, - "key_management": 1 - }, - "message_topics": [ + "entry_points": [ { - "topic_name": "topic", - "topic_name_hash": "0000000000000000000000000000000000000000000000000000000000000000" + "V1CasperVm": { + "name": "entry_point", + "args": [], + "ret": "Unit", + "access": "Public", + "entry_point_type": "Caller", + "entry_point_payment": "Caller" + } } ] } @@ -1006,7 +1026,7 @@ { "name": "state_identifier", "value": { - "BlockHash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd" + "BlockHash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884" } }, { @@ -1057,7 +1077,9 @@ "era_id": 1, "height": 10, "protocol_version": "1.0.0", - "current_gas_price": 1 + "proposer": "01d9bf2148748a85c89da5aad8ee0b0fc2d105fd39d41a4c796536354f0ae2900c", + "current_gas_price": 1, + "last_switch_block_hash": "0909090909090909090909090909090909090909090909090909090909090909" } }, "stored_value": { @@ -1451,7 +1473,7 @@ "chainspec_name": "casper-example", "starting_state_root_hash": "0000000000000000000000000000000000000000000000000000000000000000", "last_added_block_info": { - "hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd", + "hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884", "timestamp": "2020-11-17T00:39:24.072Z", 
"era_id": 1, "height": 10, @@ -1633,7 +1655,7 @@ { "name": "block_identifier", "value": { - "Hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd" + "Hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884" } } ], @@ -1644,11 +1666,11 @@ "block_with_signatures": { "block": { "Version2": { - "hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd", + "hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884", "header": { "parent_hash": "0707070707070707070707070707070707070707070707070707070707070707", "state_root_hash": "0808080808080808080808080808080808080808080808080808080808080808", - "body_hash": "e49c0b878951cb6685cbfe86aa830090b2f8dab96304cb46ffa466879fdc8ae4", + "body_hash": "7929063af6c8431a679fd0fda108fa7e64e42a9e264df4ec8bb42ca877373631", "random_bit": true, "accumulated_seed": "ac979f51525cfd979b14aa7dc0737c5154eabe0db9280eceaa8dc8d2905b20d5", "era_end": { @@ -1679,30 +1701,33 @@ "era_id": 1, "height": 10, "protocol_version": "1.0.0", - "current_gas_price": 1 + "proposer": "01d9bf2148748a85c89da5aad8ee0b0fc2d105fd39d41a4c796536354f0ae2900c", + "current_gas_price": 1, + "last_switch_block_hash": "0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a" }, "body": { - "proposer": "01d9bf2148748a85c89da5aad8ee0b0fc2d105fd39d41a4c796536354f0ae2900c", - "mint": [ - { - "Version1": "1414141414141414141414141414141414141414141414141414141414141414" - } - ], - "auction": [ - { - "Version1": "1515151515151515151515151515151515151515151515151515151515151515" - } - ], - "install_upgrade": [ - { - "Version1": "1616161616161616161616161616161616161616161616161616161616161616" - } - ], - "standard": [ - { - "Version1": "1717171717171717171717171717171717171717171717171717171717171717" - } - ], + "transactions": { + "0": [ + { + "Version1": "1717171717171717171717171717171717171717171717171717171717171717" + } + ], + "1": [ + { + "Version1": 
"1414141414141414141414141414141414141414141414141414141414141414" + } + ], + "2": [ + { + "Version1": "1515151515151515151515151515151515151515151515151515151515151515" + } + ], + "3": [ + { + "Version1": "1616161616161616161616161616161616161616161616161616161616161616" + } + ] + }, "rewarded_signatures": [] } } @@ -1710,7 +1735,7 @@ "proofs": [ { "public_key": "01d9bf2148748a85c89da5aad8ee0b0fc2d105fd39d41a4c796536354f0ae2900c", - "signature": "010dae9911fdb2e62b525e13828935b93dcee028670e1479393a0e21f700e868f85fb5d8d90ad7a23e1c3e6aaabbaa3f1fdd0dfa962461c4208d02fd8e398bb90c" + "signature": "01641f904df4c58b81b5fdae972186a9d709f1c03f3da4f5c4c9b80fbf98254056fc6048c64784c238811e4580bd46a10fe97be676cde5dd6a6d2be7dafedf7005" } ] } @@ -2087,7 +2112,7 @@ { "name": "block_identifier", "value": { - "Hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd" + "Hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884" } } ], @@ -2096,7 +2121,7 @@ "value": { "api_version": "2.0.0", "era_summary": { - "block_hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd", + "block_hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884", "era_id": 42, "stored_value": { "EraInfo": { @@ -2262,7 +2287,7 @@ { "name": "block_identifier", "value": { - "Hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd" + "Hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884" } } ], @@ -2271,7 +2296,7 @@ "value": { "api_version": "2.0.0", "era_summary": { - "block_hash": "6a2dad7a71608f78e9b6b5f97eed60a374e75e70cb8cc925e6681c61c84165bd", + "block_hash": "40fa940e609972313a6d598712fcb9cced789ed237bdac67aa1fe546e624c884", "era_id": 42, "stored_value": { "EraInfo": { @@ -6772,7 +6797,37 @@ ], "properties": { "AddressableEntity": { - "$ref": "#/components/schemas/AddressableEntity" + "type": "object", + "required": [ + "entity", + "entry_points", + "named_keys" + ], + "properties": { + "entity": { + 
"description": "The addressable entity.", + "allOf": [ + { + "$ref": "#/components/schemas/AddressableEntity" + } + ] + }, + "named_keys": { + "description": "The named keys of the addressable entity.", + "allOf": [ + { + "$ref": "#/components/schemas/NamedKeys" + } + ] + }, + "entry_points": { + "description": "The entry points of the addressable entity.", + "type": "array", + "items": { + "$ref": "#/components/schemas/EntryPointValue" + } + } + } } }, "additionalProperties": false @@ -7235,6 +7290,7 @@ "era_id", "height", "parent_hash", + "proposer", "protocol_version", "random_bit", "state_root_hash", @@ -7318,11 +7374,30 @@ } ] }, + "proposer": { + "description": "The public key of the validator which proposed the block.", + "allOf": [ + { + "$ref": "#/components/schemas/PublicKey" + } + ] + }, "current_gas_price": { "description": "The gas price of the era", "type": "integer", "format": "uint8", "minimum": 0.0 + }, + "last_switch_block_hash": { + "description": "The most recent switch block hash.", + "anyOf": [ + { + "$ref": "#/components/schemas/BlockHash" + }, + { + "type": "null" + } + ] } } }, @@ -7939,48 +8014,18 @@ "description": "The body portion of a block. 
Version 2.", "type": "object", "required": [ - "auction", - "install_upgrade", - "mint", - "proposer", "rewarded_signatures", - "standard" + "transactions" ], "properties": { - "proposer": { - "description": "The public key of the validator which proposed the block.", - "allOf": [ - { - "$ref": "#/components/schemas/PublicKey" + "transactions": { + "description": "Map of transactions mapping categories to a list of transaction hashes.", + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "$ref": "#/components/schemas/TransactionHash" } - ] - }, - "mint": { - "description": "The hashes of the mint transactions within the block.", - "type": "array", - "items": { - "$ref": "#/components/schemas/TransactionHash" - } - }, - "auction": { - "description": "The hashes of the auction transactions within the block.", - "type": "array", - "items": { - "$ref": "#/components/schemas/TransactionHash" - } - }, - "install_upgrade": { - "description": "The hashes of the installer/upgrader transactions within the block.", - "type": "array", - "items": { - "$ref": "#/components/schemas/TransactionHash" - } - }, - "standard": { - "description": "The hashes of all other transactions within the block.", - "type": "array", - "items": { - "$ref": "#/components/schemas/TransactionHash" } }, "rewarded_signatures": { diff --git a/rpc_sidecar/src/config.rs b/rpc_sidecar/src/config.rs index 6df2b677..482df230 100644 --- a/rpc_sidecar/src/config.rs +++ b/rpc_sidecar/src/config.rs @@ -163,8 +163,30 @@ impl NodeClientConfig { } } + /// Creates an instance of `NodeClientConfig` with specified listening port. 
#[cfg(any(feature = "testing", test))] - pub fn finite_retries_config(port: u16, num_of_retries: usize) -> Self { + pub fn new_with_port(port: u16) -> Self { + let local_socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port); + NodeClientConfig { + address: local_socket, + request_limit: DEFAULT_NODE_REQUEST_LIMIT, + max_message_size_bytes: DEFAULT_MAX_PAYLOAD_SIZE, + request_buffer_size: DEFAULT_REQUEST_BUFFER_SIZE, + message_timeout_secs: DEFAULT_MESSAGE_TIMEOUT_SECS, + client_access_timeout_secs: DEFAULT_CLIENT_ACCESS_TIMEOUT_SECS, + exponential_backoff: ExponentialBackoffConfig { + initial_delay_ms: DEFAULT_EXPONENTIAL_BACKOFF_BASE_MS, + max_delay_ms: DEFAULT_EXPONENTIAL_BACKOFF_MAX_MS, + coefficient: DEFAULT_EXPONENTIAL_BACKOFF_COEFFICIENT, + max_attempts: MaxAttempts::Infinite, + }, + } + } + + /// Creates an instance of `NodeClientConfig` with specified listening port and maximum number + /// of reconnection retries. + #[cfg(any(feature = "testing", test))] + pub fn new_with_port_and_retries(port: u16, num_of_retries: usize) -> Self { let local_socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port); NodeClientConfig { address: local_socket, diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index e2ee8e52..64f3a0df 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -15,8 +15,8 @@ use casper_binary_port::{ BalanceResponse, BinaryMessage, BinaryMessageCodec, BinaryRequest, BinaryRequestHeader, BinaryResponse, BinaryResponseAndRequest, ConsensusValidatorChanges, DictionaryItemIdentifier, DictionaryQueryResult, ErrorCode, GetRequest, GetTrieFullResult, GlobalStateQueryResult, - GlobalStateRequest, InformationRequest, NodeStatus, PayloadEntity, PurseIdentifier, RecordId, - SpeculativeExecutionResult, TransactionWithExecutionInfo, + GlobalStateRequest, InformationRequest, KeyPrefix, NodeStatus, PayloadEntity, PurseIdentifier, + RecordId, SpeculativeExecutionResult, 
TransactionWithExecutionInfo, }; use casper_types::{ bytesrepr::{self, FromBytes, ToBytes}, @@ -30,7 +30,7 @@ use std::{ }; use tokio::{ net::TcpStream, - sync::{Notify, RwLock, RwLockWriteGuard, Semaphore}, + sync::{futures::Notified, RwLock, RwLockWriteGuard, Semaphore}, }; use tracing::{error, field, info, warn}; @@ -87,6 +87,21 @@ pub trait NodeClient: Send + Sync { parse_response::>(&resp.into())?.ok_or(Error::EmptyEnvelope) } + async fn query_global_state_by_prefix( + &self, + state_identifier: Option, + key_prefix: KeyPrefix, + ) -> Result, Error> { + let get = GlobalStateRequest::ItemsByPrefix { + state_identifier, + key_prefix, + }; + let resp = self + .send_request(BinaryRequest::Get(GetRequest::State(Box::new(get)))) + .await?; + parse_response::>(&resp.into())?.ok_or(Error::EmptyEnvelope) + } + async fn read_balance( &self, state_identifier: Option, @@ -261,9 +276,55 @@ impl Error { fn from_error_code(code: u8) -> Self { match ErrorCode::try_from(code) { Ok(ErrorCode::FunctionDisabled) => Self::FunctionIsDisabled, - Ok(ErrorCode::InvalidTransaction) => Self::InvalidTransaction, Ok(ErrorCode::RootNotFound) => Self::UnknownStateRootHash, Ok(ErrorCode::FailedQuery) => Self::QueryFailedToExecute, + Ok( + ErrorCode::InvalidDeployChainName + | ErrorCode::InvalidDeployDependenciesNoLongerSupported + | ErrorCode::InvalidDeployExcessiveSize + | ErrorCode::InvalidDeployExcessiveTimeToLive + | ErrorCode::InvalidDeployTimestampInFuture + | ErrorCode::InvalidDeployBodyHash + | ErrorCode::InvalidDeployHash + | ErrorCode::InvalidDeployEmptyApprovals + | ErrorCode::InvalidDeployApproval + | ErrorCode::InvalidDeployExcessiveSessionArgsLength + | ErrorCode::InvalidDeployExcessivePaymentArgsLength + | ErrorCode::InvalidDeployMissingPaymentAmount + | ErrorCode::InvalidDeployFailedToParsePaymentAmount + | ErrorCode::InvalidDeployExceededBlockGasLimit + | ErrorCode::InvalidDeployMissingTransferAmount + | ErrorCode::InvalidDeployFailedToParseTransferAmount + | 
ErrorCode::InvalidDeployInsufficientTransferAmount + | ErrorCode::InvalidDeployExcessiveApprovals + | ErrorCode::InvalidDeployUnableToCalculateGasLimit + | ErrorCode::InvalidDeployUnableToCalculateGasCost + | ErrorCode::InvalidDeployUnspecified + | ErrorCode::InvalidTransactionChainName + | ErrorCode::InvalidTransactionExcessiveSize + | ErrorCode::InvalidTransactionExcessiveTimeToLive + | ErrorCode::InvalidTransactionTimestampInFuture + | ErrorCode::InvalidTransactionBodyHash + | ErrorCode::InvalidTransactionHash + | ErrorCode::InvalidTransactionEmptyApprovals + | ErrorCode::InvalidTransactionInvalidApproval + | ErrorCode::InvalidTransactionExcessiveArgsLength + | ErrorCode::InvalidTransactionExcessiveApprovals + | ErrorCode::InvalidTransactionExceedsBlockGasLimit + | ErrorCode::InvalidTransactionMissingArg + | ErrorCode::InvalidTransactionUnexpectedArgType + | ErrorCode::InvalidTransactionInvalidArg + | ErrorCode::InvalidTransactionInsufficientTransferAmount + | ErrorCode::InvalidTransactionEntryPointCannotBeCustom + | ErrorCode::InvalidTransactionEntryPointMustBeCustom + | ErrorCode::InvalidTransactionEmptyModuleBytes + | ErrorCode::InvalidTransactionGasPriceConversion + | ErrorCode::InvalidTransactionUnableToCalculateGasLimit + | ErrorCode::InvalidTransactionUnableToCalculateGasCost + | ErrorCode::InvalidTransactionPricingMode + | ErrorCode::InvalidTransactionUnspecified + | ErrorCode::InvalidTransactionOrDeployUnspecified, + ) => Self::InvalidTransaction, // TODO: map transaction errors to proper variants Ok(err @ (ErrorCode::WasmPreprocessing | ErrorCode::InvalidItemVariant)) => { Self::SpecExecutionFailed(err.to_string()) } @@ -279,10 +340,35 @@ impl Error { } } +struct Reconnect; +struct Shutdown; + +struct Notify { + inner: tokio::sync::Notify, + phantom: std::marker::PhantomData, +} + +impl Notify { + fn new() -> Arc { + Arc::new(Self { + inner: tokio::sync::Notify::new(), + phantom: std::marker::PhantomData, + }) + } + + fn notified(&self) -> Notified { + 
self.inner.notified() + } + + fn notify_one(&self) { + self.inner.notify_one() + } +} + pub struct FramedNodeClient { client: Arc>>, - reconnect: Arc, - shutdown: Arc, + reconnect: Arc>, + shutdown: Arc>, config: NodeClientConfig, request_limit: Semaphore, } @@ -292,14 +378,14 @@ impl FramedNodeClient { config: NodeClientConfig, ) -> Result<(Self, impl Future>), AnyhowError> { let stream = Arc::new(RwLock::new(Self::connect_with_retries(&config).await?)); - let shutdown = Arc::new(Notify::new()); - let reconnect = Arc::new(Notify::new()); + let shutdown = Notify::::new(); + let reconnect = Notify::::new(); let reconnect_loop = Self::reconnect_loop( config.clone(), Arc::clone(&stream), - Arc::clone(&reconnect), Arc::clone(&shutdown), + Arc::clone(&reconnect), ); Ok(( @@ -317,15 +403,15 @@ impl FramedNodeClient { async fn reconnect_loop( config: NodeClientConfig, client: Arc>>, - shutdown: Arc, - reconnect: Arc, + shutdown: Arc>, + reconnect: Arc>, ) -> Result<(), AnyhowError> { loop { tokio::select! 
{ _ = reconnect.notified() => { - let mut lock = client.write().await; - let new_client = Self::reconnect(&config.clone()).await?; - *lock = new_client; + let mut lock = client.write().await; + let new_client = Self::reconnect(&config.clone()).await?; + *lock = new_client; }, _ = shutdown.notified() => { info!("node client shutdown has been requested"); @@ -460,7 +546,7 @@ impl NodeClient for FramedNodeClient { fn handle_response( resp: BinaryResponseAndRequest, - shutdown: &Notify, + shutdown: &Notify, ) -> Result { let version = resp.response().protocol_version(); @@ -565,7 +651,7 @@ mod tests { #[tokio::test] async fn should_reject_bad_major_version() { - let notify = Notify::new(); + let notify = Notify::::new(); let bad_version = ProtocolVersion::from_parts(10, 0, 0); let result = handle_response( @@ -582,7 +668,7 @@ mod tests { #[tokio::test] async fn should_accept_different_minor_version() { - let notify = Notify::new(); + let notify = Notify::::new(); let version = ProtocolVersion::new(SemVer { minor: SUPPORTED_PROTOCOL_VERSION.value().minor + 1, ..SUPPORTED_PROTOCOL_VERSION.value() @@ -608,7 +694,7 @@ mod tests { #[tokio::test] async fn should_accept_different_patch_version() { - let notify = Notify::new(); + let notify = Notify::::new(); let version = ProtocolVersion::new(SemVer { patch: SUPPORTED_PROTOCOL_VERSION.value().patch + 1, ..SUPPORTED_PROTOCOL_VERSION.value() @@ -634,7 +720,7 @@ mod tests { #[tokio::test] async fn given_client_and_no_node_should_fail_after_tries() { - let config = NodeClientConfig::finite_retries_config(1111, 2); + let config = NodeClientConfig::new_with_port_and_retries(1111, 2); let res = FramedNodeClient::new(config).await; assert!(res.is_err()); @@ -648,8 +734,10 @@ mod tests { async fn given_client_and_node_should_connect_and_do_request() { let port = get_port(); let mut rng = TestRng::new(); - let _mock_server_handle = start_mock_binary_port_responding_with_stored_value(port).await; - let config = 
NodeClientConfig::finite_retries_config(port, 2); + let shutdown = Arc::new(tokio::sync::Notify::new()); + let _mock_server_handle = + start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)).await; + let config = NodeClientConfig::new_with_port_and_retries(port, 2); let (c, _) = FramedNodeClient::new(config).await.unwrap(); let res = query_global_state_for_string_value(&mut rng, &c) @@ -663,12 +751,14 @@ mod tests { async fn given_client_should_try_until_node_starts() { let mut rng = TestRng::new(); let port = get_port(); + let shutdown = Arc::new(tokio::sync::Notify::new()); tokio::spawn(async move { sleep(Duration::from_secs(5)).await; let _mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port).await; + start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)) + .await; }); - let config = NodeClientConfig::finite_retries_config(port, 5); + let config = NodeClientConfig::new_with_port_and_retries(port, 5); let (client, _) = FramedNodeClient::new(config).await.unwrap(); let res = query_global_state_for_string_value(&mut rng, &client) @@ -694,4 +784,47 @@ mod tests { .ok_or(Error::NoResponseBody) .map(|query_res| query_res.into_inner().0) } + + #[tokio::test] + async fn given_client_should_reconnect_to_restarted_node_and_do_request() { + let port = get_port(); + let mut rng = TestRng::new(); + let shutdown = Arc::new(tokio::sync::Notify::new()); + let mock_server_handle = + start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)).await; + let config = NodeClientConfig::new_with_port(port); + let (c, reconnect_loop) = FramedNodeClient::new(config).await.unwrap(); + + let scenario = async { + assert!(query_global_state_for_string_value(&mut rng, &c) + .await + .is_ok()); + + shutdown.notify_one(); + let _ = mock_server_handle.await; + + let err = query_global_state_for_string_value(&mut rng, &c) + .await + .unwrap_err(); + assert!(matches!( + err, + 
Error::RequestFailed(e) if e == "disconnected" + )); + + let _mock_server_handle = + start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)) + .await; + + tokio::time::sleep(Duration::from_secs(2)).await; + + assert!(query_global_state_for_string_value(&mut rng, &c) + .await + .is_ok()); + }; + + tokio::select! { + _ = scenario => (), + _ = reconnect_loop => panic!("reconnect loop should not exit"), + } + } } diff --git a/rpc_sidecar/src/rpcs/account.rs b/rpc_sidecar/src/rpcs/account.rs index 79b851bd..8b1395c2 100644 --- a/rpc_sidecar/src/rpcs/account.rs +++ b/rpc_sidecar/src/rpcs/account.rs @@ -257,7 +257,7 @@ mod tests { BinaryRequest::TryAcceptTransaction { .. } => { Ok(BinaryResponseAndRequest::new( BinaryResponse::new_error( - BinaryPortErrorCode::InvalidTransaction, + BinaryPortErrorCode::InvalidTransactionBodyHash, SUPPORTED_PROTOCOL_VERSION, ), &[], diff --git a/rpc_sidecar/src/rpcs/common.rs b/rpc_sidecar/src/rpcs/common.rs index 9a247de5..74c64751 100644 --- a/rpc_sidecar/src/rpcs/common.rs +++ b/rpc_sidecar/src/rpcs/common.rs @@ -1,13 +1,16 @@ -use casper_binary_port::GlobalStateQueryResult; +use std::collections::BTreeMap; + +use casper_binary_port::{GlobalStateQueryResult, KeyPrefix}; use once_cell::sync::Lazy; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use crate::rpcs::error::Error; use casper_types::{ - account::AccountHash, bytesrepr::ToBytes, global_state::TrieMerkleProof, Account, - AddressableEntity, AvailableBlockRange, BlockHeader, BlockIdentifier, EntityAddr, - GlobalStateIdentifier, Key, SignedBlock, StoredValue, + account::AccountHash, addressable_entity::NamedKeys, bytesrepr::ToBytes, + global_state::TrieMerkleProof, Account, AddressableEntity, AvailableBlockRange, BlockHeader, + BlockIdentifier, EntityAddr, EntryPointValue, GlobalStateIdentifier, Key, SignedBlock, + StoredValue, }; use crate::NodeClient; @@ -44,7 +47,14 @@ pub enum ErrorData { #[derive(Debug, PartialEq, Eq, Serialize, 
Deserialize, JsonSchema)] pub enum EntityOrAccount { /// An addressable entity. - AddressableEntity(AddressableEntity), + AddressableEntity { + /// The addressable entity. + entity: AddressableEntity, + /// The named keys of the addressable entity. + named_keys: NamedKeys, + /// The entry points of the addressable entity. + entry_points: Vec, + }, /// A legacy account. LegacyAccount(Account), } @@ -140,10 +150,23 @@ pub async fn resolve_account_hash( else { return Ok(None); }; - let entity = value - .into_addressable_entity() - .ok_or(Error::InvalidAddressableEntity)?; - (EntityOrAccount::AddressableEntity(entity), merkle_proof) + let (Key::AddressableEntity(entity_addr), StoredValue::AddressableEntity(entity)) = + (key, value) + else { + return Err(Error::InvalidAddressableEntity); + }; + let named_keys = + get_entity_named_keys(node_client, entity_addr, state_identifier).await?; + let entry_points = + get_entity_entry_points(node_client, entity_addr, state_identifier).await?; + ( + EntityOrAccount::AddressableEntity { + entity, + named_keys, + entry_points, + }, + merkle_proof, + ) } _ => return Err(Error::InvalidAccountInfo), }; @@ -176,6 +199,73 @@ pub async fn resolve_entity_addr( })) } +pub async fn get_entity_named_keys( + node_client: &dyn NodeClient, + entity_addr: EntityAddr, + state_identifier: Option, +) -> Result { + let stored_values = node_client + .query_global_state_by_prefix(state_identifier, KeyPrefix::NamedKeysByEntity(entity_addr)) + .await + .map_err(|err| Error::NodeRequest("entity named keys", err))?; + let named_keys = stored_values + .into_iter() + .map(|stored_value| { + if let StoredValue::NamedKey(named_key) = stored_value { + let key = named_key + .get_key() + .map_err(|err| Error::InvalidNamedKeys(err.to_string()))?; + let name = named_key + .get_name() + .map_err(|err| Error::InvalidNamedKeys(err.to_string()))?; + Ok((name, key)) + } else { + Err(Error::InvalidNamedKeys(format!( + "unexpected stored value: {}", + 
stored_value.type_name() + ))) + } + }) + .collect::, Error>>()?; + Ok(NamedKeys::from(named_keys)) +} + +pub async fn get_entity_entry_points( + node_client: &dyn NodeClient, + entity_addr: EntityAddr, + state_identifier: Option, +) -> Result, Error> { + let stored_values_v1 = node_client + .query_global_state_by_prefix( + state_identifier, + KeyPrefix::EntryPointsV1ByEntity(entity_addr), + ) + .await + .map_err(|err| Error::NodeRequest("entity named keys", err))?; + let stored_values_v2 = node_client + .query_global_state_by_prefix( + state_identifier, + KeyPrefix::EntryPointsV2ByEntity(entity_addr), + ) + .await + .map_err(|err| Error::NodeRequest("entity named keys", err))?; + + stored_values_v1 + .into_iter() + .chain(stored_values_v2) + .map(|stored_value| { + if let StoredValue::EntryPoint(entry_point) = stored_value { + Ok(entry_point) + } else { + Err(Error::InvalidNamedKeys(format!( + "unexpected stored value: {}", + stored_value.type_name() + ))) + } + }) + .collect::>() +} + pub fn encode_proof(proof: &Vec>) -> Result { Ok(base16::encode_lower( &proof.to_bytes().map_err(Error::BytesreprFailure)?, diff --git a/rpc_sidecar/src/rpcs/error.rs b/rpc_sidecar/src/rpcs/error.rs index 6d600030..49bfcb85 100644 --- a/rpc_sidecar/src/rpcs/error.rs +++ b/rpc_sidecar/src/rpcs/error.rs @@ -55,6 +55,10 @@ pub enum Error { InvalidAddressableEntity, #[error("the auction state was invalid")] InvalidAuctionState, + #[error("the named keys were invalid: {0}")] + InvalidNamedKeys(String), + #[error("the entry points were invalid: {0}")] + InvalidEntryPoints(String), #[error("speculative execution returned nothing")] SpecExecReturnedNothing, #[error("unexpected bytesrepr failure: {0}")] @@ -98,6 +102,8 @@ impl Error { Error::InvalidAccountInfo | Error::InvalidAddressableEntity | Error::InvalidAuctionState + | Error::InvalidNamedKeys(_) + | Error::InvalidEntryPoints(_) | Error::BytesreprFailure(_) => None, } } diff --git a/rpc_sidecar/src/rpcs/state.rs 
b/rpc_sidecar/src/rpcs/state.rs index 22055919..7b89aafa 100644 --- a/rpc_sidecar/src/rpcs/state.rs +++ b/rpc_sidecar/src/rpcs/state.rs @@ -29,8 +29,8 @@ use casper_types::{ AUCTION, }, AddressableEntity, AddressableEntityHash, AuctionState, BlockHash, BlockHeader, BlockHeaderV2, - BlockIdentifier, BlockTime, BlockV2, CLValue, Digest, EntityAddr, GlobalStateIdentifier, Key, - KeyTag, PublicKey, SecretKey, StoredValue, URef, U512, + BlockIdentifier, BlockTime, BlockV2, CLValue, Digest, EntityAddr, EntryPoint, EntryPointValue, + GlobalStateIdentifier, Key, KeyTag, PublicKey, SecretKey, StoredValue, URef, U512, }; #[cfg(test)] use rand::Rng; @@ -87,7 +87,17 @@ static GET_ADDRESSABLE_ENTITY_RESULT: Lazy = Lazy::new(|| GetAddressableEntityResult { api_version: DOCS_EXAMPLE_API_VERSION, merkle_proof: MERKLE_PROOF.clone(), - entity: EntityOrAccount::AddressableEntity(AddressableEntity::example().clone()), + entity: EntityOrAccount::AddressableEntity { + entity: AddressableEntity::example().clone(), + named_keys: [("key".to_string(), Key::Hash([0u8; 32]))] + .iter() + .cloned() + .collect::>() + .into(), + entry_points: vec![EntryPointValue::new_v1_entry_point_value( + EntryPoint::default_with_name("entry_point"), + )], + }, }); static GET_DICTIONARY_ITEM_PARAMS: Lazy = Lazy::new(|| GetDictionaryItemParams { @@ -334,11 +344,7 @@ impl RpcWithOptionalParams for GetAuctionInfo { maybe_params: Option, ) -> Result { let block_identifier = maybe_params.map(|params| params.block_identifier); - let block_header = node_client - .read_block_header(block_identifier) - .await - .map_err(|err| Error::NodeRequest("block header", err))? 
- .unwrap(); + let block_header = common::get_block_header(&*node_client, block_identifier).await?; let state_identifier = block_identifier.map(GlobalStateIdentifier::from); let legacy_bid_stored_values = node_client @@ -580,8 +586,16 @@ impl RpcWithParams for GetAddressableEntity { let result = common::resolve_entity_addr(&*node_client, addr, state_identifier) .await? .ok_or(Error::AddressableEntityNotFound)?; + let named_keys = + common::get_entity_named_keys(&*node_client, addr, state_identifier).await?; + let entry_points = + common::get_entity_entry_points(&*node_client, addr, state_identifier).await?; ( - EntityOrAccount::AddressableEntity(result.value), + EntityOrAccount::AddressableEntity { + entity: result.value, + named_keys, + entry_points, + }, result.merkle_proof, ) } @@ -1121,15 +1135,15 @@ mod tests { use casper_binary_port::{ BalanceResponse, BinaryRequest, BinaryResponse, BinaryResponseAndRequest, DictionaryQueryResult, GetRequest, GlobalStateQueryResult, GlobalStateRequest, - InformationRequestTag, + InformationRequestTag, KeyPrefix, }; use casper_types::{ - addressable_entity::{MessageTopics, NamedKeys}, + addressable_entity::{MessageTopics, NamedKeyValue, NamedKeys}, global_state::{TrieMerkleProof, TrieMerkleProofStep}, system::auction::{Bid, BidKind, ValidatorBid}, testing::TestRng, - AccessRights, AddressableEntity, Block, ByteCodeHash, EntityKind, PackageHash, - ProtocolVersion, TestBlockBuilder, TransactionRuntime, + AccessRights, AddressableEntity, AvailableBlockRange, Block, ByteCodeHash, EntityKind, + PackageHash, ProtocolVersion, TestBlockBuilder, TransactionRuntime, }; use pretty_assertions::assert_eq; use rand::Rng; @@ -1352,14 +1366,8 @@ mod tests { } #[tokio::test] - async fn should_read_entity() { - use casper_types::addressable_entity::{ActionThresholds, AssociatedKeys}; - - struct ClientMock { - block: Block, - entity: AddressableEntity, - entity_hash: AddressableEntityHash, - } + async fn 
should_fail_auction_info_when_block_not_found() { + struct ClientMock; #[async_trait] impl NodeClient for ClientMock { @@ -1371,15 +1379,54 @@ mod tests { BinaryRequest::Get(GetRequest::Information { info_type_tag, .. }) if InformationRequestTag::try_from(info_type_tag) == Ok(InformationRequestTag::BlockHeader) => + { + Ok(BinaryResponseAndRequest::new( + BinaryResponse::new_empty(SUPPORTED_PROTOCOL_VERSION), + &[], + )) + } + BinaryRequest::Get(GetRequest::Information { info_type_tag, .. }) + if InformationRequestTag::try_from(info_type_tag) + == Ok(InformationRequestTag::AvailableBlockRange) => { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value( - self.block.clone_header(), + AvailableBlockRange::RANGE_0_0, SUPPORTED_PROTOCOL_VERSION, ), &[], )) } + req => unimplemented!("unexpected request: {:?}", req), + } + } + } + + let err = GetAuctionInfo::do_handle_request(Arc::new(ClientMock), None) + .await + .expect_err("should reject request"); + + assert_eq!(err.code(), ErrorCode::NoSuchBlock as i64); + } + + #[tokio::test] + async fn should_read_entity() { + use casper_types::addressable_entity::{ActionThresholds, AssociatedKeys}; + + struct ClientMock { + entity: AddressableEntity, + named_keys: NamedKeys, + entry_points: Vec, + entity_hash: AddressableEntityHash, + } + + #[async_trait] + impl NodeClient for ClientMock { + async fn send_request( + &self, + req: BinaryRequest, + ) -> Result { + match req { BinaryRequest::Get(GetRequest::State(req)) if matches!( &*req, @@ -1423,13 +1470,75 @@ mod tests { &[], )) } + BinaryRequest::Get(GetRequest::State(req)) + if matches!( + &*req, + GlobalStateRequest::ItemsByPrefix { + key_prefix: KeyPrefix::NamedKeysByEntity(_), + .. 
+ } + ) => + { + Ok(BinaryResponseAndRequest::new( + BinaryResponse::from_value( + self.named_keys + .iter() + .map(|(name, key)| { + StoredValue::NamedKey( + NamedKeyValue::from_concrete_values(*key, name.clone()) + .expect("should create named key"), + ) + }) + .collect::>(), + SUPPORTED_PROTOCOL_VERSION, + ), + &[], + )) + } + BinaryRequest::Get(GetRequest::State(req)) + if matches!( + &*req, + GlobalStateRequest::ItemsByPrefix { + key_prefix: KeyPrefix::EntryPointsV1ByEntity(_), + .. + } + ) => + { + Ok(BinaryResponseAndRequest::new( + BinaryResponse::from_value( + self.entry_points + .iter() + .cloned() + .map(StoredValue::EntryPoint) + .collect::>(), + SUPPORTED_PROTOCOL_VERSION, + ), + &[], + )) + } + BinaryRequest::Get(GetRequest::State(req)) + if matches!( + &*req, + GlobalStateRequest::ItemsByPrefix { + key_prefix: KeyPrefix::EntryPointsV2ByEntity(_), + .. + } + ) => + { + Ok(BinaryResponseAndRequest::new( + BinaryResponse::from_value( + Vec::::new(), + SUPPORTED_PROTOCOL_VERSION, + ), + &[], + )) + } req => unimplemented!("unexpected request: {:?}", req), } } } let rng = &mut TestRng::new(); - let block = Block::V2(TestBlockBuilder::new().build(rng)); let entity = AddressableEntity::new( PackageHash::new(rng.gen()), ByteCodeHash::new(rng.gen()), @@ -1441,12 +1550,29 @@ mod tests { EntityKind::SmartContract(TransactionRuntime::VmCasperV2), ); let entity_hash: AddressableEntityHash = rng.gen(); + + let named_key_count = rng.gen_range(0..10); + let named_keys: NamedKeys = + iter::repeat_with(|| (rng.random_string(1..36), Key::Hash(rng.gen()))) + .take(named_key_count) + .collect::>() + .into(); + let entry_point_count = rng.gen_range(0..10); + let entry_points = iter::repeat_with(|| { + EntryPointValue::new_v1_entry_point_value(EntryPoint::default_with_name( + rng.random_string(1..10), + )) + }) + .take(entry_point_count) + .collect::>(); + let entity_identifier = EntityIdentifier::random(rng); let resp = GetAddressableEntity::do_handle_request( 
Arc::new(ClientMock { - block: block.clone(), entity: entity.clone(), + named_keys: named_keys.clone(), + entry_points: entry_points.clone(), entity_hash, }), GetAddressableEntityParams { @@ -1461,7 +1587,11 @@ mod tests { resp, GetAddressableEntityResult { api_version: CURRENT_API_VERSION, - entity: EntityOrAccount::AddressableEntity(entity), + entity: EntityOrAccount::AddressableEntity { + entity, + named_keys, + entry_points + }, merkle_proof: String::from("00000000"), } ); diff --git a/rpc_sidecar/src/testing/mod.rs b/rpc_sidecar/src/testing/mod.rs index 5f0cd45c..f8d9ce60 100644 --- a/rpc_sidecar/src/testing/mod.rs +++ b/rpc_sidecar/src/testing/mod.rs @@ -1,3 +1,4 @@ +use std::sync::Arc; use std::time::Duration; use casper_binary_port::{ @@ -6,6 +7,7 @@ use casper_binary_port::{ }; use casper_types::{bytesrepr::ToBytes, CLValue, ProtocolVersion, StoredValue}; use futures::{SinkExt, StreamExt}; +use tokio::sync::Notify; use tokio::task::JoinHandle; use tokio::{ net::{TcpListener, TcpStream}, @@ -26,20 +28,27 @@ impl BinaryPortMock { Self { port, response } } - pub async fn start(&self) { + pub async fn start(&self, shutdown: Arc) { let port = self.port; let addr = format!("{}:{}", LOCALHOST, port); let listener = TcpListener::bind(addr.clone()) .await .expect("failed to listen"); loop { - match listener.accept().await { - Ok((stream, _addr)) => { - let response_payload = self.response.clone(); - tokio::spawn(handle_client(stream, response_payload)); + tokio::select! 
{ + _ = shutdown.notified() => { + break; } - Err(io_err) => { - println!("acceptance failure: {:?}", io_err); + val = listener.accept() => { + match val { + Ok((stream, _addr)) => { + let response_payload = self.response.clone(); + tokio::spawn(handle_client(stream, response_payload)); + } + Err(io_err) => { + println!("acceptance failure: {:?}", io_err); + } + } } } } @@ -63,20 +72,23 @@ pub fn get_port() -> u16 { portpicker::pick_unused_port().unwrap() } -pub async fn start_mock_binary_port_responding_with_stored_value(port: u16) -> JoinHandle<()> { +pub async fn start_mock_binary_port_responding_with_stored_value( + port: u16, + shutdown: Arc, +) -> JoinHandle<()> { let value = StoredValue::CLValue(CLValue::from_t("Foo").unwrap()); let data = GlobalStateQueryResult::new(value, vec![]); let protocol_version = ProtocolVersion::from_parts(2, 0, 0); let val = BinaryResponse::from_value(data, protocol_version); let request = []; let response = BinaryResponseAndRequest::new(val, &request); - start_mock_binary_port(port, response.to_bytes().unwrap()).await + start_mock_binary_port(port, response.to_bytes().unwrap(), shutdown).await } -async fn start_mock_binary_port(port: u16, data: Vec) -> JoinHandle<()> { +async fn start_mock_binary_port(port: u16, data: Vec, shutdown: Arc) -> JoinHandle<()> { let handler = tokio::spawn(async move { let binary_port = BinaryPortMock::new(port, data); - binary_port.start().await; + binary_port.start(shutdown).await; }); sleep(Duration::from_secs(3)).await; // This should be handled differently, preferably the mock binary port should inform that it already bound to the port handler diff --git a/sidecar/src/component.rs b/sidecar/src/component.rs index 6e9242b8..aea1a451 100644 --- a/sidecar/src/component.rs +++ b/sidecar/src/component.rs @@ -234,6 +234,8 @@ impl Component for RpcApiComponent { #[cfg(test)] mod tests { + use std::sync::Arc; + use super::*; use crate::config::SidecarConfig; use casper_rpc_sidecar::{ @@ -355,11 +357,13 @@ 
mod tests { #[tokio::test] async fn given_rpc_api_server_component_when_config_should_return_some() { let port = get_port(); - let _mock_server_handle = start_mock_binary_port_responding_with_stored_value(port).await; + let shutdown = Arc::new(tokio::sync::Notify::new()); + let _mock_server_handle = + start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)).await; let component = RpcApiComponent::new(); let mut config = all_components_all_enabled(); config.rpc_server.as_mut().unwrap().node_client = - NodeClientConfig::finite_retries_config(port, 1); + NodeClientConfig::new_with_port_and_retries(port, 1); config.rpc_server.as_mut().unwrap().main_server.address = format!("0.0.0.0:{}", port); config .rpc_server diff --git a/types/src/legacy_sse_data/fixtures.rs b/types/src/legacy_sse_data/fixtures.rs index ed5b389a..37611635 100644 --- a/types/src/legacy_sse_data/fixtures.rs +++ b/types/src/legacy_sse_data/fixtures.rs @@ -1,5 +1,19 @@ -use super::LegacySseData; +use std::collections::{BTreeMap, BTreeSet}; +use std::str::FromStr; + +use casper_types::system::auction::ValidatorWeights; +use casper_types::{ + BlockHash, BlockV2, Deploy, DeployHash, Digest, EraEndV2, EraId, ProtocolVersion, PublicKey, + RewardedSignatures, SingleBlockRewardedSignatures, TimeDiff, Timestamp, Transaction, + TransactionV1, TransactionV1Hash, U512, +}; +use rand::Rng; + +use super::{structs, LegacySseData}; use crate::sse_data::SseData; +use crate::testing::{parse_block_hash, parse_digest, parse_public_key}; +use casper_types::testing::TestRng; +use casper_types::TestBlockBuilder; pub fn legacy_block_added() -> LegacySseData { serde_json::from_str(RAW_LEGACY_BLOCK_ADDED).unwrap() @@ -77,6 +91,242 @@ pub fn legacy_deploy_processed() -> LegacySseData { serde_json::from_str(RAW_LEGACY_DEPLOY_PROCESSED).unwrap() } +pub fn parent_hash() -> BlockHash { + parse_block_hash("90a4ade2849634e9c1ad0e02cb30645d0984056f68075cad8f6cad2b42a824ba") +} + +pub fn state_root_hash() -> 
Digest { + parse_digest("9cce223fdbeab41dbbcf0b62f3fd857373131378d51776de26bb9f4fefe1e849") +} + +pub fn timestamp() -> Timestamp { + Timestamp::from_str("2020-08-07T01:30:25.521Z").unwrap() +} + +pub fn proposer() -> PublicKey { + parse_public_key("0203426736da2554ebf1f8ee1d2ce4ab11b1e33419d7dfc1ce2fe1945faf00bacc9e") +} + +#[allow(clippy::too_many_arguments)] +pub fn block_v2_with_transactions( + rng: &mut TestRng, + parent_hash: BlockHash, + state_root_hash: Digest, + timestamp: Timestamp, + era_id: EraId, + height: u64, + proposer: PublicKey, + transactions: Vec<&Transaction>, +) -> BlockV2 { + let mut validator_weights = ValidatorWeights::new(); + let key_1 = + parse_public_key("0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b"); + let key_2 = + parse_public_key("02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c"); + let key_3 = + parse_public_key("0202fd52dbda97f41def3e3252704d5f8f5adbec1919368282e02e9500bd88845a80"); + let key_4 = + parse_public_key("02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027e"); + validator_weights.insert(key_1.clone(), U512::from_dec_str("1").unwrap()); + validator_weights.insert(key_2.clone(), U512::from_dec_str("2").unwrap()); + let mut public_keys = BTreeSet::new(); + public_keys.insert(key_1.clone()); + public_keys.insert(key_4.clone()); + let all_validators = vec![&key_1, &key_2, &key_3, &key_4]; + let single_block_sigs = + SingleBlockRewardedSignatures::from_validator_set(&public_keys, all_validators); + let rewarded_signatures = RewardedSignatures::new(vec![single_block_sigs]); + TestBlockBuilder::default() + .parent_hash(parent_hash) + .state_root_hash(state_root_hash) + .timestamp(timestamp) + .era(era_id) + .height(height) + .protocol_version(ProtocolVersion::V2_0_0) + .proposer(proposer) + .switch_block(true) + .validator_weights(validator_weights) + .rewarded_signatures(rewarded_signatures) + .transactions(transactions) + .build(rng) +} + +pub fn sample_transactions( 
+ rng: &mut TestRng, +) -> ( + Vec, + DeployHash, + TransactionV1Hash, + DeployHash, + TransactionV1Hash, + TransactionV1Hash, + TransactionV1Hash, +) { + let timestamp = Timestamp::now(); + let ttl = TimeDiff::from_seconds(rng.gen_range(60..300)); + + let deploy = Deploy::random_with_valid_session_package_by_name(rng); + let standard_deploy_hash = *deploy.hash(); + let standard_deploy = Transaction::Deploy(deploy); + + let version_1 = TransactionV1::random_standard(rng, None, None); + let standard_version_1_hash = *version_1.hash(); + let standard_version_1 = Transaction::V1(version_1); + + let deploy = Deploy::random_valid_native_transfer_with_timestamp_and_ttl(rng, timestamp, ttl); + let mint_deploy_hash = *deploy.hash(); + let mint_deploy = Transaction::Deploy(deploy); + + let version_1 = TransactionV1::random_transfer(rng, Some(timestamp), Some(ttl)); + let mint_version_1_hash = *version_1.hash(); + let mint_version_1 = Transaction::V1(version_1); + + let version_1 = TransactionV1::random_install_upgrade(rng, Some(timestamp), Some(ttl)); + let install_upgrade_v1_hash = *version_1.hash(); + let install_upgrade_v1 = Transaction::V1(version_1); + + let version_1 = TransactionV1::random_staking(rng, Some(timestamp), Some(ttl)); + let auction_v1_hash = *version_1.hash(); + let auction_v1 = Transaction::V1(version_1); + + ( + vec![ + standard_deploy, + standard_version_1, + mint_deploy, + mint_version_1, + install_upgrade_v1, + auction_v1, + ], + standard_deploy_hash, + standard_version_1_hash, + mint_deploy_hash, + mint_version_1_hash, + install_upgrade_v1_hash, + auction_v1_hash, + ) +} + +pub fn block_v2( + rng: &mut TestRng, + parent_hash: BlockHash, + state_root_hash: Digest, + timestamp: Timestamp, + era_id: EraId, + height: u64, + proposer: PublicKey, +) -> BlockV2 { + block_v2_with_transactions( + rng, + parent_hash, + state_root_hash, + timestamp, + era_id, + height, + proposer, + vec![], + ) +} + +#[allow(clippy::too_many_arguments)] +pub fn 
block_v1_no_deploys_no_era( + parent_hash: BlockHash, + state_root_hash: Digest, + body_hash: Digest, + random_bit: bool, + accumulated_seed: Digest, + timestamp: Timestamp, + era_id: EraId, + height: u64, + proposer: PublicKey, + block_hash: BlockHash, +) -> structs::BlockV1 { + structs::BlockV1::new( + parent_hash, + state_root_hash, + body_hash, + random_bit, + accumulated_seed, + None, + timestamp, + era_id, + height, + ProtocolVersion::V2_0_0, + proposer, + block_hash, + vec![], + vec![], + ) +} + +pub fn era_end_v2() -> EraEndV2 { + let mut next_era_validator_weights = BTreeMap::new(); + next_era_validator_weights.insert( + parse_public_key("0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b"), + U512::from_dec_str("1").unwrap(), + ); + next_era_validator_weights.insert( + parse_public_key("02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c"), + U512::from_dec_str("2").unwrap(), + ); + let mut rewards = BTreeMap::new(); + rewards.insert( + parse_public_key("01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25"), + U512::from_dec_str("129457537").unwrap(), + ); + EraEndV2::new( + vec![ + parse_public_key("010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50"), + parse_public_key( + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", + ), + parse_public_key( + "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05", + ), + ], + vec![parse_public_key( + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b51", + )], + next_era_validator_weights, + rewards, + 1, + ) +} + +pub fn era_end_v2_with_reward_exceeding_u64() -> EraEndV2 { + let mut next_era_validator_weights = BTreeMap::new(); + next_era_validator_weights.insert( + parse_public_key("0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b"), + U512::from_dec_str("1").unwrap(), + ); + next_era_validator_weights.insert( + 
parse_public_key("02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c"), + U512::from_dec_str("2").unwrap(), + ); + let mut rewards = BTreeMap::new(); + rewards.insert( + parse_public_key("01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25"), + U512::from_dec_str("18446744073709551616").unwrap(), + ); + EraEndV2::new( + vec![ + parse_public_key("010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50"), + parse_public_key( + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", + ), + parse_public_key( + "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05", + ), + ], + vec![parse_public_key( + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b51", + )], + next_era_validator_weights, + rewards, + 1, + ) +} + const RAW_API_VERSION: &str = r#"{"ApiVersion":"2.0.0"}"#; const RAW_FINALITY_SIGNATURE_V2: &str = r#"{ @@ -405,7 +655,7 @@ const RAW_LEGACY_BLOCK_ADDED: &str = r#" "protocol_version": "1.0.0" }, "body": { - "proposer": "0108c3b531fbbbb53f4752ab3c3c6ba72c9fb4b9852e2822622d8f936428819881", + "proposer": "0203426736da2554ebf1f8ee1d2ce4ab11b1e33419d7dfc1ce2fe1945faf00bacc9e", "deploy_hashes": [ "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" @@ -463,7 +713,7 @@ const RAW_BLOCK_ADDED_V1: &str = r#" "protocol_version": "1.0.0" }, "body": { - "proposer": "0108c3b531fbbbb53f4752ab3c3c6ba72c9fb4b9852e2822622d8f936428819881", + "proposer": "0203426736da2554ebf1f8ee1d2ce4ab11b1e33419d7dfc1ce2fe1945faf00bacc9e", "deploy_hashes": [ "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" @@ -486,6 +736,7 @@ const RAW_BLOCK_ADDED_V2: &str = r#"{ "Version2": { "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", "header": { + "proposer": 
"01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", @@ -517,11 +768,12 @@ const RAW_BLOCK_ADDED_V2: &str = r#"{ "current_gas_price": 1 }, "body": { - "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", - "mint": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e82"}], - "auction": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e83"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e84"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e85"}], - "install_upgrade": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e86"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e87"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e88"}], - "standard": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e91"}], + "transactions": { + "0": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e91"}], + "1": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81"}, {"Version1": 
"58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e82"}], + "2": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e83"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e84"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e85"}], + "3": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e86"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e87"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e88"}] + }, "rewarded_signatures": [[240],[0],[0]] } } diff --git a/types/src/legacy_sse_data/mod.rs b/types/src/legacy_sse_data/mod.rs index 47a6d78e..26fa90c5 100644 --- a/types/src/legacy_sse_data/mod.rs +++ b/types/src/legacy_sse_data/mod.rs @@ -203,25 +203,71 @@ mod tests { #[test] fn should_translate_sse_to_legacy() { - for (sse_data, expected) in sse_translation_scenarios() { + for (sse_data, expected, scenario_name) in sse_translation_scenarios() { let legacy_fs = LegacySseData::from(&sse_data); - assert_eq!(legacy_fs, expected); + assert_eq!( + legacy_fs, + expected, + "Failed when executing scenario {}", + scenario_name.as_str() + ); } } - fn sse_translation_scenarios() -> Vec<(SseData, Option)> { + #[allow(clippy::too_many_lines)] + fn sse_translation_scenarios() -> Vec<(SseData, Option, String)> { vec![ - (api_version(), Some(legacy_api_version())), - (finality_signature_v1(), Some(legacy_finality_signature())), - (finality_signature_v2(), Some(legacy_finality_signature())), - (transaction_accepted(), None), - (deploy_accepted(), Some(legacy_deploy_accepted())), - (deploy_expired(), Some(legacy_deploy_expired())), - (transaction_expired(), None), - (fault(), Some(legacy_fault())), - (block_added_v1(), Some(legacy_block_added())), - (block_added_v2(), Some(legacy_block_added_from_v2())), - (deploy_processed(), Some(legacy_deploy_processed())), + ( + api_version(), + Some(legacy_api_version()), + 
"api_version".to_string(), + ), + ( + finality_signature_v1(), + Some(legacy_finality_signature()), + "finality_signature_v1".to_string(), + ), + ( + finality_signature_v2(), + Some(legacy_finality_signature()), + "finality_signature_v2".to_string(), + ), + ( + transaction_accepted(), + None, + "transaction_accepted".to_string(), + ), + ( + deploy_accepted(), + Some(legacy_deploy_accepted()), + "legacy_deploy_accepted".to_string(), + ), + ( + deploy_expired(), + Some(legacy_deploy_expired()), + "legacy_deploy_expired".to_string(), + ), + ( + transaction_expired(), + None, + "transaction_expired".to_string(), + ), + (fault(), Some(legacy_fault()), "fault".to_string()), + ( + block_added_v1(), + Some(legacy_block_added()), + "block_added_v1".to_string(), + ), + ( + block_added_v2(), + Some(legacy_block_added_from_v2()), + "block_added_v2".to_string(), + ), + ( + deploy_processed(), + Some(legacy_deploy_processed()), + "deploy_processed".to_string(), + ), ] } } diff --git a/types/src/legacy_sse_data/structs.rs b/types/src/legacy_sse_data/structs.rs index e4330bcd..d8347db4 100644 --- a/types/src/legacy_sse_data/structs.rs +++ b/types/src/legacy_sse_data/structs.rs @@ -45,12 +45,11 @@ impl BlockV1 { protocol_version, OnceCell::from(block_hash), ); - Self::new_from_header_and_body(header, body) - } - - pub fn new_from_header_and_body(header: BlockHeaderV1, body: BlockBodyV1) -> Self { - let hash = header.block_hash(); - BlockV1 { hash, header, body } + Self { + hash: block_hash, + header, + body, + } } pub fn from(hash: BlockHash, header: &BlockHeaderV1, body: &casper_types::BlockBodyV1) -> Self { diff --git a/types/src/legacy_sse_data/translate_block_added.rs b/types/src/legacy_sse_data/translate_block_added.rs index 205a23f4..f5b8e2c7 100644 --- a/types/src/legacy_sse_data/translate_block_added.rs +++ b/types/src/legacy_sse_data/translate_block_added.rs @@ -36,6 +36,7 @@ impl EraEndV2Translator for DefaultEraEndV2Translator { //We're not able to cast the reward to 
u64, so we skip this era end. return None; } + println!("Reward: {:?} {:?} {:?}", k.clone(), v, v.as_u64()); rewards.insert(k.clone(), v.as_u64()); } let era_report = EraReport::new( @@ -83,7 +84,7 @@ where let protocol_version = block_v2.header().protocol_version(); let block_hash = block_v2.hash(); let body = block_v2.body(); - let proposer = body.proposer().clone(); + let proposer = header.proposer().clone(); let deploy_hashes = self.deploy_hash_translator.translate(body); let transfer_hashes = self.transfer_hash_translator.translate(body); let block_v1 = structs::BlockV1::new( @@ -157,3 +158,234 @@ where } } } + +#[cfg(test)] +mod tests { + use std::collections::BTreeMap; + + use casper_types::{testing::TestRng, DeployHash, EraEndV1, EraId, EraReport, PublicKey, U512}; + use mockall::predicate; + use pretty_assertions::assert_eq; + use rand::Rng; + use serde::Serialize; + + use super::{ + BlockV2Translator, DefaultBlockV2Translator, DefaultEraEndV2Translator, EraEndV2Translator, + MockEraEndV2Translator, + }; + use crate::{ + legacy_sse_data::{fixtures::*, translate_deploy_hashes::MockDeployHashTranslator}, + testing::parse_public_key, + }; + + #[test] + pub fn default_block_v2_translator_translates_without_era_end_and_deploys() { + let mut test_rng = TestRng::new(); + let (mut era_end_translator, mut deploy_hash_translator, mut transfer_hash_translator) = + prepare_mocks(); + let block_v2 = block_v2( + &mut test_rng, + parent_hash(), + state_root_hash(), + timestamp(), + EraId::new(15678276), + 345678987, + proposer(), + ); + let era_end_ref = block_v2.header().era_end().unwrap(); + prepare_era_end_mock(&mut era_end_translator, era_end_ref, None); + prepare_deploys_mock(&mut deploy_hash_translator, &block_v2, vec![]); + prepare_transfer_mock(&mut transfer_hash_translator, &block_v2, vec![]); + let under_test = DefaultBlockV2Translator { + era_end_translator, + deploy_hash_translator, + transfer_hash_translator, + }; + + let got = 
under_test.translate(&block_v2); + + assert!(got.is_some()); + let expected = block_v1_no_deploys_no_era( + *block_v2.parent_hash(), + *block_v2.state_root_hash(), + *block_v2.body_hash(), + block_v2.random_bit(), + *block_v2.accumulated_seed(), + block_v2.timestamp(), + block_v2.era_id(), + block_v2.height(), + block_v2.proposer().clone(), + *block_v2.hash(), + ); + compare_as_json(&expected, &got.unwrap()); + } + + #[test] + pub fn default_block_v2_translator_passes_era_end_info_and_deploys() { + let mut test_rng = TestRng::new(); + let (mut era_end_translator, mut deploy_hash_translator, mut transfer_hash_translator) = + prepare_mocks(); + let block_v2 = block_v2( + &mut test_rng, + parent_hash(), + state_root_hash(), + timestamp(), + EraId::new(15678276), + 345678987, + proposer(), + ); + let era_end_ref = block_v2.header().era_end().unwrap(); + let report = EraReport::random(&mut test_rng); + let validator_weights = random_validator_weights(&mut test_rng); + let era_end = EraEndV1::new(report, validator_weights); + let deploy_hashes_1: Vec = + (0..3).map(|_| DeployHash::random(&mut test_rng)).collect(); + let deploy_hashes_2: Vec = + (0..3).map(|_| DeployHash::random(&mut test_rng)).collect(); + prepare_era_end_mock(&mut era_end_translator, era_end_ref, Some(era_end.clone())); + prepare_deploys_mock( + &mut deploy_hash_translator, + &block_v2, + deploy_hashes_1.clone(), + ); + prepare_transfer_mock( + &mut transfer_hash_translator, + &block_v2, + deploy_hashes_2.clone(), + ); + + let under_test = DefaultBlockV2Translator { + era_end_translator, + deploy_hash_translator, + transfer_hash_translator, + }; + + let got = under_test.translate(&block_v2).unwrap(); + assert_eq!(got.body.deploy_hashes, deploy_hashes_1); + assert_eq!(got.body.transfer_hashes, deploy_hashes_2); + } + + #[test] + fn default_era_end_v2_translator_translates_all_data() { + let under_test = DefaultEraEndV2Translator; + let era_end_v2 = era_end_v2(); + let maybe_translated = 
under_test.translate(&era_end_v2); + assert!(maybe_translated.is_some(), "{:?}", maybe_translated); + let translated = maybe_translated.unwrap(); + let mut expected_validator_weights = BTreeMap::new(); + expected_validator_weights.insert( + parse_public_key("0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b"), + U512::from(1), + ); + expected_validator_weights.insert( + parse_public_key( + "02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c", + ), + U512::from(2), + ); + let mut rewards = BTreeMap::new(); + rewards.insert( + parse_public_key("01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25"), + 129457537, + ); + let report = EraReport::new( + vec![ + parse_public_key( + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50", + ), + parse_public_key( + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", + ), + parse_public_key( + "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05", + ), + ], + rewards, + vec![parse_public_key( + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b51", + )], + ); + let expected = EraEndV1::new(report, expected_validator_weights); + assert_eq!(translated, expected); + } + + #[test] + fn default_era_end_v2_translator_returns_none_when_reward_exceeds_u64() { + let under_test = DefaultEraEndV2Translator; + let era_end_v2 = era_end_v2_with_reward_exceeding_u64(); + let maybe_translated = under_test.translate(&era_end_v2); + assert!(maybe_translated.is_none()); + } + + fn compare_as_json(left: &T, right: &Y) + where + T: Serialize, + Y: Serialize, + { + let left_value = serde_json::to_value(left).unwrap(); + let right_value = serde_json::to_value(right).unwrap(); + assert_eq!(left_value, right_value); + } + + fn prepare_deploys_mock( + deploy_hash_translator: &mut MockDeployHashTranslator, + block_v2: &casper_types::BlockV2, + deploys: Vec, + ) { + deploy_hash_translator + .expect_translate() + .times(1) + 
.with(predicate::eq(block_v2.body().clone())) + .return_const(deploys); + } + + fn prepare_transfer_mock( + transfer_hash_translator: &mut MockDeployHashTranslator, + block_v2: &casper_types::BlockV2, + deploys: Vec, + ) { + transfer_hash_translator + .expect_translate() + .times(1) + .with(predicate::eq(block_v2.body().clone())) + .return_const(deploys); + } + + fn prepare_era_end_mock( + era_end_translator: &mut MockEraEndV2Translator, + era_end_ref: &casper_types::EraEndV2, + returned: Option, + ) { + era_end_translator + .expect_translate() + .times(1) + .with(predicate::eq(era_end_ref.clone())) + .return_const(returned); + } + + fn prepare_mocks() -> ( + MockEraEndV2Translator, + MockDeployHashTranslator, + MockDeployHashTranslator, + ) { + let era_end_translator = MockEraEndV2Translator::new(); + let deploy_hash_translator = MockDeployHashTranslator::new(); + let transfer_hash_translator = MockDeployHashTranslator::new(); + ( + era_end_translator, + deploy_hash_translator, + transfer_hash_translator, + ) + } + + fn random_validator_weights( + test_rng: &mut TestRng, + ) -> std::collections::BTreeMap { + let mut tree = BTreeMap::new(); + let number_of_weights = test_rng.gen_range(5..=10); + for _ in 0..number_of_weights { + tree.insert(PublicKey::random(test_rng), test_rng.gen()); + } + tree + } +} diff --git a/types/src/legacy_sse_data/translate_deploy_hashes.rs b/types/src/legacy_sse_data/translate_deploy_hashes.rs index 58b59d5f..70b0fe88 100644 --- a/types/src/legacy_sse_data/translate_deploy_hashes.rs +++ b/types/src/legacy_sse_data/translate_deploy_hashes.rs @@ -17,7 +17,7 @@ impl DeployHashTranslator for StandardDeployHashesTranslator { block_body_v2 .standard() .filter_map(|el| match el { - TransactionHash::Deploy(deploy_hash) => Some(*deploy_hash), + TransactionHash::Deploy(deploy_hash) => Some(deploy_hash), TransactionHash::V1(_) => None, }) .collect() @@ -29,9 +29,76 @@ impl DeployHashTranslator for TransferDeployHashesTranslator { block_body_v2 
.mint() .filter_map(|el| match el { - TransactionHash::Deploy(deploy_hash) => Some(*deploy_hash), + TransactionHash::Deploy(deploy_hash) => Some(deploy_hash), TransactionHash::V1(_) => None, }) .collect() } } + +#[cfg(test)] +mod tests { + use casper_types::{testing::TestRng, EraId}; + + use crate::legacy_sse_data::fixtures::*; + + use super::*; + + #[test] + fn standard_deploy_hashes_translator_uses_standard_deploy_transaction_hashes() { + let mut test_rng = TestRng::new(); + let under_test = StandardDeployHashesTranslator; + let ( + transactions, + standard_deploy_hash, + _standard_v1_hash, + _mint_deploy_hash, + _mint_v1_hash, + _install_upgrade_v1, + _auction_v1, + ) = sample_transactions(&mut test_rng); + let block_v2 = block_v2_with_transactions( + &mut test_rng, + parent_hash(), + state_root_hash(), + timestamp(), + EraId::new(15678276), + 345678987, + proposer(), + transactions.iter().collect(), + ); + let block_body = block_v2.body(); + assert_eq!(block_body.all_transactions().collect::>().len(), 6); + let translated = under_test.translate(block_body); + assert_eq!(translated, vec![standard_deploy_hash,]) + } + + #[test] + fn transfer_deploy_hashes_translator_uses_mint_deploy_transaction_hashes() { + let mut test_rng = TestRng::new(); + let under_test = TransferDeployHashesTranslator; + let ( + transactions, + _standard_deploy_hash, + _standard_v1_hash, + mint_deploy_hash, + _mint_v1_hash, + _install_upgrade_v1, + _auction_v1, + ) = sample_transactions(&mut test_rng); + let block_v2 = block_v2_with_transactions( + &mut test_rng, + parent_hash(), + state_root_hash(), + timestamp(), + EraId::new(15678276), + 345678987, + proposer(), + transactions.iter().collect(), + ); + let block_body = block_v2.body(); + assert_eq!(block_body.all_transactions().collect::>().len(), 6); + let translated = under_test.translate(block_body); + assert_eq!(translated, vec![mint_deploy_hash,]) + } +} diff --git a/types/src/legacy_sse_data/translate_execution_result.rs 
b/types/src/legacy_sse_data/translate_execution_result.rs index b35b1c5e..a293be14 100644 --- a/types/src/legacy_sse_data/translate_execution_result.rs +++ b/types/src/legacy_sse_data/translate_execution_result.rs @@ -66,7 +66,7 @@ impl ExecutionEffectsTranslator for DefaultExecutionEffectsTranslator { let maybe_transform_kind = map_transform_v2(ex_ef); if let Some(transform_kind) = maybe_transform_kind { let transform = TransformV1 { - key: key.to_string(), + key: key.to_formatted_string(), transform: transform_kind, }; transforms.push(transform); @@ -120,7 +120,7 @@ fn handle_named_keys(keys: &NamedKeys) -> Option { for (name, key) in keys.iter() { let named_key = NamedKey { name: name.to_string(), - key: key.to_string(), + key: key.to_formatted_string(), }; named_keys.push(named_key); } @@ -128,7 +128,6 @@ fn handle_named_keys(keys: &NamedKeys) -> Option { } fn maybe_tanslate_stored_value(stored_value: &StoredValue) -> Option { - //TODO stored_value this shouldn't be a reference. we should take ownership and reassign to V1 enum to avoid potentially expensive clones. 
match stored_value { StoredValue::CLValue(cl_value) => Some(TransformKindV1::WriteCLValue(cl_value.clone())), StoredValue::Account(acc) => Some(TransformKindV1::WriteAccount(acc.account_hash())), @@ -157,7 +156,7 @@ fn maybe_tanslate_stored_value(stored_value: &StoredValue) -> Option None, StoredValue::BidKind(_) => None, StoredValue::Package(_) => None, @@ -171,14 +170,23 @@ fn maybe_tanslate_stored_value(stored_value: &StoredValue) -> Option Vec { + let transform_1 = TransformV1 { + key: key_1.to_formatted_string(), + transform: TransformKindV1::Identity, + }; + let transform_2 = TransformV1 { + key: key_2.to_formatted_string(), + transform: TransformKindV1::AddKeys(vec![ + NamedKey { + name: "key_1".to_string(), + key: key_1.to_formatted_string(), + }, + NamedKey { + name: "key_2".to_string(), + key: key_2.to_formatted_string(), + }, + ]), + }; + let transform_3 = TransformV1 { + key: key_3.to_formatted_string(), + transform: TransformKindV1::AddUInt64(1235), + }; + let expected_transforms = vec![transform_1, transform_2, transform_3]; + expected_transforms + } + + fn build_example_effects(key_1: Key, key_2: Key, key_3: Key) -> Effects { + let mut effects = Effects::new(); + effects.push(TransformV2::new(key_1, TransformKindV2::Identity)); + let mut named_keys = NamedKeys::new(); + named_keys.insert("key_1".to_string(), key_1); + named_keys.insert("key_2".to_string(), key_2); + effects.push(TransformV2::new( + key_2, + TransformKindV2::AddKeys(named_keys), + )); + effects.push(TransformV2::new(key_3, TransformKindV2::AddUInt64(1235))); + effects } fn random_account() -> Account { diff --git a/types/src/lib.rs b/types/src/lib.rs index dcaa2273..b7f12768 100644 --- a/types/src/lib.rs +++ b/types/src/lib.rs @@ -7,7 +7,7 @@ extern crate alloc; mod filter; pub mod legacy_sse_data; pub mod sse_data; -#[cfg(feature = "sse-data-testing")] +#[cfg(any(feature = "sse-data-testing", test))] mod testing; use casper_types::ProtocolVersion; diff --git a/types/src/sse_data.rs 
b/types/src/sse_data.rs index afcfd33b..a4111039 100644 --- a/types/src/sse_data.rs +++ b/types/src/sse_data.rs @@ -48,13 +48,11 @@ pub(crate) fn to_error(msg: String) -> SseDataDeserializeError { /// Deserializes a string which should contain json data and returns a result of either SseData (which is 2.0.x compliant) or an SseDataDeserializeError /// /// * `json_raw`: string slice which should contain raw json data. -pub fn deserialize(json_raw: &str) -> Result<(SseData, bool), SseDataDeserializeError> { - serde_json::from_str::(json_raw) - .map(|el| (el, false)) - .map_err(|err| { - let error_message = format!("Serde Error: {}", err); - to_error(error_message) - }) +pub fn deserialize(json_raw: &str) -> Result { + serde_json::from_str::(json_raw).map_err(|err| { + let error_message = format!("Serde Error: {}", err); + to_error(error_message) + }) } /// The "data" field of the events sent on the event stream to clients. @@ -245,6 +243,8 @@ impl SseData { #[cfg(feature = "sse-data-testing")] pub mod test_support { + use serde_json::json; + pub const BLOCK_HASH_1: &str = "ca52062424e9d5631a34b7b401e123927ce29d4bd10bc97c7df0aa752f131bb7"; pub const BLOCK_HASH_2: &str = @@ -262,8 +262,8 @@ pub mod test_support { "\"Shutdown\"".to_string() } - pub fn example_block_added_2_0_0(hash: &str, height: &str) -> String { - let raw_block_added = 
format!("{{\"BlockAdded\":{{\"block_hash\":\"{hash}\",\"block\":{{\"Version2\":{{\"hash\":\"{hash}\",\"header\":{{\"parent_hash\":\"12e135355e7eca479d67809e71c36c2e29060607e34f378037f92e8edf406719\",\"state_root_hash\":\"f3e13be7e02273c9362f7c5eb4483811012f8a5d42b8855910caebdc7d8d3eb4\",\"body_hash\":\"ddebade25c99fb8a81a595d63aafb86a478358907d04d5dd8548e7d2bca9eff7\",\"random_bit\":true,\"accumulated_seed\":\"2966bcd7bda50ca5e904eeadc9284b5c355530641696715c02b7828ae5e13b37\",\"era_end\":null,\"timestamp\":\"2024-03-21T09:57:44.123Z\",\"era_id\":116390,\"height\":{height},\"protocol_version\":\"1.0.0\",\"current_gas_price\":1}},\"body\":{{\"proposer\":\"02034aeded2db627239d86eda1f5c8c01f14e26840007af1af698567e13fcef18fa7\",\"mint\":[],\"auction\":[],\"install_upgrade\":[],\"standard\":[],\"rewarded_signatures\":[]}}}}}}}}}}"); + pub fn example_block_added_2_0_0(hash: &str, height: u64) -> String { + let raw_block_added = json!({"BlockAdded":{"block_hash":hash,"block":{"Version2":{"hash":hash,"header":{"parent_hash":"327a6be4f8b23115e089875428ff03d9071a7020ce3e0f4734c43e4279ad77fc","state_root_hash":"4f1638725e8a92ad6432a76124ba4a6db365b00ff352beb58b8c48ed9ed4b68d","body_hash":"337a4c9e510e01e142a19e5d81203bdc43e59a4f9039288c01f7b89370e1d104","random_bit":true,"accumulated_seed":"7b7d7b18668dcc8ffecda5f5de1037f26cd61394f72357cdc9ba84f0f48e37c8","era_end":null,"timestamp":"2024-05-10T19:55:20.415Z","era_id":77,"height":height,"protocol_version":"2.0.0","proposer":"01cee2ff4318180282a73bfcd1446f8145e4d80508fecd76fc38dce13af491f0e5","current_gas_price":1,"last_switch_block_hash":"a3533c2625c6413be2287e581c5fca1a0165ebac02b051f9f07ccf1ad483cf2d"},"body":{"transactions":{"0":[],"1":[],"2":[],"3":[]},"rewarded_signatures":[[248],[0],[0]]}}}}}).to_string(); super::deserialize(&raw_block_added).unwrap(); // deserializing to make sure that the raw json string is in correct form raw_block_added } diff --git a/types/src/testing.rs b/types/src/testing.rs index 
c9496fb4..019ff9a2 100644 --- a/types/src/testing.rs +++ b/types/src/testing.rs @@ -3,11 +3,18 @@ //! Contains various parts and components to aid writing tests and simulations using the //! `casper-node` library. +#[cfg(feature = "sse-data-testing")] use casper_types::{ testing::TestRng, Deploy, TimeDiff, Timestamp, Transaction, TransactionV1Builder, }; +#[cfg(test)] +use casper_types::{BlockHash, Digest, PublicKey}; +#[cfg(feature = "sse-data-testing")] use rand::Rng; +#[cfg(test)] +use serde_json::Value; +#[cfg(feature = "sse-data-testing")] /// Creates a test deploy created at given instant and with given ttl. pub fn create_test_transaction( created_ago: TimeDiff, @@ -32,6 +39,7 @@ pub fn create_test_transaction( } } +#[cfg(feature = "sse-data-testing")] /// Creates a random deploy that is considered expired. pub fn create_expired_transaction(now: Timestamp, test_rng: &mut TestRng) -> Transaction { create_test_transaction( @@ -41,3 +49,18 @@ pub fn create_expired_transaction(now: Timestamp, test_rng: &mut TestRng) -> Tra test_rng, ) } + +#[cfg(test)] +pub fn parse_public_key(arg: &str) -> PublicKey { + serde_json::from_value(Value::String(arg.to_string())).unwrap() +} + +#[cfg(test)] +pub fn parse_block_hash(arg: &str) -> BlockHash { + serde_json::from_value(Value::String(arg.to_string())).unwrap() +} + +#[cfg(test)] +pub fn parse_digest(arg: &str) -> Digest { + serde_json::from_value(Value::String(arg.to_string())).unwrap() +}