diff --git a/Cargo.lock b/Cargo.lock index bd1bfd1a..1fb85b11 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -142,9 +142,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.82" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" +checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" [[package]] name = "arc-swap" @@ -218,9 +218,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -229,9 +229,9 @@ version = "0.1.80" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -427,9 +427,9 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4da9a32f3fed317401fa3c862968128267c3106685286e15d5aaa3d7389c2f60" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -713,9 +713,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.96" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "065a29261d53ba54260972629f9ca6bffa69bac13cd1fed61420f7fa68b9f8bd" +checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" dependencies = [ "jobserver", "libc", @@ -767,9 +767,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 
1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -943,9 +943,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -971,7 +971,7 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613e4ee15899913285b7612004bbd490abd605be7b11d35afada5902fb6b91d5" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -1002,7 +1002,7 @@ version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3418329ca0ad70234b9735dc4ceed10af4df60eff9c8e7b06cb5e520d92c3535" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -1013,9 +1013,9 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d150dea618e920167e5973d70ae6ece4385b7164e0d799fe7c122dd0a5d912ad" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -1025,7 +1025,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rustc_version", "syn 1.0.109", @@ -1446,9 +1446,9 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -1494,9 +1494,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.14" +version = "0.2.15" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", @@ -1775,9 +1775,9 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dff438f14e67e7713ab9332f5fd18c8f20eb7eb249494f6c2bf170522224032" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -2286,9 +2286,9 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -2604,9 +2604,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af7cbce79ec385a1d4f54baa90a76401eb15d9cab93685f62e7e9f942aa00ae2" dependencies = [ "cfg-if", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -2700,11 +2700,10 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -2753,7 +2752,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -2862,9 +2861,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -2946,9 +2945,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pbkdf2" @@ -3022,9 +3021,9 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -3136,7 +3135,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", "version_check", @@ -3148,7 +3147,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "version_check", ] @@ -3164,9 +3163,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.81" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" +checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" dependencies = [ "unicode-ident", ] @@ -3275,7 +3274,7 @@ version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", ] [[package]] @@ -3503,10 +3502,10 @@ version = "8.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rust-embed-utils", - "syn 2.0.60", + "syn 2.0.61", "walkdir", ] @@ -3580,9 +3579,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" +checksum = "092474d1a01ea8278f69e6a358998405fae5b8b963ddaeb2b0b04a128bf1dfb0" [[package]] name = "rusty-fork" @@ -3598,9 +3597,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" @@ -3622,9 +3621,9 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.17" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f55c82c700538496bdc329bb4918a81f87cc8888811bd123cf325a0f2f8d309" +checksum = "fc6e7ed6919cb46507fb01ff1654309219f62b4d603822501b0b80d42f6f21ef" dependencies = [ "dyn-clone", "indexmap 1.9.3", @@ -3635,14 +3634,14 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.17" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83263746fe5e32097f06356968a077f96089739c927a61450efa069905eec108" +checksum = "185f2b7aa7e02d418e453790dde16890256bbd2bcd04b7dc5348811052b53f49" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 
1.0.36", "serde_derive_internals", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -3684,9 +3683,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25a82fcb49253abcb45cdcb2adf92956060ec0928635eb21b4f7a6d8f25ab0bc" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", "thiserror", ] @@ -3728,9 +3727,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" @@ -3766,9 +3765,9 @@ version = "1.0.200" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "856f046b9400cee3c8c94ed572ecdb752444c24528c035cd35882aad6f492bcb" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -3777,9 +3776,9 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -4001,7 +4000,7 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "sqlx-core", "sqlx-macros-core", @@ -4019,7 +4018,7 @@ dependencies = [ "heck 0.4.1", "hex", "once_cell", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "serde", "serde_json", @@ -4185,7 +4184,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" 
dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rustversion", "syn 1.0.109", @@ -4198,10 +4197,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "rustversion", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -4227,18 +4226,18 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.60" +version = "2.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" +checksum = "c993ed8ccba56ae856363b1845da7266a7cb78e1d146c8a32d54b45a8b831fc9" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "unicode-ident", ] @@ -4290,7 +4289,7 @@ checksum = "beca1b4eaceb4f2755df858b88d9b9315b7ccfd1ffd0d7a48a52602301f01a57" dependencies = [ "heck 0.4.1", "proc-macro-error", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", "syn 1.0.109", ] @@ -4326,22 +4325,22 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "thiserror" -version = "1.0.59" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" +checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.59" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" +checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -4447,9 +4446,9 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -4568,9 +4567,9 @@ version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -4777,9 +4776,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "utoipa" -version = "4.2.1" +version = "4.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e95b8d4503ee98939fb7024f6da083f7c48ff033cc3cba7521360e1bc6c1470b" +checksum = "c5afb1a60e207dca502682537fefcfd9921e71d0b83e9576060f09abc6efab23" dependencies = [ "indexmap 2.2.6", "serde", @@ -4794,9 +4793,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7bf0e16c02bc4bf5322ab65f10ab1149bdbcaa782cba66dc7057370a3f8190be" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] @@ -4868,7 +4867,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", ] @@ -4961,9 +4960,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.81", 
+ "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", "wasm-bindgen-shared", ] @@ -4995,9 +4994,9 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5270,22 +5269,22 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" [[package]] name = "zerocopy" -version = "0.7.33" +version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "087eca3c1eaf8c47b94d02790dd086cd594b912d2043d4de4bfdd466b3befb7c" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.33" +version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f4b6c273f496d8fd4eaf18853e6b448760225dc030ff2c485a786859aea6393" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" dependencies = [ - "proc-macro2 1.0.81", + "proc-macro2 1.0.82", "quote 1.0.36", - "syn 2.0.60", + "syn 2.0.61", ] [[package]] diff --git a/LEGACY_SSE_EMULATION.md b/LEGACY_SSE_EMULATION.md new file mode 100644 index 00000000..117b3637 --- /dev/null +++ b/LEGACY_SSE_EMULATION.md @@ -0,0 +1,512 @@ +# Rationale + +The casper node 2.x produces a different set of SSE events than the 1.x ones. Also, 1.x nodes used 3 sse endpoints (`/events/sigs`, `/events/deploys`, `/events/main`), while 2.x node exposed all sse events under one firehose endpoint (`/events`). + +Generally the changes in 2.x regarding SSE are backwards incompatible to some extend. To harness all the details and collect all the data clients should adapt the new SSE API. 
However if some clients are not ready or have no need to adapt to the new SSE API, they can use the legacy SSE emulation. + +SSE emulation is by default turned off, the instruction on how to enable it is in the [main README.md](./README.md) file. + +**BEFORE YOU ENABLE LEGACY SSE EMULATION** please consider the following: +* The legacy SSE emulation is a temporary solution and can be removed in a future major release. +* The legacy SSE emulation is not a 1:1 mapping of the 2.x events to 1.x events. Some events will be omitted, some will be transformed, some will be passed as is. More details on the limitations of the emulation are explained below. +* The legacy SSE emulation is an additional drain on resources. It will consume more resources than the "native" 2.x SSE API. + +# Premises of legacy SSE emulation + +Currently the only possible emulation is the V1 SSE API. Enabling V1 SSE api emulation requires setting `emulate_legacy_sse_apis` to `["V1"]`, like: +``` +[sse_server] +(...) +emulate_legacy_sse_apis = ["V1"] +(...) +``` + +This will expose three additional sse endpoints: +* `/events/sigs` -> publishes `ApiVersion`, `FinalitySignature` and `Shutdown` events +* `/events/deploys` -> publishes `ApiVersion`, `DeployAccepted` and `Shutdown` events +* `/events/main` -> publishes `ApiVersion`, `BlockAdded`, `DeployProcessed`, `DeployExpired`, `Fault` and `Shutdown` events + +Those endpoints will emit events in the same format as the V1 SSE API of the casper node.
There are limitations to what Casper Sidecar can and will do, here is a list of mapping assumptions: + +## Translating `ApiVersion` event + +Legacy SSE event will be the same + +## Translating `BlockAdded` event + * When the 2.x event emits a V1 block it will be unwrapped and passed as a legacy BlockAdded, for instance a 2.x event like this: + ```json + { + "BlockAdded": { + "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "block": { + "Version1": { + "hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "header": { + "parent_hash": "90ca56a697f8b1b19cba08c642fd7f04669b8cd49bb9d652fca989f8a9f8bcea", + "state_root_hash": "9cce223fdbeab41dbbcf0b62f3fd857373131378d51776de26bb9f4fefe1e849", + "body_hash": "5f37be399c15b2394af48243ce10a62a7d12769dc5f7740b18ad3bf55bde5271", + "random_bit": true, + "accumulated_seed": "b3e1930565a80a874a443eaadefa1a340927fb8b347729bbd93e93935a47a9e4", + "era_end": { + "era_report": { + "equivocators": [ + "0203c9da857cfeccf001ce00720ae2e0d083629858b60ac05dd285ce0edae55f0c8e", + "02026fb7b629a2ec0132505cdf036f6ffb946d03a1c9b5da57245af522b842f145be" + ], + "rewards": [ + { + "validator": "01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25", + "amount": 129457537 + } + ], + "inactive_validators": [] + }, + "next_era_validator_weights": [ + { + "validator": "0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b", + "weight": "1" + }, + { + "validator": "02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c", + "weight": "2" + } + ] + }, + "timestamp": "2024-04-25T20:00:35.640Z", + "era_id": 601701, + "height": 6017012, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "0108c3b531fbbbb53f4752ab3c3c6ba72c9fb4b9852e2822622d8f936428819881", + "deploy_hashes": [ + "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", + "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" + ], + "transfer_hashes": [ + 
"3e30b6c1c5dbca9277425846b42dc832cd3d8ce889c38d6bfc8bd95b3e1c403e", + "c990ba47146270655eaacc53d4115cbd980697f3d4e9c76bccfdfce82af6ce08" + ] + } + } + } + } + } + ``` + will be translated to 1.x emulated event: + ```json + { + "BlockAdded": { + "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "block": { + "hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "header": { + "parent_hash": "90ca56a697f8b1b19cba08c642fd7f04669b8cd49bb9d652fca989f8a9f8bcea", + "state_root_hash": "9cce223fdbeab41dbbcf0b62f3fd857373131378d51776de26bb9f4fefe1e849", + "body_hash": "5f37be399c15b2394af48243ce10a62a7d12769dc5f7740b18ad3bf55bde5271", + "random_bit": true, + "accumulated_seed": "b3e1930565a80a874a443eaadefa1a340927fb8b347729bbd93e93935a47a9e4", + "era_end": { + "era_report": { + "equivocators": [ + "0203c9da857cfeccf001ce00720ae2e0d083629858b60ac05dd285ce0edae55f0c8e", + "02026fb7b629a2ec0132505cdf036f6ffb946d03a1c9b5da57245af522b842f145be" + ], + "rewards": [ + { + "validator": "01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25", + "amount": 129457537 + } + ], + "inactive_validators": [] + }, + "next_era_validator_weights": [ + { + "validator": "0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b", + "weight": "1" + }, + { + "validator": "02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c", + "weight": "2" + } + ] + }, + "timestamp": "2024-04-25T20:00:35.640Z", + "era_id": 601701, + "height": 6017012, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "0108c3b531fbbbb53f4752ab3c3c6ba72c9fb4b9852e2822622d8f936428819881", + "deploy_hashes": [ + "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", + "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" + ], + "transfer_hashes": [ + "3e30b6c1c5dbca9277425846b42dc832cd3d8ce889c38d6bfc8bd95b3e1c403e", + "c990ba47146270655eaacc53d4115cbd980697f3d4e9c76bccfdfce82af6ce08" + ] + } + } + } + } 
+ ``` + * When the 2.x event emits a V2 block the following rules apply: + * `block_hash` will be copied from V2 to V1 + * `block.block_hash` will be copied from V2 to V1 + * `block.header.era_end`: + * if the era_end is a V1 variety - it will be copied + * if the era_end is a V2 variety: + * V1 `next_era_validator_weights` will be copied from V2 `next_era_validator_weights` + * V1 `era_report` will be assembled from V2 `era_end.equivocators`, `era_end.rewards` and `era_end.inactive_validators` fields + * IF one of the `rewards` contains a reward that doesn't fit in a u64 (because V2 has U512 type in rewards values) - the whole `era_end` **WILL BE OMITTED** from the legacy V1 block (value None) + * V2 field `next_era_gas_price` has no equivalent in V1 and will be omitted + * `block.header.current_gas_price` this field only exists in V2 and will be omitted from the V1 block header + * other `block.header.*` fields will be copied from V2 to V1 + * `block.body.proposer` will be copied from V2 to V1 + * `block.body.deploy_hashes` will be based on V2 `block.body.standard` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.deploy_hashes` array + * `block.body.transfer_hashes` will be based on V2 `block.body.mint` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.transfer_hashes` array.
+ + An example of the above rules are: + Input V2 BlockAdded: + ```json + { + "BlockAdded": { + "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "block": { + "Version2": { + "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "header": { + "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", + "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", + "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", + "random_bit": false, + "accumulated_seed": "a0e424710f4fba036ba450b40f2bd7a842b176cf136f3af1952a2a13eb02616c", + "era_end": { + "equivocators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", + "0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", + "020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" + ], + "inactive_validators": ["01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56"], + "next_era_validator_weights": [ + {"validator": "02038b238d774c3c4228a0430e3a078e1a2533f9c87cccbcf695637502d8d6057a63", "weight": "1"}, + {"validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", "weight": "2"} + ], + "rewards": { + "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc": "749546792", + "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2": "788342677", + "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec": "86241635", + "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c": "941794198" + }, + "next_era_gas_price": 1 + }, + "timestamp": "2024-04-25T20:31:39.895Z", + "era_id": 419571, + "height": 4195710, + "protocol_version": "1.0.0", + "current_gas_price": 1 + }, + "body": { + "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", + "mint": [{"Deploy": 
"58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e82"}], + "auction": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e83"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e84"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e85"}], + "install_upgrade": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e86"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e87"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e88"}], + "standard": [{"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89"}, {"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90"}, {"Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e91"}], + "rewarded_signatures": [[240],[0],[0]] + } + } + } + } + } + ``` + Output legacy BlockAdded: + ```json + { + "BlockAdded": { + "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "block": { + "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "header": { + "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", + "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", + "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", + "random_bit": false, + "accumulated_seed": "a0e424710f4fba036ba450b40f2bd7a842b176cf136f3af1952a2a13eb02616c", + "era_end": { + "era_report": { + "equivocators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", + "0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", + "020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" + ], + "rewards": [ + { + 
"validator": "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c", + "amount": 941794198 + }, + { + "validator": "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2", + "amount": 788342677 + }, + { + "validator": "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc", + "amount": 749546792 + }, + { + "validator": "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec", + "amount": 86241635 + } + ], + "inactive_validators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56" + ] + }, + "next_era_validator_weights": [ + { + "validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", + "weight": "2" + }, + { + "validator": "02038b238d774c3c4228a0430e3a078e1a2533f9c87cccbcf695637502d8d6057a63", + "weight": "1" + } + ] + }, + "timestamp": "2024-04-25T20:31:39.895Z", + "era_id": 419571, + "height": 4195710, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", + "deploy_hashes": [ + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89", + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90" + ], + "transfer_hashes": [ + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80", + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81" + ] + } + } + } + }``` + +## Translating `TransactionAccepted` event + * If the event is a V1 variety - it will be unwrapped and passed, so a 2.x event: + ```json + { + "TransactionAccepted": { + "Deploy": { + "hash": "5a7709969c210db93d3c21bf49f8bf705d7c75a01609f606d04b0211af171d43", + "header": { + "account": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "timestamp": "2020-08-07T01:28:27.360Z", + "ttl": "4m 22s", + "gas_price": 72, + "body_hash": 
"aa2a111c086628a161001160756c5884e32fde0356bb85f484a3e55682ad089f", + "dependencies": [], + "chain_name": "casper-example" + }, + "payment": { + "StoredContractByName": { + "name": "casper-example", + "entry_point": "example-entry-point", + "args": [ + [ + "amount", + { + "cl_type": "U512", + "bytes": "0400f90295", + "parsed": "2500000000" + } + ] + ] + } + }, + "session": { + "StoredContractByHash": { + "hash": "dfb621e7012df48fe1d40fd8015b5e2396c477c9587e996678551148a06d3a89", + "entry_point": "8sY9fUUCwoiFZmxKo8kj", + "args": [ + [ + "YbZWtEuL4D6oMTJmUWvj", + { + "cl_type": { + "List": "U8" + }, + "bytes": "5a000000909ffe7807b03a5db0c3c183648710db16d408d8425a4e373fc0422a4efed1ab0040bc08786553fcac4521528c9fafca0b0fb86f4c6e9fb9db7a1454dda8ed612c4ea4c9a6378b230ae1e3c236e37d6ebee94339a56cb4be582a", + "parsed": [ + 144, + 159, + 254, + 120, + 7 + ] + } + ] + ] + } + }, + "approvals": [ + { + "signer": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "signature": "025d0a7ba37bebe6774681ca5adecb70fa4eef56821eb344bf0f6867e171a899a87edb2b8bf70f2cb47a1670a6baf2cded1fad535ee53a2f65da91c82ebf30945b" + } + ] + } + } + } + ``` + will be translated to legacy `DeployAccepted`: + ```json + { + "DeployAccepted": { + "hash": "5a7709969c210db93d3c21bf49f8bf705d7c75a01609f606d04b0211af171d43", + "header": { + "account": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "timestamp": "2020-08-07T01:28:27.360Z", + "ttl": "4m 22s", + "gas_price": 72, + "body_hash": "aa2a111c086628a161001160756c5884e32fde0356bb85f484a3e55682ad089f", + "dependencies": [], + "chain_name": "casper-example" + }, + "payment": { + "StoredContractByName": { + "name": "casper-example", + "entry_point": "example-entry-point", + "args": [ + [ + "amount", + { + "cl_type": "U512", + "bytes": "0400f90295", + "parsed": "2500000000" + } + ] + ] + } + }, + "session": { + "StoredContractByHash": { + "hash": 
"dfb621e7012df48fe1d40fd8015b5e2396c477c9587e996678551148a06d3a89", + "entry_point": "8sY9fUUCwoiFZmxKo8kj", + "args": [ + [ + "YbZWtEuL4D6oMTJmUWvj", + { + "cl_type": { + "List": "U8" + }, + "bytes": "5a000000909ffe7807b03a5db0c3c183648710db16d408d8425a4e373fc0422a4efed1ab0040bc08786553fcac4521528c9fafca0b0fb86f4c6e9fb9db7a1454dda8ed612c4ea4c9a6378b230ae1e3c236e37d6ebee94339a56cb4be582a", + "parsed": [ + 144, + 159, + 254, + 120, + 7 + ] + } + ] + ] + } + }, + "approvals": [ + { + "signer": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "signature": "025d0a7ba37bebe6774681ca5adecb70fa4eef56821eb344bf0f6867e171a899a87edb2b8bf70f2cb47a1670a6baf2cded1fad535ee53a2f65da91c82ebf30945b" + } + ] + } + } + ``` + * If the event is a V2 variety - it will be omitted so a 2.x event like: + ``` + { + "TransactionAccepted": { + "Version1": { + ... + } + } + } + ``` + will be omitted from the legacy SSE streams + +## Translating `TransactionExpired` event + * If it's a Deploy variety it will be unpacked and sent. So a 2.x `TransactionExpired` event: + ```json + { + "TransactionExpired": { + "transaction_hash": { + "Deploy": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" + } + } + } + ``` + will be sent as a legacy `DeployExpired` event: + ```json + { + "DeployExpired": { + "deploy_hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" + } + } + ``` + + * If it's a Version1 variety it will be omitted from legacy SSE streams. So a 2.x `TransactionExpired` event: + ```json + { + "TransactionExpired": { + "Version1": { + "hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" + } + } + } + ``` + will be omitted + +## Translating `TransactionProcessed` event. + * If `transaction_hash` field is a `Version1`, the event will be ignored. 
+ * If `transaction_hash` field is a `Deploy`, it's value will be used as `DeployProcessed.deploy_hash` + * If `initiator_addr` field is not a `PublicKey` type, the event will be omitted. + * If `initiator_addr` field is a `PublicKey` type, it's value will be used as `DeployProcessed.account` + * `timestamp`, `ttl`, `block_hash` will be filled from analogous fields in the `TransactionProcessed` event + * If `execution_result` is a `Version1` type, it's value will be copied as-is do the `DeployProcessed.execution_result` field. + * If `execution_result` is a `Version2` type please see (this paragraph)[#translating-executionresultv2] + +### Translating `ExecutionResultV2`. +When transalting `ExecutionResultV2` (later in this paragraph called `ex_v2`)to legacy `ExecutionResult` (later in this paragraph called `ex_v1`) the following rules apply: + * if `ex_v2.error_message` is not empty, the `ExecutionResult` will be of type `Failure` and `ex_v1.error_message` will be set to that value. Otherwise `ex_v1` will be of type `Success` + * `ex_v1.cost` will be set to `ex_v2.cost` + * `ex_v1.transfers` will always be an empty list since 2.x node doesn't use a notion of `TransferAddr` anymore + * `ex_v1.effect` will be populated based on `ex_v2.effects` field applying rules from paragraph (Translating Effects from V2)[#translating-effects-from-v2] + +### Translating `Effects` from V2 + * Output `operations` field will always be an empty list, since 2.x node no longer uses this concept for execution results + * For `transforms` the objects will be constructed based on `ex_v2.effects` with the following exceptions: + * V2 `AddKeys` transform will be translated to V1 `NamedKeys` transform. + * V2 `Write` transform will be translated applying rules from paragraph (Translating Write transform from V2)[#translating-write-transform-from-v2]. 
If translating at least one `Write` transform is not translatable (In the paragraph it will be denoted that it yields a `None` value) - the whole transform will be an empty array. + +### Translating `Write` transform from V2 + When translating `Write` transforms from V2 to V1 the following rules apply: + * For `CLValue`, it will be copied to output as `WriteCLValue` transform + * For `Account` it will be copied to output as `WriteAccount` transform, taking the v2 `account_hash` as value for `WriteAccount`. + * For `ContractWasm` a `WriteContractWasm` transform will be created. Please note that `WriteContractWasm` has no data, so details from V2 will be omitted. + * For `Contract` a `WriteContract` transform will be created. Please note that `WriteContract` has no data, so details from V2 will be omitted. + * For `Contract` a `WriteContractPackage` transform will be created. Please note that `WriteContractPackage` has no data, so details from V2 will be omitted. + * For `LegacyTransfer` a `WriteTransfer` transform will be created. Data will be copied. + * For `DeployInfo` a `WriteDeployInfo` transform will be created. Data will be copied. + * For `EraInfo` a `ErInfo` transform will be created. Data will be copied. + * For `Bid` a `WriteBid` transform will be created. Data will be copied. + * For `Withdraw` a `WriteWithdraw` transform will be created. Data will be copied. + * For `NamedKey` will be translated into a `AddKeys` transform. Data will be copied. + * For `AddressableEntity` no value will be produced (a `None` value will be yielded). + * For `BidKind` no value will be produced (a `None` value will be yielded). + * For `Package` no value will be produced (a `None` value will be yielded). + * For `ByteCode` no value will be produced (a `None` value will be yielded). + * For `MessageTopic` no value will be produced (a `None` value will be yielded). + * For `Message` no value will be produced (a `None` value will be yielded). 
+ + diff --git a/README.md b/README.md index 4b8cbef5..7e34e0e2 100644 --- a/README.md +++ b/README.md @@ -273,22 +273,8 @@ sleep_between_keep_alive_checks_in_seconds = 30 #### Event Stream Server SSE legacy emulations -Currently the only possible emulation is the V1 SSE API. Enabling V1 SSE api emulation requires setting `emulate_legacy_sse_apis` to `["V1"]`, like: -``` -[sse_server] -(...) -emulate_legacy_sse_apis = ["V1"] -(...) -``` - -This will expose three additional sse endpoints: -* `/events/sigs` -* `/events/deploys` -* `/events/main` - -Those endpoints will emit events in the same format as the V1 SSE API of the casper node. There are limitations to what Casper Sidecar can and will do, here is a list of assumptions: +Please see [Legacy sse emulation file](./LEGACY_SSE_EMULATION.md) -TODO -> fill this in the next PR when mapping is implemented ### Storage diff --git a/event_sidecar/src/event_stream_server.rs b/event_sidecar/src/event_stream_server.rs index 3e92ac3f..8efcab0b 100644 --- a/event_sidecar/src/event_stream_server.rs +++ b/event_sidecar/src/event_stream_server.rs @@ -50,12 +50,7 @@ use warp::Filter; /// that a new client can retrieve the entire set of buffered events if desired. const ADDITIONAL_PERCENT_FOR_BROADCAST_CHANNEL_SIZE: u32 = 20; -pub type OutboundSender = UnboundedSender<( - Option, - SseData, - Option, - Option, -)>; +pub type OutboundSender = UnboundedSender<(Option, SseData, Option)>; #[derive(Debug)] pub(crate) struct EventStreamServer { @@ -115,19 +110,14 @@ impl EventStreamServer { } /// Broadcasts the SSE data to all clients connected to the event stream. - pub(crate) fn broadcast( - &mut self, - sse_data: SseData, - inbound_filter: Option, - maybe_json_data: Option, - ) { + pub(crate) fn broadcast(&mut self, sse_data: SseData, inbound_filter: Option) { let event_index = match sse_data { SseData::ApiVersion(..) 
=> None, _ => Some(self.event_indexer.next_index()), }; let _ = self .sse_data_sender - .send((event_index, sse_data, inbound_filter, maybe_json_data)); + .send((event_index, sse_data, inbound_filter)); } } diff --git a/event_sidecar/src/event_stream_server/http_server.rs b/event_sidecar/src/event_stream_server/http_server.rs index 4d964c25..b5f2e580 100644 --- a/event_sidecar/src/event_stream_server/http_server.rs +++ b/event_sidecar/src/event_stream_server/http_server.rs @@ -17,9 +17,8 @@ use tokio::{ }; use tracing::{error, info, trace}; use wheelbuf::WheelBuf; -pub type InboundData = (Option, SseData, Option, Option); -pub type OutboundReceiver = - mpsc::UnboundedReceiver<(Option, SseData, Option, Option)>; +pub type InboundData = (Option, SseData, Option); +pub type OutboundReceiver = mpsc::UnboundedReceiver<(Option, SseData, Option)>; /// Run the HTTP server. /// /// * `server_with_shutdown` is the actual server as a future which can be gracefully shut down. @@ -109,13 +108,12 @@ async fn handle_incoming_data( broadcaster: &broadcast::Sender, ) -> Result<(), ()> { match maybe_data { - Some((maybe_event_index, data, inbound_filter, maybe_json_data)) => { + Some((maybe_event_index, data, inbound_filter)) => { // Buffer the data and broadcast it to subscribed clients. trace!("Event stream server received {:?}", data); let event = ServerSentEvent { id: maybe_event_index, data: data.clone(), - json_data: maybe_json_data, inbound_filter, }; match data { diff --git a/event_sidecar/src/event_stream_server/sse_server.rs b/event_sidecar/src/event_stream_server/sse_server.rs index 9da7a2d4..5d5cc496 100644 --- a/event_sidecar/src/event_stream_server/sse_server.rs +++ b/event_sidecar/src/event_stream_server/sse_server.rs @@ -107,10 +107,6 @@ pub(super) struct ServerSentEvent { pub(super) id: Option, /// Payload of the event pub(super) data: SseData, - #[allow(dead_code)] - /// TODO remove this field in another PR. 
- /// Optional raw input for the edge-case scenario in which the output needs to receive exactly the same text as we got from inbound. - pub(super) json_data: Option, /// Information which endpoint we got the event from pub(super) inbound_filter: Option, } @@ -121,7 +117,6 @@ impl ServerSentEvent { ServerSentEvent { id: None, data: SseData::ApiVersion(client_api_version), - json_data: None, inbound_filter: None, } } @@ -129,7 +124,6 @@ impl ServerSentEvent { ServerSentEvent { id: None, data: SseData::SidecarVersion(version), - json_data: None, inbound_filter: None, } } @@ -672,20 +666,17 @@ mod tests { let api_version = ServerSentEvent { id: None, data: SseData::random_api_version(&mut rng), - json_data: None, inbound_filter: None, }; let block_added = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_block_added(&mut rng), - json_data: None, inbound_filter: None, }; let (sse_data, transaction) = SseData::random_transaction_accepted(&mut rng); let transaction_accepted = ServerSentEvent { id: Some(rng.gen()), data: sse_data, - json_data: None, inbound_filter: None, }; let mut transactions = HashMap::new(); @@ -693,43 +684,36 @@ mod tests { let transaction_processed = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_transaction_processed(&mut rng), - json_data: None, inbound_filter: None, }; let transaction_expired = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_transaction_expired(&mut rng), - json_data: None, inbound_filter: None, }; let fault = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_fault(&mut rng), - json_data: None, inbound_filter: None, }; let finality_signature = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_finality_signature(&mut rng), - json_data: None, inbound_filter: None, }; let step = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_step(&mut rng), - json_data: None, inbound_filter: None, }; let shutdown = ServerSentEvent { id: Some(rng.gen()), data: 
SseData::Shutdown, - json_data: None, inbound_filter: Some(SseFilter::Events), }; let sidecar_api_version = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_sidecar_version(&mut rng), - json_data: None, inbound_filter: None, }; @@ -801,20 +785,17 @@ mod tests { let malformed_api_version = ServerSentEvent { id: Some(rng.gen()), data: SseData::random_api_version(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_block_added = ServerSentEvent { id: None, data: SseData::random_block_added(&mut rng), - json_data: None, inbound_filter: None, }; let (sse_data, transaction) = SseData::random_transaction_accepted(&mut rng); let malformed_transaction_accepted = ServerSentEvent { id: None, data: sse_data, - json_data: None, inbound_filter: None, }; let mut transactions = HashMap::new(); @@ -822,37 +803,31 @@ mod tests { let malformed_transaction_processed = ServerSentEvent { id: None, data: SseData::random_transaction_processed(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_transaction_expired = ServerSentEvent { id: None, data: SseData::random_transaction_expired(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_fault = ServerSentEvent { id: None, data: SseData::random_fault(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_finality_signature = ServerSentEvent { id: None, data: SseData::random_finality_signature(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_step = ServerSentEvent { id: None, data: SseData::random_step(&mut rng), - json_data: None, inbound_filter: None, }; let malformed_shutdown = ServerSentEvent { id: None, data: SseData::Shutdown, - json_data: None, inbound_filter: None, }; @@ -876,7 +851,7 @@ mod tests { } #[allow(clippy::too_many_lines)] - async fn should_filter_duplicate_events(path_filter: &str) { + async fn should_filter_duplicate_events(path_filter: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut transactions = 
HashMap::new(); @@ -972,19 +947,46 @@ mod tests { received_event_str = starts_with_data .replace_all(received_event_str.as_str(), "") .into_owned(); - let received_data = - serde_json::from_str::(received_event_str.as_str()).unwrap(); - let expected_data = serde_json::to_value(&expected_data).unwrap(); - assert_eq!(expected_data, received_data); + if is_legacy_endpoint { + let maybe_legacy = LegacySseData::from(&expected_data); + assert!(maybe_legacy.is_some()); + let input_legacy = maybe_legacy.unwrap(); + let got_legacy = + serde_json::from_str::(received_event_str.as_str()).unwrap(); + assert_eq!(got_legacy, input_legacy); + } else { + let received_data = + serde_json::from_str::(received_event_str.as_str()).unwrap(); + let expected_data = serde_json::to_value(&expected_data).unwrap(); + assert_eq!(expected_data, received_data); + } } } } + #[tokio::test] + async fn should_filter_duplicate_main_events() { + should_filter_duplicate_events(SSE_API_MAIN_PATH, true).await + } + /// This test checks that deploy-accepted events from the initial stream which are duplicated in + /// the ongoing stream are filtered out. + #[tokio::test] + async fn should_filter_duplicate_deploys_events() { + should_filter_duplicate_events(SSE_API_DEPLOYS_PATH, true).await + } + + /// This test checks that signature events from the initial stream which are duplicated in the + /// ongoing stream are filtered out. + #[tokio::test] + async fn should_filter_duplicate_signature_events() { + should_filter_duplicate_events(SSE_API_SIGNATURES_PATH, true).await + } + /// This test checks that main events from the initial stream which are duplicated in the /// ongoing stream are filtered out. #[tokio::test] async fn should_filter_duplicate_firehose_events() { - should_filter_duplicate_events(SSE_API_ROOT_PATH).await + should_filter_duplicate_events(SSE_API_ROOT_PATH, false).await } // Returns `count` random SSE events. 
The events will have sequential IDs starting from `start_id`, and if the path filter @@ -1000,9 +1002,9 @@ mod tests { (start_id..(start_id + count as u32)) .map(|id| { let data = match path_filter { - SSE_API_MAIN_PATH => SseData::random_block_added(rng), + SSE_API_MAIN_PATH => make_legacy_compliant_random_block(rng), SSE_API_DEPLOYS_PATH => { - let (event, transaction) = SseData::random_transaction_accepted(rng); + let (event, transaction) = make_legacy_compliant_random_transaction(rng); assert!(transactions .insert(transaction.hash(), transaction) .is_none()); @@ -1030,13 +1032,32 @@ mod tests { ServerSentEvent { id: Some(id), data, - json_data: None, inbound_filter: None, } }) .collect() } + fn make_legacy_compliant_random_transaction(rng: &mut TestRng) -> (SseData, Transaction) { + loop { + let (event, transaction) = SseData::random_transaction_accepted(rng); + let legacy = LegacySseData::from(&event); + if legacy.is_some() { + return (event, transaction); + } + } + } + + fn make_legacy_compliant_random_block(rng: &mut TestRng) -> SseData { + loop { + let block = SseData::random_block_added(rng); + let legacy = LegacySseData::from(&block); + if legacy.is_some() { + return block; + } + } + } + // Returns `NUM_ONGOING_EVENTS` random SSE events for the ongoing stream containing // duplicates taken from the end of the initial stream. Allows for the full initial stream // to be duplicated except for its first event (the `ApiVersion` one) which has no ID. 
diff --git a/event_sidecar/src/event_stream_server/tests.rs b/event_sidecar/src/event_stream_server/tests.rs index 7485354b..019e784a 100644 --- a/event_sidecar/src/event_stream_server/tests.rs +++ b/event_sidecar/src/event_stream_server/tests.rs @@ -1,11 +1,16 @@ use super::*; +use casper_event_types::legacy_sse_data::LegacySseData; use casper_types::{testing::TestRng, ProtocolVersion}; use futures::{join, Stream, StreamExt}; use http::StatusCode; use pretty_assertions::assert_eq; use reqwest::Response; use serde_json::Value; -use sse_server::{Id, TransactionAccepted, QUERY_FIELD, SSE_API_ROOT_PATH as ROOT_PATH}; +use sse_server::{ + Id, TransactionAccepted, QUERY_FIELD, SSE_API_DEPLOYS_PATH as DEPLOYS_PATH, + SSE_API_MAIN_PATH as MAIN_PATH, SSE_API_ROOT_PATH as ROOT_PATH, + SSE_API_SIGNATURES_PATH as SIGS_PATH, +}; use std::{ collections::HashMap, error::Error, @@ -190,7 +195,7 @@ impl Drop for ServerStopper { struct TestFixture { storage_dir: TempDir, protocol_version: ProtocolVersion, - events: Vec<(SseData, Option)>, + events: Vec, first_event_id: Id, server_join_handle: Option>, server_stopper: ServerStopper, @@ -206,7 +211,7 @@ impl TestFixture { let protocol_version = ProtocolVersion::from_parts(1, 2, 3); let mut transactions = HashMap::new(); - let events: Vec<(SseData, Option)> = (0..EVENT_COUNT) + let events: Vec = (0..EVENT_COUNT) .map(|i| match i % DISTINCT_EVENTS_COUNT { 0 => SseData::random_block_added(rng), 1 => { @@ -223,7 +228,6 @@ impl TestFixture { 6 => SseData::random_finality_signature(rng), _ => unreachable!(), }) - .map(|x| (x, None)) .collect(); TestFixture { storage_dir, @@ -284,10 +288,8 @@ impl TestFixture { }; let api_version_event = SseData::ApiVersion(protocol_version); - server.broadcast(api_version_event.clone(), Some(SseFilter::Events), None); - for (id, (event, maybe_json_data)) in - events.iter().cycle().enumerate().take(event_count as usize) - { + server.broadcast(api_version_event.clone(), Some(SseFilter::Events)); + for (id, 
event) in events.iter().cycle().enumerate().take(event_count as usize) { if server_stopper.should_stop() { debug!("stopping server early"); return; @@ -295,13 +297,7 @@ impl TestFixture { server_behavior .wait_for_clients((id as Id).wrapping_add(first_event_id)) .await; - server.broadcast( - event.clone(), - Some(SseFilter::Events), - maybe_json_data - .as_ref() - .map(|el| serde_json::from_str(el.as_str()).unwrap()), - ); + server.broadcast(event.clone(), Some(SseFilter::Events)); server_behavior.sleep_if_required().await; } @@ -361,12 +357,12 @@ impl TestFixture { .chain( self.events .iter() - .filter(|(event, _)| !matches!(event, SseData::ApiVersion(..))) + .filter(|event| !matches!(event, SseData::ApiVersion(..))) .enumerate() .filter_map(|(id, event)| { let id = id as u128 + self.first_event_id as u128; - if event.0.should_include(filter) { - id_filter(id, &event.0) + if event.should_include(filter) { + id_filter(id, event) } else { None } @@ -661,7 +657,7 @@ fn parse_response(response_text: String, client_id: &str) -> Vec /// * connected before first event /// /// Expected to receive all main, transaction-accepted or signature events depending on `filter`. -async fn should_serve_events_with_no_query(path: &str) { +async fn should_serve_events_with_no_query(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -671,15 +667,79 @@ async fn should_serve_events_with_no_query(path: &str) { let url = url(server_address, path, None); let (expected_events, final_id) = fixture.all_filtered_events(path); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; + compare_received_events_for_legacy_endpoints(is_legacy_endpoint, expected_events, received_events); +} + +/// In legacy endpoints not all input events will be re-emitted to output. 
If an input (2.x) event is not translatable. +/// to 1.x it will be muffled. So we need to adjust the final id to the last event that was 1.x translatable. +fn adjust_final_id( + is_legacy_endpoint: bool, + expected_events: Vec, + final_id: u32, +) -> (Vec, u32) { + let (expected_events, final_id) = if is_legacy_endpoint { + let legacy_compliant_events: Vec = expected_events + .iter() + .filter_map(|event| { + let sse_data = serde_json::from_str::(&event.data).unwrap(); + LegacySseData::from(&sse_data).map(|_| event.clone()) + }) + .collect(); + let id = legacy_compliant_events.last().and_then(|el| el.id).unwrap(); + (legacy_compliant_events, id) + } else { + (expected_events, final_id) + }; + (expected_events, final_id) +} + +/// In legacy endpoints the node produces 2.x compliant sse events, but the node transforms them into legacy format. +/// So to compare we need to apply the translation logic to input 2.x events. +fn compare_received_events_for_legacy_endpoints( + is_legacy_endpoint: bool, + expected_events: Vec, + received_events: Vec, +) { + if is_legacy_endpoint { + let expected_legacy_events: Vec = expected_events + .iter() + .filter_map(|event| { + let sse_data = serde_json::from_str::(&event.data).unwrap(); + LegacySseData::from(&sse_data) + }) + .collect(); + let received_legacy_events: Vec = received_events + .iter() + .map(|event| serde_json::from_str::(&event.data).unwrap()) + .collect(); + assert_eq!(received_legacy_events, expected_legacy_events); + } else { + assert_eq!(received_events, expected_events); + } +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_main_events_with_no_query() { + should_serve_events_with_no_query(MAIN_PATH, true).await; +} - assert_eq!(received_events, expected_events); +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_deploy_accepted_events_with_no_query() { + should_serve_events_with_no_query(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = 
"multi_thread", worker_threads = 2)] +async fn should_serve_signature_events_with_no_query() { + should_serve_events_with_no_query(SIGS_PATH, true).await; } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_firehose_events_with_no_query() { - should_serve_events_with_no_query(ROOT_PATH).await; + should_serve_events_with_no_query(ROOT_PATH, false).await; } /// Client setup: @@ -688,7 +748,7 @@ async fn should_serve_firehose_events_with_no_query() { /// /// Expected to receive main, transaction-accepted or signature events (depending on `path`) from ID 25 /// onwards, as events 25 to 49 should still be in the server buffer. -async fn should_serve_events_with_query(path: &str) { +async fn should_serve_events_with_query(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -701,15 +761,32 @@ async fn should_serve_events_with_query(path: &str) { let url = url(server_address, path, Some(start_from_event_id)); let (expected_events, final_id) = fixture.filtered_events(path, start_from_event_id); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; - assert_eq!(received_events, expected_events); + compare_received_events_for_legacy_endpoints(is_legacy_endpoint, expected_events, received_events); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_main_events_with_query() { + should_serve_events_with_query(MAIN_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_deploy_accepted_events_with_query() { + should_serve_events_with_query(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_signature_events_with_query() { + should_serve_events_with_query(SIGS_PATH, 
true).await; } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_firehose_events_with_query() { - should_serve_events_with_query(ROOT_PATH).await; + should_serve_events_with_query(ROOT_PATH, false).await; } /// Client setup: @@ -718,7 +795,7 @@ async fn should_serve_firehose_events_with_query() { /// /// Expected to receive main, transaction-accepted or signature events (depending on `path`) from ID 25 /// onwards, as events 0 to 24 should have been purged from the server buffer. -async fn should_serve_remaining_events_with_query(path: &str) { +async fn should_serve_remaining_events_with_query(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -732,15 +809,32 @@ async fn should_serve_remaining_events_with_query(path: &str) { let url = url(server_address, path, Some(start_from_event_id)); let expected_first_event = connect_at_event_id - BUFFER_LENGTH; let (expected_events, final_id) = fixture.filtered_events(path, expected_first_event); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; - assert_eq!(received_events, expected_events); + compare_received_events_for_legacy_endpoints(is_legacy_endpoint, expected_events, received_events); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_remaining_main_events_with_query() { + should_serve_remaining_events_with_query(MAIN_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_remaining_deploy_accepted_events_with_query() { + should_serve_remaining_events_with_query(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_remaining_signature_events_with_query() { + should_serve_remaining_events_with_query(SIGS_PATH, 
true).await; } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_remaining_firehose_events_with_query() { - should_serve_remaining_events_with_query(ROOT_PATH).await; + should_serve_remaining_events_with_query(ROOT_PATH, false).await; } /// Client setup: @@ -749,7 +843,7 @@ async fn should_serve_remaining_firehose_events_with_query() { /// /// Expected to receive all main, transaction-accepted or signature events (depending on `path`), as /// event 25 hasn't been added to the server buffer yet. -async fn should_serve_events_with_query_for_future_event(path: &str) { +async fn should_serve_events_with_query_for_future_event(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -759,15 +853,32 @@ async fn should_serve_events_with_query_for_future_event(path: &str) { let url = url(server_address, path, Some(25)); let (expected_events, final_id) = fixture.all_filtered_events(path); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client").await.unwrap(); fixture.stop_server().await; - assert_eq!(received_events, expected_events); + compare_received_events_for_legacy_endpoints(is_legacy_endpoint, expected_events, received_events); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_main_events_with_query_for_future_event() { + should_serve_events_with_query_for_future_event(MAIN_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_deploy_accepted_events_with_query_for_future_event() { + should_serve_events_with_query_for_future_event(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_serve_signature_events_with_query_for_future_event() { + should_serve_events_with_query_for_future_event(SIGS_PATH, true).await; } 
#[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_serve_firehose_events_with_query_for_future_event() { - should_serve_events_with_query_for_future_event(ROOT_PATH).await; + should_serve_events_with_query_for_future_event(ROOT_PATH, false).await; } /// Checks that when a server is shut down (e.g. for a node upgrade), connected clients don't have @@ -918,7 +1029,7 @@ async fn should_handle_bad_url_query() { } /// Check that a server which restarts continues from the previous numbering of event IDs. -async fn should_persist_event_ids(path: &str) { +async fn should_persist_event_ids(path: &str, is_legacy_endpoint: bool) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -930,7 +1041,9 @@ async fn should_persist_event_ids(path: &str) { // Consume these and stop the server. let url = url(server_address, path, None); - let (_expected_events, final_id) = fixture.all_filtered_events(path); + let (expected_events, final_id) = fixture.all_filtered_events(path); + let (_expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let _ = subscribe(&url, barrier, final_id, "client 1") .await .unwrap(); @@ -954,22 +1067,33 @@ async fn should_persist_event_ids(path: &str) { // Consume the events and assert their IDs are all >= `first_run_final_id`. 
let url = url(server_address, path, None); let (expected_events, final_id) = fixture.filtered_events(path, EVENT_COUNT + 1); + let (expected_events, final_id) = + adjust_final_id(is_legacy_endpoint, expected_events, final_id); let received_events = subscribe(&url, barrier, final_id, "client 2") .await .unwrap(); fixture.stop_server().await; - - assert_eq!(received_events, expected_events); assert!(received_events .iter() .skip(1) .all(|event| event.id.unwrap() >= first_run_final_id)); + compare_received_events_for_legacy_endpoints(is_legacy_endpoint, expected_events, received_events); } } +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_persist_deploy_accepted_event_ids() { + should_persist_event_ids(DEPLOYS_PATH, true).await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn should_persist_signature_event_ids() { + should_persist_event_ids(SIGS_PATH, true).await; +} + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_persist_firehose_event_ids() { - should_persist_event_ids(ROOT_PATH).await; + should_persist_event_ids(ROOT_PATH, false).await; } /// Check that a server handles wrapping round past the maximum value for event IDs. 
diff --git a/event_sidecar/src/lib.rs b/event_sidecar/src/lib.rs index 7be0221f..b623833f 100644 --- a/event_sidecar/src/lib.rs +++ b/event_sidecar/src/lib.rs @@ -104,7 +104,7 @@ pub async fn run( fn start_event_broadcasting( config: &SseEventServerConfig, storage_path: String, - mut outbound_sse_data_receiver: Receiver<(SseData, Option, Option)>, + mut outbound_sse_data_receiver: Receiver<(SseData, Option)>, enable_legacy_filters: bool, ) -> JoinHandle> { let event_stream_server_port = config.event_stream_server.port; @@ -122,10 +122,8 @@ fn start_event_broadcasting( enable_legacy_filters, ) .context("Error starting EventStreamServer")?; - while let Some((sse_data, inbound_filter, maybe_json_data)) = - outbound_sse_data_receiver.recv().await - { - event_stream_server.broadcast(sse_data, inbound_filter, maybe_json_data); + while let Some((sse_data, inbound_filter)) = outbound_sse_data_receiver.recv().await { + event_stream_server.broadcast(sse_data, inbound_filter); } Err::<(), Error>(Error::msg("Event broadcasting finished")) }) @@ -136,7 +134,7 @@ fn start_sse_processors( event_listeners: Vec, sse_data_receivers: Vec>, database: Database, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, ) -> JoinHandle> { tokio::spawn(async move { let mut join_handles = Vec::with_capacity(event_listeners.len()); @@ -167,7 +165,7 @@ fn start_sse_processors( let _ = join_all(join_handles).await; //Send Shutdown to the sidecar sse endpoint let _ = outbound_sse_data_sender - .send((SseData::Shutdown, None, None)) + .send((SseData::Shutdown, None)) .await; // Below sleep is a workaround to allow the above Shutdown to propagate. 
// If we don't do this there is a race condition between handling of the message and dropping of the outbound server @@ -183,7 +181,7 @@ fn start_sse_processors( fn spawn_sse_processor( database: &Database, sse_data_receiver: Receiver, - outbound_sse_data_sender: &Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: &Sender<(SseData, Option)>, connection_config: Connection, api_version_manager: &std::sync::Arc>, ) -> JoinHandle> { @@ -290,9 +288,8 @@ async fn handle_database_save_result( entity_name: &str, entity_identifier: &str, res: Result, - outbound_sse_data_sender: &Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: &Sender<(SseData, Option)>, inbound_filter: Filter, - json_data: Option, build_sse_data: F, ) where F: FnOnce() -> SseData, @@ -300,7 +297,7 @@ async fn handle_database_save_result( match res { Ok(_) => { if let Err(error) = outbound_sse_data_sender - .send((build_sse_data(), Some(inbound_filter), json_data)) + .send((build_sse_data(), Some(inbound_filter))) .await { debug!( @@ -331,7 +328,7 @@ async fn handle_single_event, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, api_version_manager: GuardedApiVersionManager, ) { match sse_event.data { @@ -369,7 +366,6 @@ async fn handle_single_event( sse_event: SseEvent, sqlite_database: Db, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, ) { warn!("Node ({}) is unavailable", sse_event.source.to_string()); let res = sqlite_database @@ -601,11 +591,7 @@ async fn handle_shutdown>, version: ProtocolVersion, - outbound_sse_data_sender: &Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: &Sender<(SseData, Option)>, filter: Filter, enable_event_logging: bool, ) { @@ -632,7 +618,7 @@ async fn handle_api_version( let changed_newest_version = manager_guard.store_version(version); if changed_newest_version { if let Err(error) = outbound_sse_data_sender - .send((SseData::ApiVersion(version), 
Some(filter), None)) + .send((SseData::ApiVersion(version), Some(filter))) .await { debug!( @@ -649,7 +635,7 @@ async fn handle_api_version( async fn sse_processor( inbound_sse_data_receiver: Receiver, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, database: Db, database_supports_multithreaded_processing: bool, enable_event_logging: bool, @@ -687,7 +673,7 @@ async fn sse_processor( mut queue_rx: Receiver, database: Db, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, api_version_manager: GuardedApiVersionManager, enable_event_logging: bool, #[cfg(feature = "additional-metrics")] metrics_sender: Sender<()>, @@ -718,7 +704,7 @@ async fn start_multi_threaded_events_consumer< Db: DatabaseReader + DatabaseWriter + Clone + Send + Sync + 'static, >( mut inbound_sse_data_receiver: Receiver, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, database: Db, enable_event_logging: bool, api_version_manager: GuardedApiVersionManager, @@ -756,7 +742,7 @@ async fn start_single_threaded_events_consumer< Db: DatabaseReader + DatabaseWriter + Clone + Send + Sync, >( mut inbound_sse_data_receiver: Receiver, - outbound_sse_data_sender: Sender<(SseData, Option, Option)>, + outbound_sse_data_sender: Sender<(SseData, Option)>, database: Db, enable_event_logging: bool, api_version_manager: GuardedApiVersionManager, diff --git a/event_sidecar/src/testing/fake_event_stream.rs b/event_sidecar/src/testing/fake_event_stream.rs index 28c4c3f3..f3a303ef 100644 --- a/event_sidecar/src/testing/fake_event_stream.rs +++ b/event_sidecar/src/testing/fake_event_stream.rs @@ -194,7 +194,7 @@ async fn do_spam_testing( let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + 
event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); @@ -237,7 +237,7 @@ async fn do_load_testing_transaction( let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); @@ -279,7 +279,7 @@ async fn do_load_testing_step( }); let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); let (test_rng, _) = tokio::join!(scenario_task, broadcasting_task); @@ -314,7 +314,7 @@ async fn handle_realistic_scenario( }); let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Events), None); + event_stream_server.broadcast(event, Some(SseFilter::Events)); } }); let (test_rng, _) = tokio::join!(scenario_task, broadcasting_task); diff --git a/listener/src/connection_manager.rs b/listener/src/connection_manager.rs index 4da20ad0..9cf96378 100644 --- a/listener/src/connection_manager.rs +++ b/listener/src/connection_manager.rs @@ -236,12 +236,8 @@ impl DefaultConnectionManager { error!(error_message); return Err(Error::msg(error_message)); } - Ok((sse_data, needs_raw_json)) => { + Ok(sse_data) => { let payload_size = event.data.len(); - let mut raw_json_data = None; - if needs_raw_json { - raw_json_data = Some(event.data); - } self.observe_bytes(sse_data.type_label(), payload_size); let api_version = self.api_version.ok_or(anyhow!( "Expected ApiVersion to be present when handling messages." 
@@ -250,7 +246,6 @@ impl DefaultConnectionManager { event.id.parse().unwrap_or(0), sse_data, self.bind_address.clone(), - raw_json_data, self.filter.clone(), api_version.to_string(), self.network_name.clone(), @@ -293,7 +288,7 @@ impl DefaultConnectionManager { match deserialize(&event.data) { //at this point we // are assuming that it's an ApiVersion and ApiVersion is the same across all semvers - Ok((SseData::ApiVersion(semver), _)) => { + Ok(SseData::ApiVersion(semver)) => { let payload_size = event.data.len(); self.observe_bytes("ApiVersion", payload_size); self.api_version = Some(semver); @@ -301,7 +296,6 @@ impl DefaultConnectionManager { 0, SseData::ApiVersion(semver), self.bind_address.clone(), - None, self.filter.clone(), semver.to_string(), self.network_name.clone(), diff --git a/listener/src/types.rs b/listener/src/types.rs index db1b361a..85609f3f 100644 --- a/listener/src/types.rs +++ b/listener/src/types.rs @@ -32,9 +32,6 @@ pub struct SseEvent { pub data: SseData, /// Source from which we got the message pub source: Url, - /// In some cases it is required to emit the data exactly as we got it from the node. - /// For those situations we store the exact text of the raw payload in this field. - pub json_data: Option, /// Info from which filter we received the message. For some events (Shutdown in particularly) we want to push only to the same outbound as we received them from so we don't duplicate. pub inbound_filter: Filter, /// Api version which was reported for the node from which the event was received. 
@@ -48,7 +45,6 @@ impl SseEvent { id: u32, data: SseData, mut source: Url, - json_data: Option, inbound_filter: Filter, api_version: String, network_name: String, @@ -60,7 +56,6 @@ impl SseEvent { id, data, source, - json_data, inbound_filter, api_version, network_name, diff --git a/types/src/legacy_sse_data/fixtures.rs b/types/src/legacy_sse_data/fixtures.rs index ed5b389a..90813d44 100644 --- a/types/src/legacy_sse_data/fixtures.rs +++ b/types/src/legacy_sse_data/fixtures.rs @@ -1,3 +1,6 @@ +use casper_types::{BlockV1, BlockV2, EraEndV2}; +use serde_json::Value; + use super::LegacySseData; use crate::sse_data::SseData; @@ -77,6 +80,26 @@ pub fn legacy_deploy_processed() -> LegacySseData { serde_json::from_str(RAW_LEGACY_DEPLOY_PROCESSED).unwrap() } +pub fn block_v2() -> BlockV2 { + serde_json::from_str(BLOCK_BODY_V2_WITH_ALL_DATA).unwrap() +} + +pub fn block_v1_no_deploys_no_era() -> BlockV1 { + let mut val = serde_json::from_str::(BLOCK_BODY_V1_ALL_DATA).unwrap(); + val["header"]["era_end"] = serde_json::Value::Null; + val["body"]["deploy_hashes"] = serde_json::Value::Array(vec![]); + val["body"]["transfer_hashes"] = serde_json::Value::Array(vec![]); + serde_json::from_value(val).unwrap() +} + +pub fn era_end_v2() -> EraEndV2 { + serde_json::from_str(ERA_END_V2_WITH_ALL_DATA).unwrap() +} + +pub fn era_end_v2_with_reward_exceeding_u64() -> EraEndV2 { + serde_json::from_str(ERA_END_V2_WITH_REWARD_EXCEEDING_U64).unwrap() +} + const RAW_API_VERSION: &str = r#"{"ApiVersion":"2.0.0"}"#; const RAW_FINALITY_SIGNATURE_V2: &str = r#"{ @@ -667,3 +690,241 @@ const RAW_LEGACY_DEPLOY_PROCESSED: &str = r#"{ } } }"#; +const BLOCK_BODY_V1_ALL_DATA: &str = r#" +{ + "hash": "e0c36ab6748b9011e5aba95f6e753ace0c2171fa96d99dba5d51f8996359d248", + "header": { + "parent_hash": "90a4ade2849634e9c1ad0e02cb30645d0984056f68075cad8f6cad2b42a824ba", + "state_root_hash": "e396ff238e584f101be1fd60c5fd4b4b14f52941a2ea9ed610fd466171f613e8", + "body_hash": 
"fd2455605a34a540d99b474cd7a276024f1268d4fc8c86967a29191776576a11", + "random_bit": true, + "accumulated_seed": "446c1581914be4dc9617d64d06062a4f38acccff64d683eefe55b7c240c5fb9b", + "era_end": { + "era_report": { + "equivocators": [ + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50", + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", + "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05" + ], + "rewards": [ + { + "validator": "010d2d4fdda5ff7a9820de2fe18a262b29bb2df36cbc767446e1dcd015e9c5ea98", + "amount": 155246594 + } + ], + "inactive_validators": [] + }, + "next_era_validator_weights": [ + { + "validator": "013183e7169846881fb6ae07dc5ba63d92bd592d67681a765cf9813d5146de97f3", + "weight": "277433153" + }, + { + "validator": "0202cfc31ccfba98abbc4cfe6c17e7aacc7bfe52f9f88dac8d32aca5c825951fb660", + "weight": "875434194" + }, + { + "validator": "0203f4ba92963513e1cc0171691a7133d26b59aa025261a0ecdbfb53502457ab770a", + "weight": "775555276" + }, + { + "validator": "0203ff1caebb0fd53fb4c52f8cf18d0e3257f6b075a16f0fd6aa5499ddb28a8d82ab", + "weight": "818689728" + } + ] + }, + "timestamp": "2020-08-07T01:25:44.194Z", + "era_id": 746263, + "height": 12581264813190897996, + "protocol_version": "2.0.0" + }, + "body": { + "proposer": "018f40f339c5a4999eb50d5438da964f94e4c329ac33c1d5c32ab6640b3f8261a0", + "deploy_hashes": [ + "e185793e2a6214542ffee6de0ede37d7dd9748b429e4586d73fd2abdd100bd7c", + "c4f7acd014ef88af95ebf338e8dd29b95b161a6a812a6764112bf9d09abc399a" + ], + "transfer_hashes": [ + "19cd7acc75ffe58e6dd5f3f1a6b7c08f8d02bf47928926054d4818e6eb41ca74" + ] + } +} +"#; +const ERA_END_V2_WITH_REWARD_EXCEEDING_U64: &str = r#" +{ + "equivocators": [ + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50", + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", + "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05" + ], + "inactive_validators": 
["010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b51"], + "next_era_validator_weights": [ + { + "validator": "013183e7169846881fb6ae07dc5ba63d92bd592d67681a765cf9813d5146de97f3", + "weight": "277433153" + }, + { + "validator": "0203ff1caebb0fd53fb4c52f8cf18d0e3257f6b075a16f0fd6aa5499ddb28a8d82ab", + "weight": "818689728" + } + ], + "rewards": { + "010d2d4fdda5ff7a9820de2fe18a262b29bb2df36cbc767446e1dcd015e9c5ea98": "18446744073709551616" + }, + "next_era_gas_price": 152 +} +"#; + +const ERA_END_V2_WITH_ALL_DATA: &str = r#" +{ + "equivocators": [ + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50", + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", + "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05" + ], + "inactive_validators": ["010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b51"], + "next_era_validator_weights": [ + { + "validator": "013183e7169846881fb6ae07dc5ba63d92bd592d67681a765cf9813d5146de97f3", + "weight": "277433153" + }, + { + "validator": "0203ff1caebb0fd53fb4c52f8cf18d0e3257f6b075a16f0fd6aa5499ddb28a8d82ab", + "weight": "818689728" + } + ], + "rewards": { + "010d2d4fdda5ff7a9820de2fe18a262b29bb2df36cbc767446e1dcd015e9c5ea98": "155246594" + }, + "next_era_gas_price": 152 +} +"#; + +const BLOCK_BODY_V2_WITH_ALL_DATA: &str = r#" +{ + "hash": "e0c36ab6748b9011e5aba95f6e753ace0c2171fa96d99dba5d51f8996359d248", + "header": { + "parent_hash": "90a4ade2849634e9c1ad0e02cb30645d0984056f68075cad8f6cad2b42a824ba", + "state_root_hash": "e396ff238e584f101be1fd60c5fd4b4b14f52941a2ea9ed610fd466171f613e8", + "body_hash": "fd2455605a34a540d99b474cd7a276024f1268d4fc8c86967a29191776576a11", + "random_bit": true, + "accumulated_seed": "446c1581914be4dc9617d64d06062a4f38acccff64d683eefe55b7c240c5fb9b", + "era_end": { + "equivocators": [ + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50", + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", 
+ "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05" + ], + "inactive_validators": [], + "next_era_validator_weights": [ + { + "validator": "013183e7169846881fb6ae07dc5ba63d92bd592d67681a765cf9813d5146de97f3", + "weight": "277433153" + }, + { + "validator": "0202cfc31ccfba98abbc4cfe6c17e7aacc7bfe52f9f88dac8d32aca5c825951fb660", + "weight": "875434194" + }, + { + "validator": "0203f4ba92963513e1cc0171691a7133d26b59aa025261a0ecdbfb53502457ab770a", + "weight": "775555276" + }, + { + "validator": "0203ff1caebb0fd53fb4c52f8cf18d0e3257f6b075a16f0fd6aa5499ddb28a8d82ab", + "weight": "818689728" + } + ], + "rewards": { + "010d2d4fdda5ff7a9820de2fe18a262b29bb2df36cbc767446e1dcd015e9c5ea98": "155246594" + }, + "next_era_gas_price": 1 + }, + "timestamp": "2020-08-07T01:25:44.194Z", + "era_id": 746263, + "height": 12581264813190897996, + "protocol_version": "2.0.0", + "current_gas_price": 231 + }, + "body": { + "proposer": "018f40f339c5a4999eb50d5438da964f94e4c329ac33c1d5c32ab6640b3f8261a0", + "mint": [ + { + "Version1": "b26ae87e978a22e3d6cd28d2659a9a8ee7f0d98d1d7a7894fb07a42f9619b3f7" + }, + { + "Deploy": "19cd7acc75ffe58e6dd5f3f1a6b7c08f8d02bf47928926054d4818e6eb41ca74" + }, + { + "Version1": "493a5aba2ecd38c99302738509fe1ba27e28e9bfdd2a642c430ffbfb33cb7692" + }, + { + "Version1": "4df9827fdb559cf1ecfc2490ed6039e720a54f729377a86d5466cb1eedf30cc3" + }, + { + "Deploy": "5e50ebcf0190ef2be4182fe7940f4d68dde8210f42c75ca9478fc1be765c5751" + } + ], + "auction": [ + { + "Deploy": "3adac5169f7e2c83d9e9b8a0563ff072251851436e97c04287f838752be8114d" + }, + { + "Version1": "9a7c8cb4eee485220b89fd4cd0cc298b8bd5ac9c26836cb0c17b99f83f03a106" + }, + { + "Deploy": "f8b204a26c27ba38f624a11d629c162d739afe4af547e1f9da71be029c9ed577" + }, + { + "Version1": "56c0a2166f7b7e6cd03ca3cd8f0872cec37951817f28bc3b3f36924fd5f838c6" + }, + { + "Deploy": "cffea0de3f6dd60d3636e8e02c3bee696773c0d8e498deacef9a19e80bb43e42" + } + ], + "install_upgrade": [ + { + "Version1": 
"f6da6cf6005d0fc129de7d1e7c3994dffa91fb90dbd8c9f0889054219329e038" + }, + { + "Deploy": "332f14259115d09fccc79df0cc5d3c6b01f420b051ad0e7f93981c11af7ed332" + }, + { + "Deploy": "ff5f9819a0db8947739ec69438479c954439d7f95d6c09d4c2489a7a8bccb249" + }, + { + "Version1": "8abe7ebce38def7df1cbf642a28b50adc882d093821efea104cc64236be81f25" + }, + { + "Deploy": "06e7d220c0015d914fef28aec07047ee8f40030fb9ea001dcada426f3c43a656" + } + ], + "standard": [ + { + "Version1": "dda811037b26144f36c5b82c57b3378d93783cf87203f709822eff7f922ee788" + }, + { + "Version1": "e881cb051e210b34d4428552d89b2ef2800c949837f494763a5de1691da99103" + }, + { + "Version1": "7bef85e904b4c2757ed38a22570fb98fa451b5ecfeb601c0a454a18638db81d2" + }, + { + "Deploy": "e185793e2a6214542ffee6de0ede37d7dd9748b429e4586d73fd2abdd100bd7c" + }, + { + "Deploy": "c4f7acd014ef88af95ebf338e8dd29b95b161a6a812a6764112bf9d09abc399a" + } + ], + "rewarded_signatures": [ + [ + 1, + 2 + ], + [ + 4, + 12 + ] + ] + } +} +"#; diff --git a/types/src/legacy_sse_data/mod.rs b/types/src/legacy_sse_data/mod.rs index 47a6d78e..26fa90c5 100644 --- a/types/src/legacy_sse_data/mod.rs +++ b/types/src/legacy_sse_data/mod.rs @@ -203,25 +203,71 @@ mod tests { #[test] fn should_translate_sse_to_legacy() { - for (sse_data, expected) in sse_translation_scenarios() { + for (sse_data, expected, scenario_name) in sse_translation_scenarios() { let legacy_fs = LegacySseData::from(&sse_data); - assert_eq!(legacy_fs, expected); + assert_eq!( + legacy_fs, + expected, + "Failed when executing scenario {}", + scenario_name.as_str() + ); } } - fn sse_translation_scenarios() -> Vec<(SseData, Option)> { + #[allow(clippy::too_many_lines)] + fn sse_translation_scenarios() -> Vec<(SseData, Option, String)> { vec![ - (api_version(), Some(legacy_api_version())), - (finality_signature_v1(), Some(legacy_finality_signature())), - (finality_signature_v2(), Some(legacy_finality_signature())), - (transaction_accepted(), None), - (deploy_accepted(), 
Some(legacy_deploy_accepted())), - (deploy_expired(), Some(legacy_deploy_expired())), - (transaction_expired(), None), - (fault(), Some(legacy_fault())), - (block_added_v1(), Some(legacy_block_added())), - (block_added_v2(), Some(legacy_block_added_from_v2())), - (deploy_processed(), Some(legacy_deploy_processed())), + ( + api_version(), + Some(legacy_api_version()), + "api_version".to_string(), + ), + ( + finality_signature_v1(), + Some(legacy_finality_signature()), + "finality_signature_v1".to_string(), + ), + ( + finality_signature_v2(), + Some(legacy_finality_signature()), + "finality_signature_v2".to_string(), + ), + ( + transaction_accepted(), + None, + "transaction_accepted".to_string(), + ), + ( + deploy_accepted(), + Some(legacy_deploy_accepted()), + "legacy_deploy_accepted".to_string(), + ), + ( + deploy_expired(), + Some(legacy_deploy_expired()), + "legacy_deploy_expired".to_string(), + ), + ( + transaction_expired(), + None, + "transaction_expired".to_string(), + ), + (fault(), Some(legacy_fault()), "fault".to_string()), + ( + block_added_v1(), + Some(legacy_block_added()), + "block_added_v1".to_string(), + ), + ( + block_added_v2(), + Some(legacy_block_added_from_v2()), + "block_added_v2".to_string(), + ), + ( + deploy_processed(), + Some(legacy_deploy_processed()), + "deploy_processed".to_string(), + ), ] } } diff --git a/types/src/legacy_sse_data/translate_block_added.rs b/types/src/legacy_sse_data/translate_block_added.rs index 205a23f4..07b8516d 100644 --- a/types/src/legacy_sse_data/translate_block_added.rs +++ b/types/src/legacy_sse_data/translate_block_added.rs @@ -157,3 +157,207 @@ where } } } + +#[cfg(test)] +mod tests { + use std::collections::BTreeMap; + + use casper_types::{ + testing::TestRng, DeployHash, EraEndV1, EraReport, PublicKey, U512, + }; + use mockall::predicate; + use pretty_assertions::assert_eq; + use rand::Rng; + use serde::Serialize; + + use super::{ + BlockV2Translator, DefaultBlockV2Translator, DefaultEraEndV2Translator, 
EraEndV2Translator, + MockEraEndV2Translator, + }; + use crate::{ + legacy_sse_data::{fixtures::*, translate_deploy_hashes::MockDeployHashTranslator}, + testing::parse_public_key, + }; + + #[test] + pub fn default_block_v2_translator_translates_without_era_end_and_deploys() { + let (mut era_end_translator, mut deploy_hash_translator, mut transfer_hash_translator) = + prepare_mocks(); + let block_v2 = block_v2(); + let era_end_ref = block_v2.header().era_end().unwrap(); + prepare_era_end_mock(&mut era_end_translator, era_end_ref, None); + prepare_deploys_mock(&mut deploy_hash_translator, &block_v2, vec![]); + prepare_transfer_mock(&mut transfer_hash_translator, &block_v2, vec![]); + let under_test = DefaultBlockV2Translator { + era_end_translator, + deploy_hash_translator, + transfer_hash_translator, + }; + + let got = under_test.translate(&block_v2); + assert!(got.is_some()); + let expected = block_v1_no_deploys_no_era(); + compare_as_json(&expected, &got.unwrap()); + } + + #[test] + pub fn default_block_v2_translator_passes_era_end_info_and_deploys() { + let mut test_rng = TestRng::new(); + let (mut era_end_translator, mut deploy_hash_translator, mut transfer_hash_translator) = + prepare_mocks(); + let block_v2 = block_v2(); + let era_end_ref = block_v2.header().era_end().unwrap(); + let report = EraReport::random(&mut test_rng); + let validator_weights = random_validator_weights(&mut test_rng); + let era_end = EraEndV1::new(report, validator_weights); + let deploy_hashes_1: Vec = + (0..3).map(|_| DeployHash::random(&mut test_rng)).collect(); + let deploy_hashes_2: Vec = + (0..3).map(|_| DeployHash::random(&mut test_rng)).collect(); + prepare_era_end_mock(&mut era_end_translator, era_end_ref, Some(era_end.clone())); + prepare_deploys_mock( + &mut deploy_hash_translator, + &block_v2, + deploy_hashes_1.clone(), + ); + prepare_transfer_mock( + &mut transfer_hash_translator, + &block_v2, + deploy_hashes_2.clone(), + ); + + let under_test = DefaultBlockV2Translator { + 
era_end_translator, + deploy_hash_translator, + transfer_hash_translator, + }; + + let got = under_test.translate(&block_v2).unwrap(); + assert_eq!(got.body.deploy_hashes, deploy_hashes_1); + assert_eq!(got.body.transfer_hashes, deploy_hashes_2); + } + + #[test] + fn default_era_end_v2_translator_translates_all_data() { + let under_test = DefaultEraEndV2Translator; + let era_end_v2 = era_end_v2(); + let maybe_translated = under_test.translate(&era_end_v2); + assert!(maybe_translated.is_some()); + let translated = maybe_translated.unwrap(); + let mut expected_validator_weights = BTreeMap::new(); + expected_validator_weights.insert( + parse_public_key("013183e7169846881fb6ae07dc5ba63d92bd592d67681a765cf9813d5146de97f3"), + U512::from(277433153), + ); + expected_validator_weights.insert( + parse_public_key( + "0203ff1caebb0fd53fb4c52f8cf18d0e3257f6b075a16f0fd6aa5499ddb28a8d82ab", + ), + U512::from(818689728), + ); + let mut rewards = BTreeMap::new(); + rewards.insert( + parse_public_key("010d2d4fdda5ff7a9820de2fe18a262b29bb2df36cbc767446e1dcd015e9c5ea98"), + 155246594, + ); + let report = EraReport::new( + vec![ + parse_public_key( + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b50", + ), + parse_public_key( + "02037c17d279d6e54375f7cfb3559730d5434bfedc8638a3f95e55f6e85fc9e8f611", + ), + parse_public_key( + "02026d4b741a0ece4b3d6d61294a8db28a28dbd734133694582d38f240686ec61d05", + ), + ], + rewards, + vec![parse_public_key( + "010a10a45ea0aff7af1ffef92287d00ec4cf01c5e9e2952e018a2fbb0f0ede2b51", + )], + ); + let expected = EraEndV1::new(report, expected_validator_weights); + assert_eq!(translated, expected); + } + + #[test] + fn default_era_end_v2_translator_returns_none_when_reward_exceeds_u64() { + let under_test = DefaultEraEndV2Translator; + let era_end_v2 = era_end_v2_with_reward_exceeding_u64(); + let maybe_translated = under_test.translate(&era_end_v2); + assert!(maybe_translated.is_none()); + } + + fn compare_as_json(left: &T, right: &Y) + 
where + T: Serialize, + Y: Serialize, + { + let left_value = serde_json::to_value(left).unwrap(); + let right_value = serde_json::to_value(right).unwrap(); + assert_eq!(left_value, right_value); + } + + fn prepare_deploys_mock( + deploy_hash_translator: &mut MockDeployHashTranslator, + block_v2: &casper_types::BlockV2, + deploys: Vec, + ) { + deploy_hash_translator + .expect_translate() + .times(1) + .with(predicate::eq(block_v2.body().clone())) + .return_const(deploys); + } + + fn prepare_transfer_mock( + transfer_hash_translator: &mut MockDeployHashTranslator, + block_v2: &casper_types::BlockV2, + deploys: Vec, + ) { + transfer_hash_translator + .expect_translate() + .times(1) + .with(predicate::eq(block_v2.body().clone())) + .return_const(deploys); + } + + fn prepare_era_end_mock( + era_end_translator: &mut MockEraEndV2Translator, + era_end_ref: &casper_types::EraEndV2, + returned: Option, + ) { + era_end_translator + .expect_translate() + .times(1) + .with(predicate::eq(era_end_ref.clone())) + .return_const(returned); + } + + fn prepare_mocks() -> ( + MockEraEndV2Translator, + MockDeployHashTranslator, + MockDeployHashTranslator, + ) { + let era_end_translator = MockEraEndV2Translator::new(); + let deploy_hash_translator = MockDeployHashTranslator::new(); + let transfer_hash_translator = MockDeployHashTranslator::new(); + ( + era_end_translator, + deploy_hash_translator, + transfer_hash_translator, + ) + } + + fn random_validator_weights( + test_rng: &mut TestRng, + ) -> std::collections::BTreeMap { + let mut tree = BTreeMap::new(); + let number_of_weights = test_rng.gen_range(5..=10); + for _ in 0..number_of_weights { + tree.insert(PublicKey::random(test_rng), test_rng.gen()); + } + tree + } +} diff --git a/types/src/legacy_sse_data/translate_deploy_hashes.rs b/types/src/legacy_sse_data/translate_deploy_hashes.rs index 58b59d5f..d8c49899 100644 --- a/types/src/legacy_sse_data/translate_deploy_hashes.rs +++ b/types/src/legacy_sse_data/translate_deploy_hashes.rs 
@@ -35,3 +35,47 @@ impl DeployHashTranslator for TransferDeployHashesTranslator { .collect() } } + +#[cfg(test)] +mod tests { + use crate::{legacy_sse_data::fixtures::block_v2, testing::parse_deploy_hash}; + + use super::*; + + #[test] + fn standard_deploy_hashes_translator_uses_standard_deploy_transaction_hashes() { + let under_test = StandardDeployHashesTranslator; + let block_v2 = block_v2(); + let block_body = block_v2.body(); + let translated = under_test.translate(block_body); + assert_eq!( + translated, + vec![ + parse_deploy_hash( + "e185793e2a6214542ffee6de0ede37d7dd9748b429e4586d73fd2abdd100bd7c" + ), + parse_deploy_hash( + "c4f7acd014ef88af95ebf338e8dd29b95b161a6a812a6764112bf9d09abc399a" + ) + ] + ) + } + #[test] + fn transfer_deploy_hashes_translator_uses_mint_deploy_transaction_hashes() { + let under_test = TransferDeployHashesTranslator; + let block_v2 = block_v2(); + let block_body = block_v2.body(); + let translated = under_test.translate(block_body); + assert_eq!( + translated, + vec![ + parse_deploy_hash( + "19cd7acc75ffe58e6dd5f3f1a6b7c08f8d02bf47928926054d4818e6eb41ca74" + ), + parse_deploy_hash( + "5e50ebcf0190ef2be4182fe7940f4d68dde8210f42c75ca9478fc1be765c5751" + ) + ] + ) + } +} diff --git a/types/src/legacy_sse_data/translate_execution_result.rs b/types/src/legacy_sse_data/translate_execution_result.rs index b35b1c5e..19c09154 100644 --- a/types/src/legacy_sse_data/translate_execution_result.rs +++ b/types/src/legacy_sse_data/translate_execution_result.rs @@ -66,7 +66,7 @@ impl ExecutionEffectsTranslator for DefaultExecutionEffectsTranslator { let maybe_transform_kind = map_transform_v2(ex_ef); if let Some(transform_kind) = maybe_transform_kind { let transform = TransformV1 { - key: key.to_string(), + key: key.to_formatted_string(), transform: transform_kind, }; transforms.push(transform); @@ -120,7 +120,7 @@ fn handle_named_keys(keys: &NamedKeys) -> Option { for (name, key) in keys.iter() { let named_key = NamedKey { name: 
name.to_string(), - key: key.to_string(), + key: key.to_formatted_string(), }; named_keys.push(named_key); } @@ -128,7 +128,6 @@ fn handle_named_keys(keys: &NamedKeys) -> Option { } fn maybe_tanslate_stored_value(stored_value: &StoredValue) -> Option { - //TODO stored_value this shouldn't be a reference. we should take ownership and reassign to V1 enum to avoid potentially expensive clones. match stored_value { StoredValue::CLValue(cl_value) => Some(TransformKindV1::WriteCLValue(cl_value.clone())), StoredValue::Account(acc) => Some(TransformKindV1::WriteAccount(acc.account_hash())), @@ -157,7 +156,7 @@ fn maybe_tanslate_stored_value(stored_value: &StoredValue) -> Option None, StoredValue::BidKind(_) => None, StoredValue::Package(_) => None, @@ -171,14 +170,23 @@ fn maybe_tanslate_stored_value(stored_value: &StoredValue) -> Option Vec { + let transform_1 = TransformV1 { + key: key_1.to_formatted_string(), + transform: TransformKindV1::Identity, + }; + let transform_2 = TransformV1 { + key: key_2.to_formatted_string(), + transform: TransformKindV1::AddKeys(vec![ + NamedKey { + name: "key_1".to_string(), + key: key_1.to_formatted_string(), + }, + NamedKey { + name: "key_2".to_string(), + key: key_2.to_formatted_string(), + }, + ]), + }; + let transform_3 = TransformV1 { + key: key_3.to_formatted_string(), + transform: TransformKindV1::AddUInt64(1235), + }; + let expected_transforms = vec![transform_1, transform_2, transform_3]; + expected_transforms + } + + fn build_example_effects(key_1: Key, key_2: Key, key_3: Key) -> Effects { + let mut effects = Effects::new(); + effects.push(TransformV2::new(key_1, TransformKindV2::Identity)); + let mut named_keys = NamedKeys::new(); + named_keys.insert("key_1".to_string(), key_1.clone()); + named_keys.insert("key_2".to_string(), key_2.clone()); + effects.push(TransformV2::new( + key_2, + TransformKindV2::AddKeys(named_keys), + )); + effects.push(TransformV2::new(key_3, TransformKindV2::AddUInt64(1235))); + effects } fn 
random_account() -> Account { diff --git a/types/src/lib.rs b/types/src/lib.rs index dcaa2273..b7f12768 100644 --- a/types/src/lib.rs +++ b/types/src/lib.rs @@ -7,7 +7,7 @@ extern crate alloc; mod filter; pub mod legacy_sse_data; pub mod sse_data; -#[cfg(feature = "sse-data-testing")] +#[cfg(any(feature = "sse-data-testing", test))] mod testing; use casper_types::ProtocolVersion; diff --git a/types/src/sse_data.rs b/types/src/sse_data.rs index afcfd33b..bb8e43fe 100644 --- a/types/src/sse_data.rs +++ b/types/src/sse_data.rs @@ -48,13 +48,11 @@ pub(crate) fn to_error(msg: String) -> SseDataDeserializeError { /// Deserializes a string which should contain json data and returns a result of either SseData (which is 2.0.x compliant) or an SseDataDeserializeError /// /// * `json_raw`: string slice which should contain raw json data. -pub fn deserialize(json_raw: &str) -> Result<(SseData, bool), SseDataDeserializeError> { - serde_json::from_str::(json_raw) - .map(|el| (el, false)) - .map_err(|err| { - let error_message = format!("Serde Error: {}", err); - to_error(error_message) - }) +pub fn deserialize(json_raw: &str) -> Result { + serde_json::from_str::(json_raw).map_err(|err| { + let error_message = format!("Serde Error: {}", err); + to_error(error_message) + }) } /// The "data" field of the events sent on the event stream to clients. diff --git a/types/src/testing.rs b/types/src/testing.rs index c9496fb4..e75f45ff 100644 --- a/types/src/testing.rs +++ b/types/src/testing.rs @@ -4,10 +4,14 @@ //! `casper-node` library. use casper_types::{ - testing::TestRng, Deploy, TimeDiff, Timestamp, Transaction, TransactionV1Builder, + testing::TestRng, Deploy, TimeDiff, Timestamp, Transaction, + TransactionV1Builder, }; use rand::Rng; +#[cfg(test)] +use casper_types::{DeployHash, PublicKey}; +#[cfg(feature = "sse-data-testing")] /// Creates a test deploy created at given instant and with given ttl. 
pub fn create_test_transaction( created_ago: TimeDiff, @@ -32,6 +36,7 @@ pub fn create_test_transaction( } } +#[cfg(feature = "sse-data-testing")] /// Creates a random deploy that is considered expired. pub fn create_expired_transaction(now: Timestamp, test_rng: &mut TestRng) -> Transaction { create_test_transaction( @@ -41,3 +46,15 @@ pub fn create_expired_transaction(now: Timestamp, test_rng: &mut TestRng) -> Tra test_rng, ) } + +#[cfg(test)] +pub fn parse_public_key(arg: &str) -> PublicKey { + let escaped = format!("\"{}\"", arg); + serde_json::from_str(&escaped).unwrap() +} + +#[cfg(test)] +pub fn parse_deploy_hash(arg: &str) -> DeployHash { + let escaped = format!("\"{}\"", arg); + serde_json::from_str(&escaped).unwrap() +}