From 949d5029e4cd2cdcdbe446e2bfd0328f402edff4 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 13:59:34 +0200 Subject: [PATCH 01/45] Updated flow, wording, and some configs in the main README --- README.md | 291 +++++++++++++++++++++++++++++------------------------- 1 file changed, 156 insertions(+), 135 deletions(-) diff --git a/README.md b/README.md index 4b8cbef5..7de2ee76 100644 --- a/README.md +++ b/README.md @@ -1,21 +1,22 @@ -# Casper Event Sidecar README +# The Casper Sidecar ## Summary of Purpose -The Casper Event Sidecar is an application that runs in tandem with the node process. It's main purpose is to: -* offload the node from broadcasting SSE events to multiple clients -* provide client features that aren't part of the nodes' functionality, nor should they be +The Casper Sidecar application runs in tandem with the node process, and its primary purpose is to: +* Offload the node from broadcasting SSE events to multiple clients. +* Provide client features that aren't part of the nodes' functionality, nor should they be. While the primary use case for the Sidecar application is running alongside the node on the same machine, it can be run remotely if necessary. -### System Components & Architecture +## System Components & Architecture -Casper Sidecar has three main functionalities: -* Providing a SSE server with a firehose `/events` endpoint that streams all events from the connected nodes. Sidecar also stores observed events in storage. -* Providing a REST API server that allows clients to query events in storage. -* Be a JSON RPC bridge between end users and a Casper node's binary RPC port. +The Casper Sidecar provides the following functionalities: +* A server-sent events (SSE) server with an `/events` endpoint that streams all the events received from all connected nodes. The Sidecar also stores these events. +* A REST API server that allows clients to query stored events. 
+* A JSON RPC bridge between end users and a Casper node's binary port. + +The Sidecar has the following components and external dependencies: -The system has the following components and external dependencies: ```mermaid graph LR; subgraph CASPER-SIDECAR @@ -35,9 +36,10 @@ The system has the following components and external dependencies: STORAGE --> REST_API ``` -#### SSE Server +### The SSE server + +The SSE Server has these components: -Diving into the SSE Server, we see the following components: ```mermaid graph TD; CLIENT{Client} @@ -48,7 +50,7 @@ Diving into the SSE Server, we see the following components: NODE_SSE{Node SSE port} SSE_LISTENER --2--> STORAGE NODE_SSE --1--> SSE_LISTENER - subgraph "Casper sidecar" + subgraph "Casper Sidecar" MAIN[main.rs] MAIN --2.spawns---> SSE-SERVER subgraph SSE-SERVER @@ -63,19 +65,23 @@ Diving into the SSE Server, we see the following components: end ``` -Given the flow above, the SSE Listener processes events in this order: -1. Fetch an event from the node's SSE port -2. Store the event -3. Publish the event to the SSE API +The SSE Listener processes events in this order: +1. Fetch an event from the node's SSE port. +2. Store the event. +3. Publish the event to the SSE API. + +Casper nodes stream server-sent events with JSON-encoded data to the Sidecar. The Sidecar reads the event stream of all connected nodes, acting as a passthrough and replicating the SSE interface of the connected nodes. Enabling and configuring the SSE Server of the Sidecar is optional. + +The Sidecar can: +* Republish the current events from the node to clients listening to Sidecar's SSE API. +* Publish a configurable number of previous events to clients connecting to the Sidecar's SSE API with `?start_from=` query. +* Store the events in external storage for clients to query them via the Sidecar's REST API. + +### The REST API server -Casper nodes offer an event stream API that returns Server-Sent Events (SSEs) with JSON-encoded data. 
The Sidecar reads the event stream of all connected nodes, acting as a passthrough and replicating the SSE interface of the connected nodes. The Sidecar can:
-* republish the current events from the node to clients listening to Sidecar's SSE API
-* publish a configurable number of previous events to clients connecting to the Sidecar's SSE API with `?start_from=` query (similar to the node's SSE API)
-* store the events in external storage for clients to query them via the Sidecar's REST API
-Enabling and configuring the SSE Server of the Sidecar is optional.
+The Sidecar offers an optional REST API that allows clients to query the events stored in external storage. Node operators can discover the specific endpoints of the REST API using [OpenAPI](#openapi-specification) and [Swagger](#swagger-documentation). The [usage instructions](USAGE.md) provide more details.

-#### REST API Server
+
```mermaid
graph LR;
    CLIENT{Client}
    CLIENT --> REST_API
    STORAGE[(Storage)]
    REST_API --> STORAGE
    CONFIG{{"Config file (toml)"}}
    MAIN --1.reads--> CONFIG
-    subgraph "Casper sidecar"
+    subgraph "Casper Sidecar"
    MAIN[main.rs]
    MAIN --2.spawns--> REST_API
    REST_API["REST API"]
    end
```

-The Sidecar offers an optional REST API that allows clients to query the events stored in external storage. Node operators can discover the specific endpoints of the REST API using [OpenAPI] (#openapi-specification) and [Swagger] (#swagger-documentation). Also, the [usage instructions](USAGE.md) provide more details.
+### The Admin API server
+
+The Sidecar offers an administrative API to allow an operator to check its current status. The Sidecar operator has the option to enable and configure this API. Please see the [admin server configuration](#admin-server) for details. 
-#### ADMIN API Server ```mermaid graph LR; CLIENT{Client} CLIENT --> ADMIN_API CONFIG{{Config file}} MAIN --1.reads--> CONFIG - subgraph "Casper sidecar" + subgraph "Casper Sidecar" MAIN[main.rs] MAIN --2.spawns--> ADMIN_API ADMIN_API["ADMIN API"] end ``` -The Sidecar offers an administrative API to allow an operator to check its current status. The Sidecar operator has the option to enable and configure this API. Please see the [admin server configuration](#admin-server) for details. +### The RPC API server + +The Sidecar also offers an RPC JSON API server that can be enabled and configured so that clients can interact with a Casper network. It is a JSON bridge between end users and a Casper node's binary port. The RPC API server forwards requests to the Casper node's binary port. For more details on how the RPC JSON API works, see the [RPC Sidecar README](rpc_sidecar/README.md). -#### RPC API Server ```mermaid graph LR; CLIENT{Client} @@ -118,15 +126,40 @@ The Sidecar offers an administrative API to allow an operator to check its curre MAIN --1.reads--> CONFIG CASPER_NODE(("Casper Node binary port")) RPC_API --forwards request--> CASPER_NODE - subgraph "Casper sidecar" + subgraph "Casper Sidecar" MAIN[main.rs] MAIN --2.spawns--> RPC_API RPC_API["RPC JSON API"] end ``` -The Sidecar offers an optional RPC JSON API module that can be enabled and configured. It is a JSON bridge between end users and a Casper node's binary port. The RPC API server forwards requests to the Casper node's binary port. For more details on how the RPC JSON API works, see the [RPC Sidecar README](rpc_sidecar/README.md). 
-Here is an example configuration of the RPC API server: +## Running and Testing the Sidecar + +## Prerequisites + +To compile, test, and run the Sidecar, install the following software first: + +* CMake 3.1.4 or greater +* [Rust](https://www.rust-lang.org/tools/install) +* pkg-config +* gcc +* g++ + +## Configuration + +The Sidecar service must be configured using a `.toml` file specified at runtime. + +This repository contains several sample configuration files that can be used as examples and adjusted according to your scenario: + +- [EXAMPLE_NCTL_CONFIG.toml](./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml) - Configuration for connecting to nodes on a local NCTL network. This configuration is used in the unit and integration tests found in this repository. +- [EXAMPLE_NCTL_POSTGRES_CONFIG.toml](./resources/example_configs/EXAMPLE_NCTL_POSTGRES_CONFIG.toml) - Configuration for using the PostgreSQL database and nodes on a local NCTL network. +- [EXAMPLE_NODE_CONFIG.toml](./resources/example_configs/EXAMPLE_NODE_CONFIG.toml) - Configuration for connecting to live nodes on a Casper network. + +Once you create the configuration file and are ready to run the Sidecar service, you must provide the configuration as an argument using the `-- --path-to-config` option as described [here](#running-the-sidecar). + +### Configuring the RPC server + +Here is an example configuration for the RPC API server: ``` [rpc_server.main_server] @@ -137,10 +170,12 @@ max_body_bytes = 2_621_440 cors_origin = '' [rpc_server.node_client] -address = '127.0.0.1:28101' +address = '0.0.0.0:28101' max_message_size_bytes = 4_194_304 request_limit = 3 request_buffer_size = 16 +message_timeout_secs = 30 +client_access_timeout_secs = 2 [rpc_server.speculative_exec_server] enable_server = true @@ -169,43 +204,27 @@ max_attempts = 30 * `speculative_exec_server.max_body_bytes` - Maximum body size of request to API in bytes. * `speculative_exec_server.cors_origin` - Configures the CORS origin. 
-* `node_client.address` - Address of the Casper Node binary port +* `node_client.address` - Address of the Casper Node binary port. * `node_client.max_message_size_bytes` - Maximum binary port message size in bytes. * `node_client.request_limit` - Maximum number of in-flight requests. * `node_client.request_buffer_size` - Number of node requests that can be buffered. +* `node_client.message_timeout_secs` - Timeout for the message. +* `node_client.client_access_timeout_secs` - Timeout for the client connection. * `node_client.exponential_backoff.initial_delay_ms` - Timeout after the first broken connection (backoff) in milliseconds. * `node_client.exponential_backoff.max_delay_ms` - Maximum timeout after a broken connection in milliseconds. * `node_client.exponential_backoff.coefficient` - Coefficient for the exponential backoff. The next timeout is calculated as min(`current_timeout * coefficient`, `max_delay_ms`). * `node_client.exponential_backoff.max_attempts` - Maximum number of times to try to reconnect to the binary port of the node. -## Prerequisites - -* CMake 3.1.4 or greater -* [Rust](https://www.rust-lang.org/tools/install) -* pkg-config -* gcc -* g++ - -## Configuration - -The SSE Sidecar service must be configured using a `.toml` file specified at runtime. - -This repository contains several sample configuration files that can be used as examples and adjusted according to your scenario: - -- [EXAMPLE_NCTL_CONFIG.toml](./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml) - Configuration for connecting to nodes on a local NCTL network. 
This configuration is used in the unit and integration tests found in this repository -- [EXAMPLE_NCTL_POSTGRES_CONFIG.toml](./resources/example_configs/EXAMPLE_NCTL_POSTGRES_CONFIG.toml) - Configuration for using the PostgreSQL database and nodes on a local NCTL network -- [EXAMPLE_NODE_CONFIG.toml](./resources/example_configs/EXAMPLE_NODE_CONFIG.toml) - Configuration for connecting to live nodes on a Casper network and setting up an admin server - -Once you create the configuration file and are ready to run the Sidecar service, you must provide the configuration as an argument using the `-- --path-to-config` option as described [here](#running-the-sidecar). - ### SSE server configuration -The Casper sidecar SSE server is used to connect to casper nodes, listen to events from them, store them locally and re-broadcast them to clients. The configuration for the SSE server itself is as follows: + +The Sidecar SSE server is used to connect to Casper nodes, listen to events from them, store them locally and re-broadcast them to clients. Here is a sample configuration for the SSE server: ``` [sse_server] enable_server = true emulate_legacy_sse_apis = ["V1"] + [[sse_server.connections]] @@ -214,15 +233,19 @@ emulate_legacy_sse_apis = ["V1"] ``` * `sse_server.enable_server` - If set to true, the SSE server will be enabled. -* `sse_server.emulate_legacy_sse_apis` - A list of legacy casper node SSE APIs to emulate. The Sidecar will expose sse endpoints that are compatible with specified versions. Please bear in mind that this feature is an emulation and should be used only for transition periods. In most case scenarios having a 1 to 1 mapping of new messages into old formats is impossible, so this can be a process that looses some data and/or doesn't emit all messages that come out of the casper node. The details of the emulation are described in section [Event Stream Server SSE legacy emulations](#event-stream-server-sse-legacy-emulations) module. 
+* `sse_server.emulate_legacy_sse_apis` - A list of legacy Casper node SSE APIs to emulate. The Sidecar will expose SSE endpoints that are compatible with specified versions. Please bear in mind that this feature is an emulation and should be used only for transition periods. In most scenarios, having a 1-to-1 mapping of new messages into old formats is impossible, so this can be a process that loses some data and/or doesn't emit all messages that come from the Casper node. -#### SSE Node Connections +#### SSE node connections -The Casper Sidecar's SSE component can connect to Casper nodes' SSE endpoints with versions greater or equal to `2.0.0`. + +The Sidecar's SSE component can connect to Casper nodes' SSE endpoints with versions greater or equal to `2.0.0`. The `node_connections` option configures the node (or multiple nodes) to which the Sidecar will connect and the parameters under which it will operate with that node. Connecting to multiple nodes requires multiple `[[sse_server.connections]]` sections. ``` +[sse_server] +enable_server = true + [[sse_server.connections]] ip_address = "127.0.0.1" sse_port = 18101 @@ -267,43 +290,77 @@ sleep_between_keep_alive_checks_in_seconds = 30 * `delay_between_retries_in_seconds` - The delay between attempts to connect to the node. * `allow_partial_connection` - Determining whether the Sidecar will allow a partial connection to this node. * `enable_logging` - This enables the logging of events from the node in question. -* `connection_timeout_in_seconds` - Number of seconds before the connection request times out. Parameter is optional, defaults to 5 -* `no_message_timeout_in_seconds` - Number of seconds after which the connection will be restarted if no bytes were received. Parameter is optional, defaults to 120 -* `sleep_between_keep_alive_checks_in_seconds` - Optional parameter specifying the time intervals (in seconds) for checking if the connection is still alive. 
Defaults to 60 +* `connection_timeout_in_seconds` - Number of seconds before the connection request times out. This parameter is optional, and defaults to 5. +* `no_message_timeout_in_seconds` - Number of seconds after which the connection will be restarted if no bytes were received. This parameter is optional, and defaults to 120. +* `sleep_between_keep_alive_checks_in_seconds` - Optional parameter specifying the time intervals (in seconds) for checking if the connection is still alive. Defaults to 60. -#### Event Stream Server SSE legacy emulations +#### SSE legacy emulations + +Applications using version 1 of a Casper node's event stream server can still function using an emulated V1 SSE API for a limited time. Enabling the V1 SSE API emulation requires the `emulate_legacy_sse_apis` setting to be `["V1"]`: -Currently the only possible emulation is the V1 SSE API. Enabling V1 SSE api emulation requires setting `emulate_legacy_sse_apis` to `["V1"]`, like: ``` [sse_server] -(...) +enable_server = true emulate_legacy_sse_apis = ["V1"] -(...) ``` -This will expose three additional sse endpoints: -* `/events/sigs` -* `/events/deploys` -* `/events/main` +This setting will expose three legacy SSE endpoints with the following events streamed on each endpoint: +* `/events/sigs` - Finality Signature events +* `/events/deploys` - DeployAccepted events +* `/events/main` - All other legacy events, including BlockAdded, DeployProcessed, DeployExpired, Fault, Step, and Shutdown events -Those endpoints will emit events in the same format as the V1 SSE API of the casper node. 
There are limitations to what Casper Sidecar can and will do, here is a list of assumptions:
+
+
+#### Event stream configuration
+
+To configure the Sidecar's event stream server, specify the following settings:
+
+```
+[sse_server.event_stream_server]
+port = 19999
+max_concurrent_subscribers = 100
+event_stream_buffer_length = 5000
+```
+
+* `event_stream_server.port` - The port under which the Sidecar's SSE server publishes events.
+* `event_stream_server.max_concurrent_subscribers` - The maximum number of subscribers that can monitor the Sidecar's event stream.
+* `event_stream_server.event_stream_buffer_length` - The number of events that the stream will hold in its buffer for reference when a subscriber reconnects.
+
+### REST server configuration
+
+The following section determines outbound connection criteria for the Sidecar's REST server.
+
+```
+[rest_api_server]
+enable_server = true
+port = 18888
+max_concurrent_requests = 50
+max_requests_per_second = 50
+request_timeout_in_seconds = 10
+```
 
-TODO -> fill this in the next PR when mapping is implemented
+* `enable_server` - If set to true, the REST API server will be enabled.
+* `port` - The port for accessing the Sidecar's REST server. `18888` is the default, but operators are free to choose their own port as needed.
+* `max_concurrent_requests` - The maximum total number of simultaneous requests that can be made to the REST server.
+* `max_requests_per_second` - The maximum total number of requests that can be made per second.
+* `request_timeout_in_seconds` - The total time before a request times out.
 
 ### Storage
 
-This directory stores the SSE cache and an SQLite database if the Sidecar is configured to use SQLite.
+This directory stores the SSE cache and an SQLite database if the Sidecar was configured to use SQLite.
 
 ```
 [storage]
 storage_path = "./target/storage"
 ```
 
-### Database Connectivity
+### Database connectivity
 
-The Sidecar can connect to different types of databases. 
The current options are `SQLite` or `PostgreSQL`. The following sections show how to configure the database connection for one of these DBs. Note that the Sidecar can only connect to one DB at a time. +The Sidecar can connect to different types of databases. The current options are `SQLite` or `PostgreSQL`. The following sections show how to configure the database connection. Note that the Sidecar can only connect to one database at a time. -#### SQLite Database +#### SQLite database This section includes configurations for the SQLite database. @@ -311,7 +368,6 @@ This section includes configurations for the SQLite database. [storage.sqlite_config] file_name = "sqlite_database.db3" max_connections_in_pool = 100 -# https://www.sqlite.org/compile.html#default_wal_autocheckpoint wal_autocheckpointing_interval = 1000 ``` @@ -319,7 +375,7 @@ wal_autocheckpointing_interval = 1000 * `storage.sqlite_config.max_connections_in_pool` - The maximum number of connections to the database (should generally be left as is). * `storage.sqlite_config.wal_autocheckpointing_interval` - This controls how often the system commits pages to the database. The value determines the maximum number of pages before forcing a commit. More information can be found [here](https://www.sqlite.org/compile.html#default_wal_autocheckpoint). -#### PostgreSQL Database +#### PostgreSQL database The properties listed below are elements of the PostgreSQL database connection that can be configured for the Sidecar. @@ -357,9 +413,7 @@ SIDECAR_POSTGRES_MAX_CONNECTIONS="max connections" SIDECAR_POSTGRES_PORT="port" ``` -However, DB connectivity can also be configured using the Sidecar configuration file. - -If the DB environment variables and the Sidecar's configuration file have the same variable set, the DB environment variables will take precedence. +However, DB connectivity can also be configured using the Sidecar configuration file. 
If the DB environment variables and the Sidecar's configuration file have the same variable set, the DB environment variables will take precedence. It is possible to completely omit the PostgreSQL configuration from the Sidecar's configuration file. In this case, the Sidecar will attempt to connect to the PostgreSQL using the database environment variables or use some default values for non-critical variables. @@ -372,40 +426,7 @@ database_username = "postgres" max_connections_in_pool = 30 ``` -#### Rest & Event Stream Criteria - -This information determines outbound connection criteria for the Sidecar's `rest_server`. - -``` -[rest_api_server] -enable_server = true -port = 18888 -max_concurrent_requests = 50 -max_requests_per_second = 50 -request_timeout_in_seconds = 10 -``` -* `enable_server` - If set to true, the RPC API server will be enabled. -* `port` - The port for accessing the sidecar's `rest_server`. `18888` is the default, but operators are free to choose their own port as needed. -* `max_concurrent_requests` - The maximum total number of simultaneous requests that can be made to the REST server. -* `max_requests_per_second` - The maximum total number of requests that can be made per second. -* `request_timeout_in_seconds` - The total time before a request times out. - -``` -[sse_server.event_stream_server] -port = 19999 -max_concurrent_subscribers = 100 -event_stream_buffer_length = 5000 -``` - -The `sse_server.event_stream_server` section specifies a port for the Sidecar's event stream. - -Additionally, there are the following two options: - -* `event_stream_server.port` - Port under which the SSE server is published. -* `event_stream_server.max_concurrent_subscribers` - The maximum number of subscribers that can monitor the Sidecar's event stream. -* `event_stream_server.event_stream_buffer_length` - The number of events that the stream will hold in its buffer for reference when a subscriber reconnects. 
- -### Admin Server +### Admin server configuration This optional section configures the Sidecar's administrative server. If this section is not specified, the Sidecar will not start an admin server. @@ -432,25 +453,9 @@ Once the Sidecar is running, access the Swagger documentation at `http://localho An OpenAPI schema is available at `http://localhost:18888/api-doc.json/`. You need to replace `localhost` with the IP address of the machine running the Sidecar application if you are running the Sidecar remotely. -## Unit Testing the Sidecar - -You can run the unit and integration tests included in this repository with the following command: - -``` -cargo test -``` - -You can also run the performance tests using the following command: - -``` -cargo test -- --include-ignored -``` - -The [EXAMPLE_NCTL_CONFIG.toml](./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml) file contains the configurations used for these tests. - ## Running the Sidecar -After creating the configuration file, run the Sidecar using Cargo and point to the configuration file using the `--path-to-config` option, as shown below. The command needs to run with `root` privileges. +After creating the configuration file, run the Sidecar using `cargo` and point to the configuration file using the `--path-to-config` option, as shown below. The command needs to run with `root` privileges. ```shell sudo cargo run -- --path-to-config ./resources/example_configs/EXAMPLE_NODE_CONFIG.toml @@ -458,7 +463,7 @@ sudo cargo run -- --path-to-config ./resources/example_configs/EXAMPLE_NODE_CONF The Sidecar application leverages tracing, which can be controlled by setting the `RUST_LOG` environment variable. -The following command will run the sidecar application with the `INFO` log level. +The following command will run the Sidecar application with the `INFO` log level. 
``` RUST_LOG=info cargo run -p casper-sidecar -- --path-to-config ./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml @@ -474,11 +479,27 @@ The log levels, listed in order of increasing verbosity, are: Further details about log levels can be found [here](https://docs.rs/env_logger/0.9.1/env_logger/#enabling-logging). -## Testing the Sidecar using NCTL +## Testing the Sidecar + +You can run the unit and integration tests included in this repository with the following command: + +``` +cargo test +``` + +You can also run the performance tests using this command: + +``` +cargo test -- --include-ignored +``` + +The [EXAMPLE_NCTL_CONFIG.toml](./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml) file contains the configurations used for these tests. + +### Testing the Sidecar using NCTL The Sidecar application can be tested against live Casper nodes or a local [NCTL network](https://docs.casperlabs.io/dapp-dev-guide/building-dapps/setup-nctl/). -The configuration shown within this README will direct the Sidecar application to a locally hosted NCTL network if one is running. The Sidecar should function the same way it would with a live node, displaying events as they occur in the local NCTL network. +The configuration shown [here](./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml) will direct the Sidecar application to a locally hosted NCTL network if one is running. The Sidecar should function the same way it would while connected to a live node, displaying events as they occur in the local NCTL network. ## Troubleshooting Tips @@ -509,7 +530,7 @@ curl http://SIDECAR_URL:SIDECAR_ADMIN_PORT/metrics **Sample output**: ``` -# HELP node_statuses Current status of node to which sidecar is connected. Numbers mean: 0 - preparing; 1 - connecting; 2 - connected; 3 - reconnecting; -1 - connections_exhausted -> used up all connection attempts ; -2 - incompatible -> node is in an incompatible version +# HELP node_statuses Current status of node to which the Sidecar is connected. 
Numbers mean: 0 - preparing; 1 - connecting; 2 - connected; 3 - reconnecting; -1 - connections_exhausted -> used up all connection attempts ; -2 - incompatible -> node is in an incompatible version # TYPE node_statuses gauge node_statuses{node="35.180.42.211:9999"} 2 node_statuses{node="69.197.42.27:9999"} 2 From 8a346a46ad4f4f8041cc5c6b74043ed51ebb8a9c Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 14:12:42 +0200 Subject: [PATCH 02/45] Edited USAGE and formatting error --- USAGE.md | 58 +++++++++++++++++++++++++++----------------------------- 1 file changed, 28 insertions(+), 30 deletions(-) diff --git a/USAGE.md b/USAGE.md index 38030f8a..240e99cc 100644 --- a/USAGE.md +++ b/USAGE.md @@ -1,4 +1,4 @@ -# Casper Event Sidecar USAGE +# Casper Sidecar USAGE This document describes how to consume events and perform queries using the Sidecar, covering the following topics: @@ -20,10 +20,12 @@ Events are emitted on two endpoints: For more information on various event types emitted by the node, visit the [Monitoring and Consuming Events](https://docs.casperlabs.io/developers/dapps/monitor-and-consume-events/#event-types) documentation. -### Monitoring the Sidecar Event Stream +### Monitoring the Sidecar event stream It is possible to monitor the Sidecar event stream using *cURL*, depending on how the HOST and PORT are configured. +The Sidecar can connect to Casper nodes with versions greater or equal to `2.0.0`. + ```json curl -s http:///events ``` @@ -43,15 +45,13 @@ Also, the Sidecar exposes an endpoint for Sidecar-generated events: curl -sN http://127.0.0.1:19999/events/sidecar ``` -### The API Version of Node Events - -An `ApiVersion` event is always emitted when a new client connects to a node's SSE server, informing the client of the node's software version. +### Node events versioning -When a client connects to the Sidecar, the Sidecar displays the node’s API version, `ApiVersion`, which it receives from the node. 
Then, it starts streaming the events coming from the node. The `ApiVersion` may differ from the node’s build version. +An `ApiVersion` event is always emitted when the Sidecar connects to a node's SSE server, broadcasting the node's software version. Then, the Sidecar starts streaming the events coming from the node. Note that the `ApiVersion` may differ from the node’s build version. If the node goes offline, the `ApiVersion` may differ when it restarts (i.e., in the case of an upgrade). In this case, the Sidecar will report the new `ApiVersion` to its client. If the node’s `ApiVersion` has not changed, the Sidecar will not report the version again and will continue to stream messages that use the previous version. -Here is an example of what the API version would look like while listening on the Sidecar’s `TransactionAccepted` event stream: +Here is an example of what the API version would look like while listening on the Sidecar’s event stream. The colons represent "keep-alive" messages. ``` curl -sN http://127.0.0.1:19999/events @@ -68,13 +68,9 @@ id:21821471 : ``` -#### Middleware Mode - -The Sidecar can connect simultaneously to nodes with different build versions, which send messages with different API versions. There is also the rare possibility of nodes changing API versions and not being in sync with other connected nodes. Although this situation would be rare, clients should be able to parse messages with different API versions. +>**Note**: The Sidecar can connect simultaneously to nodes with different build versions, which send messages with different API versions. There is also the rare possibility of nodes changing API versions and not being in sync with other connected nodes. Although this situation would be rare, clients should be able to parse messages with different API versions. ->**Note**: The Sidecar can connect to Casper nodes with versions greater or equal to `2.0.0`. 
- -### The Version of Sidecar Events +### Sidecar events versioning When a client connects to the `events/sidecar` endpoint, it will receive a message containing the version of the Sidecar software. Release version `1.1.0` would look like this: @@ -91,9 +87,9 @@ data:{"SidecarVersion":"1.1.0"} Note that the SidecarVersion differs from the APIVersion emitted by the node event streams. You will also see the keep-alive messages as colons, ensuring the connection is active. -### The Node Shutdown Event +### The node's Shutdown event -When the node sends a Shutdown event and disconnects from the Sidecar, the Sidecar will report it as part of the event stream and on the `/events` endpoint. The Sidecar will continue to operate and attempt to reconnect to the node according to the `max_attempts` and `delay_between_retries_in_seconds` settings specified in its configuration. +When the node sends a Shutdown event and disconnects from the Sidecar, the Sidecar will report it as part of the event stream on the `/events` endpoint. The Sidecar will continue to operate and attempt to reconnect to the node according to the `max_attempts` and `delay_between_retries_in_seconds` settings specified in its configuration. The Sidecar does not expose Shutdown events via its REST API. @@ -131,7 +127,7 @@ id:3 Note that the Sidecar can emit another type of shutdown event on the `events/sidecar` endpoint, as described below. -### The Sidecar Shutdown Event +### The Sidecar Shutdown event If the Sidecar attempts to connect to a node that does not come back online within the maximum number of reconnection attempts, the Sidecar will start a controlled shutdown process. It will emit a Sidecar-specific Shutdown event on the [events/sidecar](#the-sidecar-shutdown-event) endpoint, designated for events originating solely from the Sidecar service. The other event streams do not get this message because they only emit messages from the node. 
@@ -156,7 +152,7 @@ id:8 The Sidecar provides a RESTful endpoint for useful queries about the state of the network. -### Latest Block +### Latest block Retrieve information about the last block added to the linear chain. @@ -178,7 +174,7 @@ curl -s http://127.0.0.1:18888/block

-### Block by Hash +### Block by hash Retrieve information about a block given its block hash. @@ -199,7 +195,7 @@ curl -s http://127.0.0.1:18888/block/bd2e0c36150a74f50d9884e38a0955f8b1cba94821b

-### Block by Height +### Block by chain height Retrieve information about a block, given a specific block height. @@ -220,7 +216,7 @@ curl -s http://127.0.0.1:18888/block/336460

-### Transaction by Hash +### Transaction by hash Retrieve an aggregate of the various states a transaction goes through, given its transaction hash. The endpoint also needs the transaction type as an input (`deploy` or `version1`) The node does not emit this event, but the Sidecar computes it and returns it for the given transaction. This endpoint behaves differently than other endpoints, which return the raw event received from the node. @@ -250,11 +246,13 @@ The next sample output is for a transaction that was accepted and processed. Transaction accepted and processed successfully ```json -{"transaction_hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","transaction_accepted": {"header": {"api_version": "2.0.0","network_name": "casper-net-1"},"payload": {"transaction": {"Version1": {"hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","header": {"chain_name": "casper-net-1","timestamp": "2024-03-20T13:31:59.772Z","ttl": "30m","body_hash": "40c7476a175fb97656ec6da1ace2f1900a9d353f1637943a30edd5385494b345","pricing_mode": {"Fixed": {"gas_price_tolerance": 1000}},"initiator_addr": {"PublicKey": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973"}},"body": {"args": [],"target": {"Session": {"kind": "Standard","module_bytes":"","runtime": "VmCasperV1"}},"entry_point": {"Custom": "test"},"scheduling": "Standard"},"approvals": [{"signer": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973","signature": "0154fd295f5d4d62544f63d70470de28b2bf2cddecac2a237b6a2a78d25ee14b21ea2861d711a51f57b3f9f74e247a8d26861eceead6569f233949864a9d5fa100"}]}}}},"transaction_processed": {"transaction_hash":{"Deploy":"c6907d46a5cc61ef30c66dbb6599208a57d3d62812c5f061169cdd7ad4e52597"},"initiator_addr":{"PublicKey":"0202dec9e70126ddd13af6e2e14771339c22f73626202a28ef1ed41594a3b2a79156"},"timestamp":"2024-03-20T13:58:57.301Z","ttl":"2m 
53s","block_hash":"6c6a1fb17147fe467a52f8078e4c6d1143e8f61e2ec0c57938a0ac5f49e3f960","execution_result":{"Version1":{"Success":{"effect":{"operations":[{"key":"9192013132486795888","kind":"NoOp"}],"transforms":[{"key":"9278390014984155010","transform":{"AddUInt64":17967007786823421753}},{"key":"8284631679508534160","transform":{"AddUInt512":"13486131286369918968"}},{"key":"11406903664472624400","transform":{"AddKeys":[{"name":"5532223989822042950","key":"6376159234520705888"},{"name":"9797089120764120320","key":"3973583116099652644"},{"name":"17360643427404656075","key":"3412027808185329863"},{"name":"9849256366384177518","key":"1556404389498537987"},{"name":"14237913702817074429","key":"16416969798013966173"}]}},{"key":"11567235260771335457","transform":"Identity"},{"key":"13285707355579107355","transform":"Identity"}]},"transfers":[],"cost":"14667737366273622842"}}},"messages":[{"entity_addr":{"SmartContract":[193,43,184,185,6,88,15,83,243,107,130,63,136,174,24,148,79,214,87,238,171,138,195,141,119,235,134,196,253,221,36,0]},"message":{"String":"wLNta4zbpJiW5ScjagPXm5LoGViYApCfIbEXJycPUuLQP4fA7REhV4LdBRbZ7bQb"},"topic_name":"FdRRgbXEGS1xKEXCJKvaq7hVyZ2ZUlSb","topic_name_hash":"473f644238bbb334843df5bd06a85e8bc34d692cce804de5f97e7f344595c769","topic_index":4225483688,"block_index":16248749308130060594},{"entity_addr":{"Account":[109,75,111,241,219,141,104,160,197,208,7,245,112,199,31,150,68,65,166,247,43,111,0,56,32,124,7,36,107,230,100,132]},"message":{"String":"U5qR82wJoPDGJWhwJ4qkblsu6Q5DDqDt0Q2pAjhVOUjn520PdvYOC27oo4aDEosw"},"topic_name":"zMEkHxGgUUSMmb7eWJhFs5e6DH9vXvCg","topic_name_hash":"d911ebafb53ccfeaf5c970e462a864622ec4e3a1030a17a8cfaf4d7a4cd74d48","topic_index":560585407,"block_index":15889379229443860143}]},"transaction_expired": false}``` +{"transaction_hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","transaction_accepted": {"header": {"api_version": "2.0.0","network_name": "casper-net-1"},"payload": {"transaction": 
{"Version1": {"hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","header": {"chain_name": "casper-net-1","timestamp": "2024-03-20T13:31:59.772Z","ttl": "30m","body_hash": "40c7476a175fb97656ec6da1ace2f1900a9d353f1637943a30edd5385494b345","pricing_mode": {"Fixed": {"gas_price_tolerance": 1000}},"initiator_addr": {"PublicKey": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973"}},"body": {"args": [],"target": {"Session": {"kind": "Standard","module_bytes":"","runtime": "VmCasperV1"}},"entry_point": {"Custom": "test"},"scheduling": "Standard"},"approvals": [{"signer": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973","signature": "0154fd295f5d4d62544f63d70470de28b2bf2cddecac2a237b6a2a78d25ee14b21ea2861d711a51f57b3f9f74e247a8d26861eceead6569f233949864a9d5fa100"}]}}}},"transaction_processed": {"transaction_hash":{"Deploy":"c6907d46a5cc61ef30c66dbb6599208a57d3d62812c5f061169cdd7ad4e52597"},"initiator_addr":{"PublicKey":"0202dec9e70126ddd13af6e2e14771339c22f73626202a28ef1ed41594a3b2a79156"},"timestamp":"2024-03-20T13:58:57.301Z","ttl":"2m 
53s","block_hash":"6c6a1fb17147fe467a52f8078e4c6d1143e8f61e2ec0c57938a0ac5f49e3f960","execution_result":{"Version1":{"Success":{"effect":{"operations":[{"key":"9192013132486795888","kind":"NoOp"}],"transforms":[{"key":"9278390014984155010","transform":{"AddUInt64":17967007786823421753}},{"key":"8284631679508534160","transform":{"AddUInt512":"13486131286369918968"}},{"key":"11406903664472624400","transform":{"AddKeys":[{"name":"5532223989822042950","key":"6376159234520705888"},{"name":"9797089120764120320","key":"3973583116099652644"},{"name":"17360643427404656075","key":"3412027808185329863"},{"name":"9849256366384177518","key":"1556404389498537987"},{"name":"14237913702817074429","key":"16416969798013966173"}]}},{"key":"11567235260771335457","transform":"Identity"},{"key":"13285707355579107355","transform":"Identity"}]},"transfers":[],"cost":"14667737366273622842"}}},"messages":[{"entity_addr":{"SmartContract":[193,43,184,185,6,88,15,83,243,107,130,63,136,174,24,148,79,214,87,238,171,138,195,141,119,235,134,196,253,221,36,0]},"message":{"String":"wLNta4zbpJiW5ScjagPXm5LoGViYApCfIbEXJycPUuLQP4fA7REhV4LdBRbZ7bQb"},"topic_name":"FdRRgbXEGS1xKEXCJKvaq7hVyZ2ZUlSb","topic_name_hash":"473f644238bbb334843df5bd06a85e8bc34d692cce804de5f97e7f344595c769","topic_index":4225483688,"block_index":16248749308130060594},{"entity_addr":{"Account":[109,75,111,241,219,141,104,160,197,208,7,245,112,199,31,150,68,65,166,247,43,111,0,56,32,124,7,36,107,230,100,132]},"message":{"String":"U5qR82wJoPDGJWhwJ4qkblsu6Q5DDqDt0Q2pAjhVOUjn520PdvYOC27oo4aDEosw"},"topic_name":"zMEkHxGgUUSMmb7eWJhFs5e6DH9vXvCg","topic_name_hash":"d911ebafb53ccfeaf5c970e462a864622ec4e3a1030a17a8cfaf4d7a4cd74d48","topic_index":560585407,"block_index":15889379229443860143}]},"transaction_expired": false} +``` +

-### Accepted Transaction by Hash +### Accepted transaction by hash Retrieve information about an accepted transaction, given its transaction hash. @@ -276,7 +274,7 @@ curl -s http://127.0.0.1:18888/transaction/accepted/version1/8204af872d7d19ef8da

-### Expired Transaction by Hash +### Expired transaction by hash Retrieve information about a transaction that expired, given its trnasaction type and transaction hash. @@ -296,7 +294,7 @@ curl -s http://127.0.0.1:18888/transaction/expired/version1/3dcf9cb73977a1163129 ``` -### Processed Transaction by Hash +### Processed transaction by hash Retrieve information about a transaction that was processed, given its transaction hash. The path URL is `/transaction/expired/version1/`. Enter a valid transaction hash. @@ -317,7 +315,7 @@ curl -s http://127.0.0.1:18888/transaction/processed/version1/8204af872d7d19ef8d

-### Faults by Public Key +### Faults by public key Retrieve the faults associated with a validator's public key. The path URL is `/faults/`. Enter a valid hexadecimal representation of a validator's public key. @@ -328,7 +326,7 @@ Example: curl -s http://127.0.0.1:18888/faults/01a601840126a0363a6048bfcbb0492ab5a313a1a19dc4c695650d8f3b51302703 ``` -### Faults by Era +### Faults by era Return the faults associated with an era, given a valid era identifier. The path URL is: `/faults/`. Enter an era identifier. @@ -339,7 +337,7 @@ Example: curl -s http://127.0.0.1:18888/faults/2304 ``` -### Finality Signatures by Block +### Finality signatures by block Retrieve the finality signatures in a block, given its block hash. @@ -351,7 +349,7 @@ Example: curl -s http://127.0.0.1:18888/signatures/85aa2a939bc3a4afc6d953c965bab333bb5e53185b96bb07b52c295164046da2 ``` -### Step by Era +### Step by era Retrieve the step event emitted at the end of an era, given a valid era identifier. @@ -363,7 +361,7 @@ Example: curl -s http://127.0.0.1:18888/step/7268 ``` -### Missing Filter +### Missing filter If no filter URL was specified after the root address (HOST:PORT), an error message will be returned. @@ -374,7 +372,7 @@ curl http://127.0.0.1:18888 {"code":400,"message":"Invalid request path provided"} ``` -### Invalid Filter +### Invalid filter If an invalid filter was specified, an error message will be returned. 
From 883006e5a5f6a425b40ca30982c17048b5b41d4a Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 14:35:16 +0200 Subject: [PATCH 03/45] Add TOC in main README --- README.md | 58 +++++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 46 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 7de2ee76..b89e8e75 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,39 @@ # The Casper Sidecar +- [Summary of Purpose](#summary-of-purpose) +- [System Components and Architecture](#system-components-and-architecture) + - [The SSE server](#the-sse-server) + - [The REST API server](#the-rest-api-server) + - [The Admin API server](#the-admin-api-server) + - [The RPC API server](#the-rpc-api-server) +- [Running and Testing the Sidecar](#running-and-testing-the-sidecar) + - [Prerequisites](#prerequisites) + - [Configuration](#configuration) + - [RPC server setup](#rpc-server-setup) + - [SSE server setup](#sse-server-setup) + - [Configuring SSE node connections](#configuring-sse-node-connections) + - [Configuring SSE legacy emulations](#configuring-sse-legacy-emulations) + - [Configuring the event stream](#configuring-the-event-stream) + - [REST server setup](#rest-server-setup) + - [Storage setup](#setup-storage) + - [Database connectivity setup](#database-connectivity-setup) + - [SQLite database](#sqlite-database) + - [PostgreSQL database](#postgresql-database) + - [Admin server setup](#admin-server-setup) +- [Swagger Documentation](#swagger-documentation) +- [OpenAPI Specification](#openapi-specification) +- [Running the Sidecar](#running-the-sidecar) +- [Testing the Sidecar](#testing-the-sidecar) + - [Testing the Sidecar using NCTL](#testing-the-sidecar-using-nctl) +- [Troubleshooting Tips](#troubleshooting-tips) + - [Checking liveness](#checking-liveness) + - [Checking the node connection](#checking-the-node-connection) + - [Diagnosing errors](#diagnosing-errors) + - [Monitoring memory consumption](#monitoring-memory-consumption) + - 
[Ensuring sufficient storage](#ensuring-sufficient-storage) + - [Inspecting the REST API](#inspecting-the-rest-api) + - [Limiting concurrent requests](#limiting-concurrent-requests) + ## Summary of Purpose The Casper Sidecar application runs in tandem with the node process, and its primary purpose is to: @@ -8,7 +42,7 @@ The Casper Sidecar application runs in tandem with the node process, and its pri While the primary use case for the Sidecar application is running alongside the node on the same machine, it can be run remotely if necessary. -## System Components & Architecture +## System Components and Architecture The Casper Sidecar provides the following functionalities: * A server-sent events (SSE) server with an `/events` endpoint that streams all the events received from all connected nodes. The Sidecar also stores these events. @@ -135,7 +169,7 @@ The Sidecar also offers an RPC JSON API server that can be enabled and configure ## Running and Testing the Sidecar -## Prerequisites +### Prerequisites To compile, test, and run the Sidecar, install the following software first: @@ -145,7 +179,7 @@ To compile, test, and run the Sidecar, install the following software first: * gcc * g++ -## Configuration +### Configuration The Sidecar service must be configured using a `.toml` file specified at runtime. @@ -157,7 +191,7 @@ This repository contains several sample configuration files that can be used as Once you create the configuration file and are ready to run the Sidecar service, you must provide the configuration as an argument using the `-- --path-to-config` option as described [here](#running-the-sidecar). -### Configuring the RPC server +### RPC server setup Here is an example configuration for the RPC API server: @@ -216,7 +250,7 @@ max_attempts = 30 * `node_client.exponential_backoff.coefficient` - Coefficient for the exponential backoff. The next timeout is calculated as min(`current_timeout * coefficient`, `max_delay_ms`). 
* `node_client.exponential_backoff.max_attempts` - Maximum number of times to try to reconnect to the binary port of the node. -### SSE server configuration +### SSE server setup The Sidecar SSE server is used to connect to Casper nodes, listen to events from them, store them locally and re-broadcast them to clients. Here is a sample configuration for the SSE server: @@ -235,7 +269,7 @@ emulate_legacy_sse_apis = ["V1"] * `sse_server.enable_server` - If set to true, the SSE server will be enabled. * `sse_server.emulate_legacy_sse_apis` - A list of legacy Casper node SSE APIs to emulate. The Sidecar will expose SSE endpoints that are compatible with specified versions. Please bear in mind that this feature is an emulation and should be used only for transition periods. In most scenarios, having a 1-to-1 mapping of new messages into old formats is impossible, so this can be a process that loses some data and/or doesn't emit all messages that come from the Casper node. -#### SSE node connections +#### Configuring SSE node connections The Sidecar's SSE component can connect to Casper nodes' SSE endpoints with versions greater or equal to `2.0.0`. @@ -294,7 +328,7 @@ sleep_between_keep_alive_checks_in_seconds = 30 * `no_message_timeout_in_seconds` - Number of seconds after which the connection will be restarted if no bytes were received. This parameter is optional, and defaults to 120. * `sleep_between_keep_alive_checks_in_seconds` - Optional parameter specifying the time intervals (in seconds) for checking if the connection is still alive. Defaults to 60. -#### SSE legacy emulations +#### Configuring SSE legacy emulations Applications using version 1 of a Casper node's event stream server can still function using an emulated V1 SSE API for a limited time. 
Enabling the V1 SSE API emulation requires the `emulate_legacy_sse_apis` setting to be `["V1"]`: @@ -313,7 +347,7 @@ This setting will expose three legacy SSE endpoints with the following events st Those endpoints will emit events in the same format as the V1 SSE API of the Casper node. There are limitations to what the Casper Sidecar can and will do. Here is a list of assumptions: --> -#### Event stream configuration +#### Configuring the event stream To configure the Sidecar's event stream server, specify the following settings: @@ -328,7 +362,7 @@ event_stream_buffer_length = 5000 * `event_stream_server.max_concurrent_subscribers` - The maximum number of subscribers that can monitor the Sidecar's event stream. * `event_stream_server.event_stream_buffer_length` - The number of events that the stream will hold in its buffer for reference when a subscriber reconnects. -### REST server configuration +### REST server setup The following section determines outbound connection criteria for the Sidecar's REST server. @@ -347,7 +381,7 @@ request_timeout_in_seconds = 10 * `max_requests_per_second` - The maximum total number of requests that can be made per second. * `request_timeout_in_seconds` - The total time before a request times out. -### Storage +### Dtorage setup This directory stores the SSE cache and an SQLite database if the Sidecar was configured to use SQLite. @@ -356,7 +390,7 @@ This directory stores the SSE cache and an SQLite database if the Sidecar was co storage_path = "./target/storage" ``` -### Database connectivity +### Database connectivity setup The Sidecar can connect to different types of databases. The current options are `SQLite` or `PostgreSQL`. The following sections show how to configure the database connection. Note that the Sidecar can only connect to one database at a time. 
@@ -426,7 +460,7 @@ database_username = "postgres" max_connections_in_pool = 30 ``` -### Admin server configuration +### Admin server setup This optional section configures the Sidecar's administrative server. If this section is not specified, the Sidecar will not start an admin server. From 2b8726d42d4d5243cdac6027e0e6802e93de32ae Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 14:41:35 +0200 Subject: [PATCH 04/45] Improved flow in main README --- README.md | 52 ++++++++++++++++++++++++++-------------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index b89e8e75..24be1194 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,7 @@ - [The REST API server](#the-rest-api-server) - [The Admin API server](#the-admin-api-server) - [The RPC API server](#the-rpc-api-server) -- [Running and Testing the Sidecar](#running-and-testing-the-sidecar) - - [Prerequisites](#prerequisites) - - [Configuration](#configuration) +- [Configuring the Sidecar](#configuring-the-sidecar) - [RPC server setup](#rpc-server-setup) - [SSE server setup](#sse-server-setup) - [Configuring SSE node connections](#configuring-sse-node-connections) @@ -20,11 +18,12 @@ - [SQLite database](#sqlite-database) - [PostgreSQL database](#postgresql-database) - [Admin server setup](#admin-server-setup) +- [Running and Testing the Sidecar](#running-and-testing-the-sidecar) + - [Prerequisites](#prerequisites) + - [Running the Sidecar](#running-the-sidecar) + - [Testing the Sidecar](#testing-the-sidecar) - [Swagger Documentation](#swagger-documentation) - [OpenAPI Specification](#openapi-specification) -- [Running the Sidecar](#running-the-sidecar) -- [Testing the Sidecar](#testing-the-sidecar) - - [Testing the Sidecar using NCTL](#testing-the-sidecar-using-nctl) - [Troubleshooting Tips](#troubleshooting-tips) - [Checking liveness](#checking-liveness) - [Checking the node connection](#checking-the-node-connection) @@ -167,19 +166,8 @@ The Sidecar also 
offers an RPC JSON API server that can be enabled and configure end ``` -## Running and Testing the Sidecar - -### Prerequisites - -To compile, test, and run the Sidecar, install the following software first: - -* CMake 3.1.4 or greater -* [Rust](https://www.rust-lang.org/tools/install) -* pkg-config -* gcc -* g++ +## Configuring the Sidecar -### Configuration The Sidecar service must be configured using a `.toml` file specified at runtime. @@ -381,7 +369,7 @@ request_timeout_in_seconds = 10 * `max_requests_per_second` - The maximum total number of requests that can be made per second. * `request_timeout_in_seconds` - The total time before a request times out. -### Dtorage setup +### Storage setup This directory stores the SSE cache and an SQLite database if the Sidecar was configured to use SQLite. @@ -479,15 +467,19 @@ max_requests_per_second = 1 Access the admin server at `http://localhost:18887/metrics/`. -## Swagger Documentation +## Running and Testing the Sidecar -Once the Sidecar is running, access the Swagger documentation at `http://localhost:18888/swagger-ui/`. You need to replace `localhost` with the IP address of the machine running the Sidecar application if you are running the Sidecar remotely. The Swagger documentation will allow you to test the REST API. +### Prerequisites -## OpenAPI Specification +To compile, test, and run the Sidecar, install the following software first: -An OpenAPI schema is available at `http://localhost:18888/api-doc.json/`. You need to replace `localhost` with the IP address of the machine running the Sidecar application if you are running the Sidecar remotely. +* CMake 3.1.4 or greater +* [Rust](https://www.rust-lang.org/tools/install) +* pkg-config +* gcc +* g++ -## Running the Sidecar +### Running the Sidecar After creating the configuration file, run the Sidecar using `cargo` and point to the configuration file using the `--path-to-config` option, as shown below. The command needs to run with `root` privileges. 
@@ -513,7 +505,7 @@ The log levels, listed in order of increasing verbosity, are: Further details about log levels can be found [here](https://docs.rs/env_logger/0.9.1/env_logger/#enabling-logging). -## Testing the Sidecar +### Testing the Sidecar You can run the unit and integration tests included in this repository with the following command: @@ -529,12 +521,20 @@ cargo test -- --include-ignored The [EXAMPLE_NCTL_CONFIG.toml](./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml) file contains the configurations used for these tests. -### Testing the Sidecar using NCTL +#### Testing the Sidecar using NCTL The Sidecar application can be tested against live Casper nodes or a local [NCTL network](https://docs.casperlabs.io/dapp-dev-guide/building-dapps/setup-nctl/). The configuration shown [here](./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml) will direct the Sidecar application to a locally hosted NCTL network if one is running. The Sidecar should function the same way it would while connected to a live node, displaying events as they occur in the local NCTL network. +## Swagger Documentation + +Once the Sidecar is running, access the Swagger documentation at `http://localhost:18888/swagger-ui/`. You need to replace `localhost` with the IP address of the machine running the Sidecar application if you are running the Sidecar remotely. The Swagger documentation will allow you to test the REST API. + +## OpenAPI Specification + +An OpenAPI schema is available at `http://localhost:18888/api-doc.json/`. You need to replace `localhost` with the IP address of the machine running the Sidecar application if you are running the Sidecar remotely. + ## Troubleshooting Tips This section covers helpful tips when troubleshooting the Sidecar service. Replace the URL and ports provided in the examples as appropriate. 
From 6e5241919222830accb7b6e0a8217ae5180cccfc Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 14:57:00 +0200 Subject: [PATCH 05/45] Removed duplication in the ETC_README for node operators --- resources/ETC_README.md | 230 ++++------------------------------------ 1 file changed, 22 insertions(+), 208 deletions(-) diff --git a/resources/ETC_README.md b/resources/ETC_README.md index 216a53f0..2e5d1020 100644 --- a/resources/ETC_README.md +++ b/resources/ETC_README.md @@ -1,226 +1,32 @@ -# Casper Event Sidecar README for Node Operators +# Casper Sidecar README for Node Operators -## Summary of Purpose +This page contains specific instructions for node operators. Before proceeding, familiarize yourself with the main [README](../README.md) file, which covers the following: + - [Summary of purpose](../README.md#summary-of-purpose) + - [System components and architecture](../README.md#system-components-and-architecture) + - [Configuration options](../README.md#configuring-the-sidecar) + - [Running and testing the Sidecar](../README.md#running-and-testing-the-sidecar) + - [Troubleshooting tips](../README.md#troubleshooting-tips) -The Casper Event Sidecar is an application that runs in tandem with the node process. This reduces the load on the node process by allowing subscribers to monitor the event stream through the Sidecar, while the node focuses entirely on the blockchain. Users needing access to the JSON-RPC will still need to query the node directly. - -While the primary use case for the Sidecar application is running alongside the node on the same machine, it can be run remotely if necessary. - -### System Components & Architecture - -Casper Nodes offer a Node Event Stream API returning Server-Sent Events (SSEs) that hold JSON-encoded data. 
The SSE Sidecar uses this API to achieve the following goals: - -* Build a sidecar middleware service that reads the Event Stream of all connected nodes, acting as a passthrough and replicating the SSE interface of the connected nodes and their filters (i.e., `/main`, `/deploys`, and `/sigs` with support for the use of the `?start_from=` query to allow clients to get previously sent events from the Sidecar's buffer). - -* Provide a new RESTful endpoint that is discoverable to node operators. - -The SSE Sidecar uses one ring buffer for outbound events, providing some robustness against unintended subscriber disconnects. If a disconnected subscriber re-subscribes before the buffer moves past their last received event, there will be no gap in the event history if they use the `start_from` URL query. - - -## Configuration +## Sidecar Configuration on the Node The file `/etc/casper-sidecar/config.toml` holds a default configuration. This should work if installed on a Casper node. If you install the Sidecar on an external server, you must update the `ip-address` values under `node_connections` appropriately. -### Node Connections - -The Sidecar can connect to Casper nodes with versions greater or equal to `2.0.0`. - -The `node_connections` option configures the node (or multiple nodes) to which the Sidecar will connect and the parameters under which it will operate with that node. - -``` -[[sse_server.connections]] -ip_address = "127.0.0.1" -sse_port = 9999 -rest_port = 8888 -max_attempts = 10 -delay_between_retries_in_seconds = 5 -allow_partial_connection = false -enable_logging = true -connection_timeout_in_seconds = 3 -no_message_timeout_in_seconds = 60 -sleep_between_keep_alive_checks_in_seconds = 30 -``` +For more information, including how to setup the SSE, RPC, REST, and Admin servers, read the [configuration options](../README.md#configuring-the-sidecar) in the main README. -* `ip_address` - The IP address of the node to monitor. 
-* `sse_port` - The node's event stream (SSE) port. This [example configuration](../resources/example_configs/EXAMPLE_NODE_CONFIG.toml) uses port `9999`. -* `rest_port` - The node's REST endpoint for status and metrics. This [example configuration](../resources/example_configs/EXAMPLE_NODE_CONFIG.toml) uses port `8888`. -* `max_attempts` - The maximum number of attempts the Sidecar will make to connect to the node. If set to `0`, the Sidecar will not attempt to connect. -* `delay_between_retries_in_seconds` - The delay between attempts to connect to the node. -* `allow_partial_connection` - Determining whether the sidecar will allow a partial connection to this node. -* `enable_logging` - This enables logging of events from the node in question. -* `connection_timeout_in_seconds` - Number of seconds before the connection request times out. Parameter is optional, defaults to 5 -* `no_message_timeout_in_seconds` - Number of seconds after which the connection will be restarted if no bytes were received. Parameter is optional, defaults to 120 -* `sleep_between_keep_alive_checks_in_seconds` - Optional parameter specifying the time intervals (in seconds) for checking if the connection is still alive. Defaults to 60 +## Storage on the Node -Connecting to multiple nodes requires multiple `[[sse_server.connections]]` sections: - -``` -[[sse_server.connections]] -ip_address = "127.0.0.1" -sse_port = 9999 -rest_port = 8888 -max_attempts = 10 -delay_between_retries_in_seconds = 5 -allow_partial_connection = false -enable_logging = true - -[[sse_server.connections]] -ip_address = "18.154.79.193" -sse_port = 1234 -rest_port = 3456 -max_attempts = 10 -delay_between_retries_in_seconds = 5 -allow_partial_connection = false -enable_logging = true -``` - -### Storage - -This directory stores the SSE cache and an SQLite database if the Sidecar is configured to use SQLite. +This directory stores the SSE cache and a database if the Sidecar was configured to use one. 
``` [storage] storage_path = "/var/lib/casper-sidecar" ``` -### Database Connectivity - - - -The Sidecar can connect to different types of databases. The current options are `SQLite` or `PostgreSQL`. The following sections show how to configure the database connection for one of these DBs. Note that the Sidecar can only connect to one DB at a time. - -#### SQLite Database - -This section includes configurations for the SQLite database. - -``` -[storage.sqlite_config] -file_name = "sqlite_database.db3" -max_connections_in_pool = 100 -# https://www.sqlite.org/compile.html#default_wal_autocheckpoint -wal_autocheckpointing_interval = 1000 -``` - -* `file_name` - The database file path. -* `max_connections_in_pool` - The maximum number of connections to the database. (Should generally be left as is.) -* `wal_autocheckpointing_interval` - This controls how often the system commits pages to the database. The value determines the maximum number of pages before forcing a commit. More information can be found [here](https://www.sqlite.org/compile.html#default_wal_autocheckpoint). - -#### PostgreSQL Database - -The properties listed below are elements of the PostgreSQL database connection that can be configured for the Sidecar. - -* `database_name` - Name of the database. -* `host` - URL to PostgreSQL instance. -* `database_username` - Username. -* `database_password` - Database password. -* `max_connections_in_pool` - The maximum number of connections to the database. -* `port` - The port for the database connection. - - -To run the Sidecar with PostgreSQL, you can set the following database environment variables to control how the Sidecar connects to the database. This is the suggested method to set the connection information for the PostgreSQL database. 
- -``` -SIDECAR_POSTGRES_USERNAME="your username" -``` - -``` -SIDECAR_POSTGRES_PASSWORD="your password" -``` - -``` -SIDECAR_POSTGRES_DATABASE_NAME="your database name" -``` - -``` -SIDECAR_POSTGRES_HOST="your host" -``` - -``` -SIDECAR_POSTGRES_MAX_CONNECTIONS="max connections" -``` - -``` -SIDECAR_POSTGRES_PORT="port" -``` - -However, DB connectivity can also be configured using the Sidecar configuration file. - -If the DB environment variables and the Sidecar's configuration file have the same variable set, the DB environment variables will take precedence. - -It is possible to completely omit the PostgreSQL configuration from the Sidecar's configuration file. In this case, the Sidecar will attempt to connect to the PostgreSQL using the database environment variables or use some default values for non-critical variables. - -``` -[storage.postgresql_config] -database_name = "event_sidecar" -host = "localhost" -database_password = "p@$$w0rd" -database_username = "postgres" -max_connections_in_pool = 30 -``` - -### REST & Event Stream Criteria - -This information determines outbound connection criteria for the Sidecar's `rest_server`. - - - -``` -[rest_api_server] -port = 18888 -max_concurrent_requests = 50 -max_requests_per_second = 50 -request_timeout_in_seconds = 10 -``` - -* `port` - The port for accessing the Sidecar's `rest_server`. `18888` is the default, but operators are free to choose their own port as needed. -* `max_concurrent_requests` - The maximum total number of simultaneous requests that can be made to the REST server. -* `max_requests_per_second` - The maximum total number of requests that can be made per second. -* `request_timeout_in_seconds` - The total time before a request times out. - -``` -[event_stream_server] -port = 19999 -max_concurrent_subscribers = 100 -event_stream_buffer_length = 5000 -``` - -The `event_stream_server` section specifies a port for the Sidecar's event stream. 
+The DB setup is described [here](../README#database-connectivity-setup). -Additionally, there are the following two options: - -* `max_concurrent_subscribers` - The maximum number of subscribers that can monitor the Sidecar's event stream. -* `event_stream_buffer_length` - The number of events that the stream will hold in its buffer for reference when a subscriber reconnects. - -### Admin Server - - - -This optional section configures the Sidecar's administrative REST server. If this section is not specified, the Sidecar will not start an admin server. - -``` -[admin_api_server] -port = 18887 -max_concurrent_requests = 1 -max_requests_per_second = 1 -``` - -* `port` - The port for accessing the Sidecar's admin REST server. -* `max_concurrent_requests` - The maximum total number of simultaneous requests that can be sent to the admin server. -* `max_requests_per_second` - The maximum total number of requests that can be sent per second to the admin server. - -Access the admin server at `http://localhost:18887/metrics/`. - -## Swagger Documentation - -Once the Sidecar is running, access the Swagger documentation at `http://localhost:18888/swagger-ui/`. - -## OpenAPI Specification - -An OpenAPI schema is available at `http://localhost:18888/api-doc.json/`. - -## Running the Event Sidecar +## Running the Sidecar on a Node The `casper-sidecar` service starts after installation, using the systemd service file. @@ -234,4 +40,12 @@ The `casper-sidecar` service starts after installation, using the systemd servic ### Logs -`journalctl --no-pager -u casper-sidecar` \ No newline at end of file +`journalctl --no-pager -u casper-sidecar` + +## Swagger Documentation + +If the Sidecar is running locally, access the Swagger documentation at `http://localhost:18888/swagger-ui/`. + +## OpenAPI Specification + +An OpenAPI schema is available at `http://localhost:18888/api-doc.json/`. 
\ No newline at end of file From 587d6019b84b724037597b40af89a132637f3b0b Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 14:59:20 +0200 Subject: [PATCH 06/45] Updated example config files --- .../example_configs/EXAMPLE_NCTL_CONFIG.toml | 37 ++++++++++++ .../EXAMPLE_NCTL_POSTGRES_CONFIG.toml | 42 +++++++++++++ .../example_configs/EXAMPLE_NODE_CONFIG.toml | 59 +++++++++++++++---- 3 files changed, 127 insertions(+), 11 deletions(-) diff --git a/resources/example_configs/EXAMPLE_NCTL_CONFIG.toml b/resources/example_configs/EXAMPLE_NCTL_CONFIG.toml index 78f31211..e2d2bc9c 100644 --- a/resources/example_configs/EXAMPLE_NCTL_CONFIG.toml +++ b/resources/example_configs/EXAMPLE_NCTL_CONFIG.toml @@ -1,3 +1,34 @@ +[rpc_server.main_server] +enable_server = true +address = "0.0.0.0:11102" +qps_limit = 100 +max_body_bytes = 2621440 +cors_origin = "" + +[rpc_server.speculative_exec_server] +enable_server = true +address = "0.0.0.0:25102" +qps_limit = 1 +max_body_bytes = 2621440 +cors_origin = "" + +[rpc_server.node_client] +address = "0.0.0.0:28102" +max_message_size_bytes = 4194304 +request_limit = 3 +request_buffer_size = 16 +message_timeout_secs = 30 +client_access_timeout_secs = 2 + +[rpc_server.node_client.exponential_backoff] +initial_delay_ms = 1000 +max_delay_ms = 32000 +coefficient = 2 +max_attempts = 30 + +[sse_server] +enable_server = true + [[sse_server.connections]] ip_address = "127.0.0.1" sse_port = 18101 @@ -6,6 +37,8 @@ max_attempts = 10 delay_between_retries_in_seconds = 5 allow_partial_connection = false enable_logging = true +no_message_timeout_in_seconds = 10 +sleep_between_keep_alive_checks_in_seconds = 5 [[sse_server.connections]] ip_address = "127.0.0.1" @@ -15,6 +48,8 @@ max_attempts = 10 delay_between_retries_in_seconds = 5 allow_partial_connection = false enable_logging = false +no_message_timeout_in_seconds = 10 +sleep_between_keep_alive_checks_in_seconds = 5 [[sse_server.connections]] ip_address = "127.0.0.1" @@ -25,6 +60,8 @@ 
delay_between_retries_in_seconds = 5 allow_partial_connection = false enable_logging = false connection_timeout_in_seconds = 3 +no_message_timeout_in_seconds = 10 +sleep_between_keep_alive_checks_in_seconds = 5 [sse_server.event_stream_server] port = 19999 diff --git a/resources/example_configs/EXAMPLE_NCTL_POSTGRES_CONFIG.toml b/resources/example_configs/EXAMPLE_NCTL_POSTGRES_CONFIG.toml index 43a30918..57ff8908 100644 --- a/resources/example_configs/EXAMPLE_NCTL_POSTGRES_CONFIG.toml +++ b/resources/example_configs/EXAMPLE_NCTL_POSTGRES_CONFIG.toml @@ -1,3 +1,34 @@ +[rpc_server.main_server] +enable_server = true +address = "0.0.0.0:11102" +qps_limit = 100 +max_body_bytes = 2621440 +cors_origin = "" + +[rpc_server.speculative_exec_server] +enable_server = true +address = "0.0.0.0:25102" +qps_limit = 1 +max_body_bytes = 2621440 +cors_origin = "" + +[rpc_server.node_client] +address = "0.0.0.0:28102" +max_message_size_bytes = 4194304 +request_limit = 3 +request_buffer_size = 16 +message_timeout_secs = 30 +client_access_timeout_secs = 2 + +[rpc_server.node_client.exponential_backoff] +initial_delay_ms = 1000 +max_delay_ms = 32000 +coefficient = 2 +max_attempts = 30 + +[sse_server] +enable_server = true + [[sse_server.connections]] ip_address = "127.0.0.1" sse_port = 18101 @@ -6,6 +37,8 @@ max_attempts = 10 delay_between_retries_in_seconds = 5 allow_partial_connection = false enable_logging = true +no_message_timeout_in_seconds = 10 +sleep_between_keep_alive_checks_in_seconds = 5 [[sse_server.connections]] ip_address = "127.0.0.1" @@ -15,6 +48,8 @@ max_attempts = 10 delay_between_retries_in_seconds = 5 allow_partial_connection = false enable_logging = false +no_message_timeout_in_seconds = 10 +sleep_between_keep_alive_checks_in_seconds = 5 [[sse_server.connections]] ip_address = "127.0.0.1" @@ -25,6 +60,8 @@ delay_between_retries_in_seconds = 5 allow_partial_connection = false enable_logging = false connection_timeout_in_seconds = 3 +no_message_timeout_in_seconds = 10 
+sleep_between_keep_alive_checks_in_seconds = 5 [sse_server.event_stream_server] port = 19999 @@ -45,3 +82,8 @@ max_connections_in_pool = 30 port = 18888 max_concurrent_requests = 50 max_requests_per_second = 50 + +[admin_api_server] +port = 18887 +max_concurrent_requests = 1 +max_requests_per_second = 1 \ No newline at end of file diff --git a/resources/example_configs/EXAMPLE_NODE_CONFIG.toml b/resources/example_configs/EXAMPLE_NODE_CONFIG.toml index f34bc350..e8a14648 100644 --- a/resources/example_configs/EXAMPLE_NODE_CONFIG.toml +++ b/resources/example_configs/EXAMPLE_NODE_CONFIG.toml @@ -1,29 +1,66 @@ +[rpc_server.main_server] +enable_server = true +address = "0.0.0.0:7777" +qps_limit = 100 +max_body_bytes = 2621440 +cors_origin = "" + +[rpc_server.speculative_exec_server] +enable_server = true +address = "0.0.0.0:7778" +qps_limit = 1 +max_body_bytes = 2621440 +cors_origin = "" + +[rpc_server.node_client] +address = "3.20.57.210:7777" +max_message_size_bytes = 4194304 +request_limit = 10 +request_buffer_size = 50 +message_timeout_secs = 60 +client_access_timeout_secs = 60 + +[rpc_server.node_client.exponential_backoff] +initial_delay_ms = 1000 +max_delay_ms = 32000 +coefficient = 2 +max_attempts = 30 + +[sse_server] +enable_server = true + [[sse_server.connections]] -ip_address = "127.0.0.1" +ip_address = "168.254.51.1" sse_port = 9999 rest_port = 8888 -max_attempts = 10 -delay_between_retries_in_seconds = 5 -allow_partial_connection = false -enable_logging = true +max_attempts = 100 +delay_between_retries_in_seconds = 10 +allow_partial_connection = true +enable_logging = false +no_message_timeout_in_seconds = 20 +sleep_between_keep_alive_checks_in_seconds = 10 [[sse_server.connections]] ip_address = "168.254.51.2" sse_port = 9999 rest_port = 8888 -max_attempts = 10 -delay_between_retries_in_seconds = 5 +max_attempts = 100 +delay_between_retries_in_seconds = 10 allow_partial_connection = false -enable_logging = true +enable_logging = false 
+no_message_timeout_in_seconds = 20 +sleep_between_keep_alive_checks_in_seconds = 10 [[sse_server.connections]] ip_address = "168.254.51.3" sse_port = 9999 rest_port = 8888 -max_attempts = 10 -delay_between_retries_in_seconds = 5 +max_attempts = 100 +delay_between_retries_in_seconds = 10 allow_partial_connection = false -enable_logging = true +enable_logging = false +no_message_timeout_in_seconds = 20 +sleep_between_keep_alive_checks_in_seconds = 10 [sse_server.event_stream_server] port = 19999 From 9e66a4cb9be5d9f687747fa57adac815695a4ff4 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 15:00:41 +0200 Subject: [PATCH 07/45] Remove "event" sidecar wording from files, since it does more now --- event_sidecar/src/utils.rs | 4 ++-- rpc_sidecar/README.md | 2 +- sidecar/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/event_sidecar/src/utils.rs b/event_sidecar/src/utils.rs index 9b4d3034..92866650 100644 --- a/event_sidecar/src/utils.rs +++ b/event_sidecar/src/utils.rs @@ -263,14 +263,14 @@ pub mod tests { config: TestingConfig, ) -> tokio::task::JoinHandle> { tokio::spawn(async move { unpack_test_config_and_run(config, true).await }) - // starting event sidecar + // starting the sidecar } pub async fn start_sidecar( config: TestingConfig, ) -> tokio::task::JoinHandle> { tokio::spawn(async move { unpack_test_config_and_run(config, false).await }) - // starting event sidecar + // starting the sidecar } pub fn build_test_config() -> (TestingConfig, TempDir, u16, u16, u16) { diff --git a/rpc_sidecar/README.md b/rpc_sidecar/README.md index bc7ffcdd..71b1e83a 100644 --- a/rpc_sidecar/README.md +++ b/rpc_sidecar/README.md @@ -8,7 +8,7 @@ ## Synopsis -The Casper Event Sidecar is a process that connects to the RPC port of a Casper node and exposes a JSON-RPC interface for interacting with that node. The RPC protocol allows for basic operations like querying global state, sending transactions and deploys, etc. 
All of the RPC methods are documented [here](https://docs.casper.network/developers/json-rpc/). +The Casper Sidecar is a process that connects to the RPC port of a Casper node and exposes a JSON-RPC interface for interacting with that node. The RPC protocol allows for basic operations like querying global state, sending transactions and deploys, etc. All of the RPC methods are documented [here](https://docs.casper.network/developers/json-rpc/). ## Protocol The sidecar maintains a TCP connection with the node and communicates using a custom binary protocol built on top of [Juliet](https://github.com/casper-network/juliet). The protocol uses a request-response model where the sidecar sends simple self-contained requests and the node responds to them. The requests can be split into these main categories: diff --git a/sidecar/Cargo.toml b/sidecar/Cargo.toml index b9cad1fa..fff3d9b1 100644 --- a/sidecar/Cargo.toml +++ b/sidecar/Cargo.toml @@ -45,7 +45,7 @@ assets = [ ] maintainer-scripts = "../resources/maintainer_scripts/debian" extended-description = """ -Package for Casper Event Sidecar +Package for Casper Sidecar """ [package.metadata.deb.systemd-units] From dcfdd81d840409b36640f1e4b1795bb2e086630b Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 17:06:10 +0200 Subject: [PATCH 08/45] Cleaned up rpc_sidecar/README.md --- rpc_sidecar/README.md | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/rpc_sidecar/README.md b/rpc_sidecar/README.md index 71b1e83a..324e4d1a 100644 --- a/rpc_sidecar/README.md +++ b/rpc_sidecar/README.md @@ -8,22 +8,21 @@ ## Synopsis -The Casper Sidecar is a process that connects to the RPC port of a Casper node and exposes a JSON-RPC interface for interacting with that node. The RPC protocol allows for basic operations like querying global state, sending transactions and deploys, etc. All of the RPC methods are documented [here](https://docs.casper.network/developers/json-rpc/). 
+The Casper Sidecar provides connectivity to the binary port of a Casper node (among [other capabilities](../README.md#system-components--architecture)), exposing a JSON-RPC interface for interacting with that node. The RPC protocol allows for basic operations like querying global state, sending transactions and deploys, etc. All of the available RPC methods are documented [here](https://docs.casper.network/developers/json-rpc/). ## Protocol -The sidecar maintains a TCP connection with the node and communicates using a custom binary protocol built on top of [Juliet](https://github.com/casper-network/juliet). The protocol uses a request-response model where the sidecar sends simple self-contained requests and the node responds to them. The requests can be split into these main categories: -- read requests - - queries for transient in-memory information like the - current block height, peer list, component status etc. - - queries for database items, with both the database and the key - always being explicitly specified by the sidecar -- execute transaction requests - - request to submit a transaction for execution - - request to speculatively execute a transaction + +The Sidecar maintains a TCP connection with the node and communicates using a custom binary protocol, which uses a request-response model. The Sidecar sends simple self-contained requests and the node responds to them. The requests can be split into these main categories: +- Read requests + - Queries for transient in-memory information like the current block height, peer list, component status etc. + - Queries for database items, with both the database and the key always being explicitly specified by the sidecar +- Transaction requests + - Requests to submit transactions for execution + - Requests to speculatively execute a transaction ## Discovering the JSON RPC API -Once running, the Sidecar can be queried for its JSON RPC API using the `rpc.discover` method, as shown below. 
The result will be a list of RPC methods and their parameters. +Once setup and running as described [here](../README.md), the Sidecar can be queried for its JSON RPC API using the `rpc.discover` method, as shown below. The result will be a list of RPC methods and their parameters. ```bash curl -X POST http://localhost:/rpc -H 'Content-Type: application/json' -d '{"jsonrpc": "2.0", "method": "rpc.discover", "id": 1}' From 0e28aca4b3bceb10587585c55d89b3ab7d35cee4 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 17:07:26 +0200 Subject: [PATCH 09/45] Add logo and license to the main README --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index 24be1194..5d30f94f 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,7 @@ +[![LOGO](https://raw.githubusercontent.com/casper-network/casper-node/master/images/casper-association-logo-primary.svg)](https://casper.network/) + +[![License](https://img.shields.io/badge/license-Apache-blue)](https://github.com/CasperLabs/casper-node/blob/master/LICENSE) + # The Casper Sidecar - [Summary of Purpose](#summary-of-purpose) @@ -618,3 +622,7 @@ The easiest way to inspect the Sidecar’s REST API is with [Swagger](#swagger-d The Sidecar can be configured to limit concurrent requests (`max_concurrent_requests`) and requests per second (`max_requests_per_second`) for the REST and admin servers. However, remember that those are application-level guards, meaning that the operating system already accepted the connection, which used up the operating system's resources. Limiting potential DDoS attacks requires consideration before the requests are directed to the Sidecar application. + +## License + +Licensed under the [Apache License Version 2.0](https://github.com/casper-network/casper-node/blob/master/LICENSE). 
From b5016f0bb0536fdd626f48c7a4e6f5b11732f4a3 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Thu, 9 May 2024 17:08:47 +0200 Subject: [PATCH 10/45] Minor spelling update --- USAGE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/USAGE.md b/USAGE.md index 240e99cc..e56ed605 100644 --- a/USAGE.md +++ b/USAGE.md @@ -131,7 +131,7 @@ Note that the Sidecar can emit another type of shutdown event on the `events/sid If the Sidecar attempts to connect to a node that does not come back online within the maximum number of reconnection attempts, the Sidecar will start a controlled shutdown process. It will emit a Sidecar-specific Shutdown event on the [events/sidecar](#the-sidecar-shutdown-event) endpoint, designated for events originating solely from the Sidecar service. The other event streams do not get this message because they only emit messages from the node. -The message structure of the Sidecar shutdown event is the same as the [node shutdown event](#the-node-shutdown-event). The sidecar event stream would look like this: +The message structure of the Sidecar shutdown event is the same as the [node shutdown event](#the-node-shutdown-event). The Sidecar event stream would look like this: ``` curl -sN http://127.0.0.1:19999/events/sidecar From a2625153f6043e0623c43df06183a12726d7c5d1 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Fri, 10 May 2024 16:56:41 +0200 Subject: [PATCH 11/45] Add back details after checking with the devs. --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 5d30f94f..088b13c0 100644 --- a/README.md +++ b/README.md @@ -107,13 +107,14 @@ The SSE Listener processes events in this order: 2. Store the event. 3. Publish the event to the SSE API. -Casper nodes stream server-sent events with JSON-encoded data to the Sidecar. The Sidecar reads the event stream of all connected nodes, acting as a passthrough and replicating the SSE interface of the connected nodes. 
Enabling and configuring the SSE Server of the Sidecar is optional. +Casper nodes offer an event stream API that returns server-sent events (SSEs) with JSON-encoded data. The Sidecar reads the event stream of all connected nodes, acting as a passthrough and replicating the SSE interface of the connected nodes. The Sidecar can: * Republish the current events from the node to clients listening to Sidecar's SSE API. -* Publish a configurable number of previous events to clients connecting to the Sidecar's SSE API with `?start_from=` query. +* Publish a configurable number of previous events to clients connecting to the Sidecar's SSE API with `?start_from=` query (similar to the node's SSE API). * Store the events in external storage for clients to query them via the Sidecar's REST API. +Enabling and configuring the SSE Server of the Sidecar is optional. ### The REST API server @@ -263,7 +264,6 @@ emulate_legacy_sse_apis = ["V1"] #### Configuring SSE node connections - The Sidecar's SSE component can connect to Casper nodes' SSE endpoints with versions greater or equal to `2.0.0`. The `node_connections` option configures the node (or multiple nodes) to which the Sidecar will connect and the parameters under which it will operate with that node. Connecting to multiple nodes requires multiple `[[sse_server.connections]]` sections. From 7ce6bc477466f6a38f5256cf5c77eb88b866447e Mon Sep 17 00:00:00 2001 From: ipopescu Date: Sat, 18 May 2024 01:42:19 +0200 Subject: [PATCH 12/45] Editing pass with open questions --- LEGACY_SSE_EMULATION.md | 301 +++++++++++++++++++++++++--------------- 1 file changed, 188 insertions(+), 113 deletions(-) diff --git a/LEGACY_SSE_EMULATION.md b/LEGACY_SSE_EMULATION.md index 74a3b4bd..c88db0d9 100644 --- a/LEGACY_SSE_EMULATION.md +++ b/LEGACY_SSE_EMULATION.md @@ -1,20 +1,48 @@ -# Rationale -The casper node 2.x produces a different set of SSE events than the 1.x ones. 
Also, 1.x nodes used 3 sse endpoints (`/events/sigs`, `/events/deploys`, `/events/main`), while 2.x node exposes all SSE events under one firehose endpoint (`/events`). + + +Casper node versions 2.0 or greater (2.x) produce different SSE events than 1.x versions. Also, 1.x Casper nodes used 3 SSE endpoints (`/events/sigs`, `/events/deploys`, `/events/main`), while 2.x nodes expose all the SSE events on one endpoint (`/events`). + +Generally, the changes in 2.x regarding SSE are somewhat backward-incompatible. To collect all the data, clients should adopt the new SSE API. However, if some clients are not ready or do not need to adopt the new SSE API, they can use the legacy SSE emulation. + +SSE emulation is off by default. To enable it, follow the steps below and read the main [README.md](./README.md#sse-server-configuration) file describing how to configure the SSE server. + +> **Note**: This document refers to legacy events as V1 events, and to events streamed by nodes with version 2.x as V2 events. -**BEFORE YOU ENABLE LEGACY SSE EMULATION** please consider the following: +**LIMITATIONS:** -- The legacy SSE emulation is a temporary solution and can be removed in a future major release. -- The legacy SSE emulation is not a 1:1 mapping of the 2.x events to 1.x events. Some events will be omitted, some will be transformed, some will be passed as is. More details on the limitations of the emulation are explained below. -- The legacy SSE emulation is an additional drain on resources. It will consume more resources than the "native" 2.x SSE API. +Before enabling the legacy SSE emulation, consider its limitations: -# Premises of legacy SSE emulation +- The legacy SSE emulation is a temporary solution and may be removed in a future major release of the node software. +The legacy SSE emulation does not map 2.x events to 1.x events in a 1-to-1 fashion. Some events are omitted, some are transformed, and some are passed through. 
Below are more details on the emulation's limitations. +- The legacy SSE emulation places an extra burden on resources. It will consume more resources than the native 2.x SSE API. -Currently the only possible emulation is the V1 SSE API. Enabling V1 SSE api emulation requires setting `emulate_legacy_sse_apis` to `["V1"]`, like: +## Configuration + +Currently, the only possible emulation is the V1 SSE API. To enable the emulation, set the `emulate_legacy_sse_apis` setting to `["V1"]`: ``` [sse_server] @@ -23,21 +51,32 @@ emulate_legacy_sse_apis = ["V1"] (...) ``` -This will expose three additional sse endpoints: +This setting will expose three legacy SSE endpoints with the following events streamed on each endpoint: + +- `/events/main` - `ApiVersion`, `BlockAdded`, `DeployProcessed`, `DeployExpired`, `Fault` and `Shutdown` +- `/events/deploys`- `ApiVersion`, `DeployAccepted` and `Shutdown` +- `/events/sigs` - `ApiVersion`, `FinalitySignature` and `Shutdown` + +Those endpoints will emit events in the same format as the V1 SSE API of the Casper node. + +## Event Mapping -- `/events/sigs` -> publishes `ApiVersion`, `BlockAdded`, `DeployProcessed`, `DeployExpired`, `Fault` and `Shutdown` -- `/events/deploys`-> publishes `ApiVersion`, `TransactionAccepted` and `Shutdown` -- `/events/main` -> publishes `ApiVersion`, `FinalitySignature` and `Shutdown` events +There are limitations to what the Casper Sidecar can and will do. Below, you will find a list of mapping assumptions between 2.x events and 1.x events. -Those endpoints will emit events in the same format as the V1 SSE API of the casper node. There are limitations to what Casper Sidecar can and will do, here is a list of mapping assumptions: +### The `ApiVersion` event -## Translating `ApiVersion` event +The legacy SSE ApiVersion event is the same as the current version. 
-Legacy SSE event will be the same +### The `BlockAdded` event -## Translating `BlockAdded` event + -- When the 2.x event emits a V1 block it will be unwrapped and passed as a legacy BlockAdded, for instance a 2.x event like this: +A V1 `BlockAdded` event will be unwrapped and passed as a legacy `BlockAdded` event on the 2.x `events` endpoint. For instance, the V1 `BlockAdded` event will be translated to a 1.x emulated event as shown below. + +
+V1 BlockAdded in 2.x ```json { @@ -99,7 +138,10 @@ Legacy SSE event will be the same } ``` - will be translated to 1.x emulated event: +
+ +
+Emulated 1.x BlockAdded (from V1 BlockAdded) ```json { @@ -158,26 +200,30 @@ Legacy SSE event will be the same } } ``` +


+ -- When the 2.x event emits a V2 block the following rules apply: - - - `block_hash` will be copied from V2 to V1 - - `block.block_hash` will be copied from V2 to V1 - - `block.header.era_end`: - - if the era_end is a V1 variety - it will be copied - - if the era_end is a V2 variety: - - V2 `next_era_validator_weights` will be copied from V2 `next_era_validator_weights` - - V1 `era_report` will be assembled from V2 `era_end.equivocators`, `era_end.rewards` and `era_end.inactive_validators` fields - - IF one of the `rewards` contains a reward that doesn't fit in a u64 (because V2 has U512 type in rewards values) - the whole `era_end` **WILL BE OMITTED** from the legacy V1 block (value None) - - V2 field `next_era_gas_price` has no equivalent in V1 and will be omitted - - `block.header.current_gas_price` this field only exists in V2 and will be omitted from the V1 block header - - `block.header.proposer` will be copied from V2 to V1 `block.body.proposer` - - other `block.header.*` fields will be copied from V2 to V1 - - `block.body.deploy_hashes` will be based on V2 `block.body.standard` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.deploy_hashes` array - - `block.body.transfer_hashes` will be based on V2 `block.body.mint` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.transfer_hashes` array. - - An example of the above rules. - Input V2 BlockAdded: +When the 2.x event stream emits a legacy `BlockAdded` event, the following mapping rules apply: + +- `block_hash` will be copied from V2 to V1. +- `block.block_hash` will be copied from V2 to V1. +- `block.header.era_end`: + - If the `era_end` is a V1 variety - it will be copied. + - If the `era_end` is a V2 variety: + - V2 `next_era_validator_weights` will be copied from V2 `next_era_validator_weights`. 
+ - V1 `era_report` will be assembled from the V2 `era_end.equivocators`, `era_end.rewards` and `era_end.inactive_validators` fields. + - If one of the `rewards` contains a reward that doesn't fit in a u64 (because V2 has U512 type in rewards values) - the whole `era_end` **WILL BE OMITTED** from the legacy V1 block (value None). + - V2 field `next_era_gas_price` has no equivalent in V1 and will be omitted. +- `block.header.current_gas_price` this field only exists in V2 and will be omitted from the V1 block header. +- `block.header.proposer` will be copied from V2 to V1 `block.body.proposer`. +- other `block.header.*` fields will be copied from V2 to V1. +- `block.body.deploy_hashes` will be based on V2 `block.body.standard` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.deploy_hashes` array. +- `block.body.transfer_hashes` will be based on V2 `block.body.mint` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.transfer_hashes` array. + +Here is an example mapping demonstrating the rules above: + +
+V2 BlockAdded in 2.x ```json { @@ -274,7 +320,10 @@ Legacy SSE event will be the same } ``` - Output legacy BlockAdded: +
+ +
+Emulated 1.x BlockAdded (from V2 BlockAdded) ```json { @@ -350,9 +399,16 @@ Legacy SSE event will be the same } ``` -## Translating `TransactionAccepted` event +
+ + +### The `TransactionAccepted` event + +V1 `TransactionAccepted` events will be unwrapped and translated to legacy `DeployAccepted` events on the legacy SSE stream. + +
+V1 TransactionAccepted in 2.x -- If the event is a V1 variant - it will be unwrapped and passed, so a 2.x event: ```json { "TransactionAccepted": { @@ -411,7 +467,13 @@ Legacy SSE event will be the same } } ``` - will be translated to legacy `DeployAccepted`: + +
+ + +
+Emulated 1.x DeployAccepted (from V1 TransactionAccepted) + ```json { "DeployAccepted": { @@ -469,21 +531,27 @@ Legacy SSE event will be the same } ``` -* If the event is a V2 variant - it will be omitted so a 2.x event like: - ``` - { - "TransactionAccepted": { - "Version1": { - ... - } - } - } - ``` - will be omitted from the legacy SSE streams +


-## Translating `TransactionExpired` event -- If it's a Deploy variety it will be unpacked and sent. So a 2.x `TransactionExpired` event: + + +All V2 events will be omitted from legacy SSE event streams. For example, the following event will not be streamed. + +```json +"TransactionAccepted": { + "Version1": { + ... +``` + +### The `TransactionExpired` event + +Other transaction types will be unwrapped and sent as legacy deploy types. + +A 2.x `TransactionExpired` event will be mapped to a `DeployExpired` event. + +
+TransactionExpired mapped to DeployExpired ```json { @@ -495,8 +563,6 @@ Legacy SSE event will be the same } ``` - will be sent as a legacy `DeployExpired` event: - ```json { "DeployExpired": { @@ -505,62 +571,71 @@ Legacy SSE event will be the same } ``` -* If it's a Version1 variant it will be omitted from legacy SSE streams. So a 2.x `TransactionExpired` event: +


- ```json - { - "TransactionExpired": { - "Version1": { - "hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" - } + + +All V1 variants will be omitted from legacy SSE streams. For example, a 2.x V1 `TransactionExpired` event will not be streamed. + +```json +{ + "TransactionExpired": { + "Version1": { + "hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" } } - ``` +} +``` - will be omitted - -## Translating `TransactionProcessed` event. - -- If `transaction_hash` field is a `Version1`, the event will be ignored. -- If `transaction_hash` field is a `Deploy`, it's value will be used as `DeployProcessed.deploy_hash` - - If `initiator_addr` field is not a `PublicKey` type, the event will be omitted. - - If `initiator_addr` field is a `PublicKey` type, it's value will be used as `DeployProcessed.account` - - `timestamp`, `ttl`, `block_hash` will be filled from analogous fields in the `TransactionProcessed` event - - If `execution_result` is a `Version1` type, it's value will be copied as-is do the `DeployProcessed.execution_result` field. - - If `execution_result` is a `Version2` type please see [this paragraph](#translating-executionresultv2) - -### Translating `ExecutionResultV2`. - -- When translating `ExecutionResultV2` (later in this paragraph called `ex_v2`) to legacy `ExecutionResult` (later in this paragraph called `ex_v1`) the following rules apply: - - if `ex_v2.error_message` is not empty, the `ExecutionResult` will be of type `Failure` and `ex_v1.error_message` will be set to that value. 
Otherwise `ex_v1` will be of type `Success` - - `ex_v1.cost` will be set to `ex_v2.cost` - - `ex_v1.transfers` will always be an empty list since 2.x node doesn't use a notion of `TransferAddr` anymore - - `ex_v1.effect` will be populated based on `ex_v2.effects` field applying rules from paragraph [Translating Effects from V2](#translating-effects-from-v2) - -### Translating `Effects` from V2 - -- Output `operations` field will always be an empty list, since 2.x node no longer uses this concept for execution results -- For `transforms` the objects will be constructed based on `ex_v2.effects` with the following exceptions: - - V2 `AddKeys` transform will be translated to V1 `NamedKeys` transform. - - V2 `Write` transform will be translated applying rules from paragraph [Translating Write transform from V2](#translating-write-transform-from-v2). If translating at least one `Write` transform is not translatable (In the paragraph it will be denoted that it yields a `None` value) - the whole transform will be an empty array. - -### Translating `Write` transform from V2 - -- When translating `Write` transforms from V2 to V1 the following rules apply: - - For `CLValue`, it will be copied to output as `WriteCLValue` transform - - For `Account` it will be copied to output as `WriteAccount` transform, taking the v2 `account_hash` as value for `WriteAccount`. - - For `ContractWasm` a `WriteContractWasm` transform will be created. Please note that `WriteContractWasm` has no data, so details from V2 will be omitted. - - For `Contract` a `WriteContract` transform will be created. Please note that `WriteContract` has no data, so details from V2 will be omitted. - - For `Contract` a `WriteContractPackage` transform will be created. Please note that `WriteContractPackage` has no data, so details from V2 will be omitted. - - For `LegacyTransfer` a `WriteTransfer` transform will be created. Data will be copied. - - For `DeployInfo` a `WriteDeployInfo` transform will be created. 
Data will be copied. - - For `EraInfo` a `ErInfo` transform will be created. Data will be copied. - - For `Bid` a `WriteBid` transform will be created. Data will be copied. - - For `Withdraw` a `WriteWithdraw` transform will be created. Data will be copied. - - For `NamedKey` will be translated into a `AddKeys` transform. Data will be copied. - - For `AddressableEntity` no value will be produced (a `None` value will be yielded). - - For `BidKind` no value will be produced (a `None` value will be yielded). - - For `Package` no value will be produced (a `None` value will be yielded). - - For `ByteCode` no value will be produced (a `None` value will be yielded). - - For `MessageTopic` no value will be produced (a `None` value will be yielded). - - For `Message` no value will be produced (a `None` value will be yielded). +### The `TransactionProcessed` event + +When translating a `TransactionProcessed` event to a legacy `DeployProcessed` event, the following rules apply: + +- If the `transaction_hash` field contains `Version1`, the event will be ignored. +- If the `transaction_hash` field is a `Deploy`, its value will be used as `DeployProcessed.deploy_hash`. + - If the `initiator_addr` field is not a `PublicKey` type, the event will be omitted. + - If the `initiator_addr` field is a `PublicKey` type, its value will be used as `DeployProcessed.account`. + - `timestamp`, `ttl`, `block_hash` will be filled from analogous fields in the `TransactionProcessed` event. + - If the `execution_result` contains `Version1`, its value will be copied as-is to the `DeployProcessed.execution_result` field. + - If the `execution_result` contains `Version2`, see [this paragraph](#translating-executionresultv2). 
+ +#### Translating `ExecutionResultV2` + +When translating the `ExecutionResultV2` (`ex_v2`) to a legacy `ExecutionResult` (`ex_v1`), the following rules apply: + +- If the `ex_v2.error_message` is not empty, the `ExecutionResult` will be of type `Failure`, and the `ex_v1.error_message` will be set to that value. Otherwise, `ex_v1` will be of type `Success`. +- The `ex_v1.cost` will be set to the `ex_v2.cost`. +- The `ex_v1.transfers` list will always be empty since the 2.x node no longer uses a' TransferAddr' notion. +- The `ex_v1.effect` will be populated based on the `ex_v2.effects` field, applying the rules from [Translating Effects from V2](#translating-effects-from-v2). + +#### Translating `Effects` from V2 + +When translating the `Effects` from V2 to V1, the following rules apply: + +- The output `operations` field will always be an empty list since the 2.x node no longer uses this concept for execution results. +- For `transforms`, the objects will be constructed based on the `ex_v2.effects` with the following exceptions: + - The V2 `AddKeys` transform will be translated to the V1 `NamedKeys` transform. + - The V2 `Write` transform will be translated by applying the rules from paragraph [Translating Write transforms from V2](#translating-write-transform-from-v2). If at least one `Write` transform is not translatable (yielding a `None` value), the transform will be an empty array. + +#### Translating `Write` transforms from V2 + +When translating `Write` transforms from V2 to V1, the following rules apply: + +- `CLValue`: will be copied to the `WriteCLValue` transform. +- `Account`: will be copied to the `WriteAccount` transform, assigning the V2 `account_hash` as the value for `WriteAccount`. +- `ContractWasm`: a `WriteContractWasm` transform will be created. Please note that the `WriteContractWasm` will not contain data, so the V2 details will be omitted. +- `Contract`: a `WriteContract` transform will be created. 
Please note that the `WriteContract` will not contain data, so the V2 details will be omitted. + +- `ContractPackage`: a `WriteContractPackage` transform will be created. Please note that the `WriteContractPackage` will not contain data, so the V2 details will be omitted. +- `LegacyTransfer`: a `WriteTransfer` transform will be created. Data will be copied. +- `DeployInfo`: a `WriteDeployInfo` transform will be created. Data will be copied. +- `EraInfo`: an `EraInfo` transform will be created. Data will be copied. +- `Bid`: a `WriteBid` transform will be created. Data will be copied. +- `Withdraw`: a `WriteWithdraw` transform will be created. Data will be copied. +- `NamedKey`: will be translated into an `AddKeys` transform. Data will be copied. +- `AddressableEntity`: the mapping will yield value `None`, meaning no value will be created. +- `BidKind`: the mapping will yield value `None`, meaning no value will be created. +- `Package`: the mapping will yield value `None`, meaning no value will be created. +- `ByteCode`: the mapping will yield value `None`, meaning no value will be created. +- `MessageTopic`: the mapping will yield value `None`, meaning no value will be created. +- `Message`: the mapping will yield value `None`, meaning no value will be created. 
From 1fb3a0f3c84105faf2ee099dcc4e329c3ffdebe3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Wed, 22 May 2024 12:05:27 +0200 Subject: [PATCH 13/45] Generate binary messages with incrementing IDs --- Cargo.lock | 2 -- Cargo.toml | 4 +++ rpc_sidecar/src/node_client.rs | 54 ++++++++++++++++++++++++++++++---- rpc_sidecar/src/testing/mod.rs | 2 +- 4 files changed, 53 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ac9b6527..e0d6eb04 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,6 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#44e67cdf1cb22e0f4dccd75199e3c337b1ddaa4e" dependencies = [ "bincode", "bytes", @@ -670,7 +669,6 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#44e67cdf1cb22e0f4dccd75199e3c337b1ddaa4e" dependencies = [ "base16", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index 4a8f6c46..cf583ec0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,3 +31,7 @@ toml = "0.5.8" tracing = { version = "0", default-features = false } tracing-subscriber = "0" serde = { version = "1", default-features = false } + +[patch.'https://github.com/casper-network/casper-node.git'] +casper-binary-port = { path = "../casper-node/binary_port" } +casper-types = { path = "../casper-node/types" } \ No newline at end of file diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 27503408..745e0244 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -6,7 +6,10 @@ use metrics::rpc::{inc_disconnect, observe_reconnect_time}; use serde::de::DeserializeOwned; use std::{ convert::{TryFrom, TryInto}, - sync::Arc, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, + }, time::Duration, }; use tokio_util::codec::Framed; @@ -602,6 +605,7 @@ pub struct 
FramedNodeClient { shutdown: Arc>, config: NodeClientConfig, request_limit: Semaphore, + current_request_id: AtomicU64, } impl FramedNodeClient { @@ -626,11 +630,16 @@ impl FramedNodeClient { reconnect, shutdown, config, + current_request_id: AtomicU64::new(0), }, reconnect_loop, )) } + fn next_id(&self) -> u64 { + self.current_request_id.fetch_add(1, Ordering::Relaxed) + } + async fn reconnect_loop( config: NodeClientConfig, client: Arc>>, @@ -657,8 +666,7 @@ impl FramedNodeClient { req: BinaryRequest, client: &mut RwLockWriteGuard<'_, Framed>, ) -> Result { - let payload = - BinaryMessage::new(encode_request(&req).expect("should always serialize a request")); + let payload = self.generate_payload(req); if let Err(err) = tokio::time::timeout( Duration::from_secs(self.config.message_timeout_secs), @@ -692,6 +700,12 @@ impl FramedNodeClient { } } + fn generate_payload(&self, req: BinaryRequest) -> BinaryMessage { + BinaryMessage::new( + encode_request(&req, self.next_id()).expect("should always serialize a request"), + ) + } + async fn connect_with_retries( config: &NodeClientConfig, ) -> Result, AnyhowError> { @@ -790,8 +804,8 @@ fn handle_response( } } -fn encode_request(req: &BinaryRequest) -> Result, bytesrepr::Error> { - let header = BinaryRequestHeader::new(SUPPORTED_PROTOCOL_VERSION, req.tag()); +fn encode_request(req: &BinaryRequest, id: u64) -> Result, bytesrepr::Error> { + let header = BinaryRequestHeader::new(SUPPORTED_PROTOCOL_VERSION, req.tag(), id); let mut bytes = Vec::with_capacity(header.serialized_length() + req.serialized_length()); header.write_bytes(&mut bytes)?; req.write_bytes(&mut bytes)?; @@ -872,7 +886,9 @@ where #[cfg(test)] mod tests { - use crate::testing::{get_port, start_mock_binary_port_responding_with_stored_value}; + use crate::testing::{ + get_port, start_mock_binary_port, start_mock_binary_port_responding_with_stored_value, + }; use super::*; use casper_types::testing::TestRng; @@ -1058,4 +1074,30 @@ mod tests { _ = reconnect_loop 
=> panic!("reconnect loop should not exit"), } } + + #[tokio::test] + async fn should_generate_payload_with_incrementing_id() { + let port = get_port(); + let config = NodeClientConfig::new_with_port(port); + let shutdown = Arc::new(tokio::sync::Notify::new()); + let _ = start_mock_binary_port(port, vec![], Arc::clone(&shutdown)).await; + let (c, _) = FramedNodeClient::new(config).await.unwrap(); + + let generated_ids: Vec<_> = (0..10) + .map(|i| { + println!("{i}"); + let binary_message = + c.generate_payload(BinaryRequest::Get(GetRequest::Information { + info_type_tag: 0, + key: vec![], + })); + let header = BinaryRequestHeader::from_bytes(&binary_message.payload()) + .unwrap() + .0; + header.id() + }) + .collect(); + + assert_eq!(generated_ids, (0..10).collect::>()); + } } diff --git a/rpc_sidecar/src/testing/mod.rs b/rpc_sidecar/src/testing/mod.rs index f8d9ce60..27f01e4d 100644 --- a/rpc_sidecar/src/testing/mod.rs +++ b/rpc_sidecar/src/testing/mod.rs @@ -85,7 +85,7 @@ pub async fn start_mock_binary_port_responding_with_stored_value( start_mock_binary_port(port, response.to_bytes().unwrap(), shutdown).await } -async fn start_mock_binary_port(port: u16, data: Vec, shutdown: Arc) -> JoinHandle<()> { +pub async fn start_mock_binary_port(port: u16, data: Vec, shutdown: Arc) -> JoinHandle<()> { let handler = tokio::spawn(async move { let binary_port = BinaryPortMock::new(port, data); binary_port.start(shutdown).await; From 237d17763c11697ef37638ad22753a68d564406c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Fri, 24 May 2024 15:34:30 +0200 Subject: [PATCH 14/45] Handle request id mismatch scenario --- rpc_sidecar/src/lib.rs | 12 ++- rpc_sidecar/src/node_client.rs | 150 +++++++++++++++++++++------------ rpc_sidecar/src/testing/mod.rs | 30 ++++++- sidecar/src/component.rs | 3 +- 4 files changed, 135 insertions(+), 60 deletions(-) diff --git a/rpc_sidecar/src/lib.rs b/rpc_sidecar/src/lib.rs index ed81d0d4..8059cc93 100644 --- 
a/rpc_sidecar/src/lib.rs +++ b/rpc_sidecar/src/lib.rs @@ -8,7 +8,9 @@ mod speculative_exec_server; pub mod testing; use anyhow::Error; -use casper_types::ProtocolVersion; +use casper_binary_port::{BinaryRequest, BinaryRequestHeader}; +use casper_types::bytesrepr::ToBytes; +use casper_types::{bytesrepr, ProtocolVersion}; pub use config::{FieldParseError, RpcServerConfig, RpcServerConfigTarget}; pub use config::{NodeClientConfig, RpcConfig}; use futures::future::BoxFuture; @@ -116,6 +118,14 @@ fn resolve_address(address: &str) -> anyhow::Result { .ok_or_else(|| anyhow::anyhow!("failed to resolve address")) } +fn encode_request(req: &BinaryRequest, id: u64) -> Result, bytesrepr::Error> { + let header = BinaryRequestHeader::new(SUPPORTED_PROTOCOL_VERSION, req.tag(), id); + let mut bytes = Vec::with_capacity(header.serialized_length() + req.serialized_length()); + header.write_bytes(&mut bytes)?; + req.write_bytes(&mut bytes)?; + Ok(bytes) +} + #[cfg(test)] mod tests { use std::fs; diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 745e0244..1b21ea5c 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -1,4 +1,4 @@ -use crate::{NodeClientConfig, SUPPORTED_PROTOCOL_VERSION}; +use crate::{encode_request, NodeClientConfig, SUPPORTED_PROTOCOL_VERSION}; use anyhow::Error as AnyhowError; use async_trait::async_trait; use futures::{Future, SinkExt, StreamExt}; @@ -478,6 +478,10 @@ impl From for InvalidTransactionOrDeploy { pub enum Error { #[error("request error: {0}")] RequestFailed(String), + #[error("request id mismatch: expected {expected}, got {got}")] + RequestResponseIdMismatch { expected: u64, got: u64 }, + #[error("failed to deserialize the original request provided with the response: {0}")] + OriginalRequestDeserialization(String), #[error("failed to deserialize the envelope of a response: {0}")] EnvelopeDeserialization(String), #[error("failed to deserialize a response: {0}")] @@ -666,7 +670,7 @@ impl 
FramedNodeClient { req: BinaryRequest, client: &mut RwLockWriteGuard<'_, Framed>, ) -> Result { - let payload = self.generate_payload(req); + let (request_id, payload) = self.generate_payload(req); if let Err(err) = tokio::time::timeout( Duration::from_secs(self.config.message_timeout_secs), @@ -678,31 +682,47 @@ impl FramedNodeClient { return Err(Error::RequestFailed(err.to_string())); }; - let Ok(maybe_response) = tokio::time::timeout( - Duration::from_secs(self.config.message_timeout_secs), - client.next(), - ) - .await - else { - return Err(Error::RequestFailed("timeout".to_owned())); - }; - - if let Some(response) = maybe_response { - let resp = bytesrepr::deserialize_from_slice( - response - .map_err(|err| Error::RequestFailed(err.to_string()))? - .payload(), + loop { + let Ok(maybe_response) = tokio::time::timeout( + Duration::from_secs(self.config.message_timeout_secs), + client.next(), ) - .map_err(|err| Error::EnvelopeDeserialization(err.to_string()))?; - handle_response(resp, &self.shutdown) - } else { - Err(Error::RequestFailed("disconnected".to_owned())) + .await + else { + return Err(Error::RequestFailed("timeout".to_owned())); + }; + + if let Some(response) = maybe_response { + let resp = bytesrepr::deserialize_from_slice( + response + .map_err(|err| Error::RequestFailed(err.to_string()))? + .payload(), + ) + .map_err(|err| Error::EnvelopeDeserialization(err.to_string()))?; + match handle_response(resp, request_id, &self.shutdown) { + Ok(response) => return Ok(response), + Err(err) if matches!(err, Error::RequestResponseIdMismatch { expected, got } if expected > got) => + { + // If our expected ID is greater than the one we received, it means we can + // try to recover from the situation by reading more responses from the stream. 
+ warn!(%err, "received a response with an outdated id, trying another response"); + continue; + } + Err(err) => return Err(err), + } + } else { + return Err(Error::RequestFailed("disconnected".to_owned())); + } } } - fn generate_payload(&self, req: BinaryRequest) -> BinaryMessage { - BinaryMessage::new( - encode_request(&req, self.next_id()).expect("should always serialize a request"), + fn generate_payload(&self, req: BinaryRequest) -> (u64, BinaryMessage) { + let next_id = self.next_id(); + ( + next_id, + BinaryMessage::new( + encode_request(&req, next_id).expect("should always serialize a request"), + ), ) } @@ -791,10 +811,21 @@ impl NodeClient for FramedNodeClient { fn handle_response( resp: BinaryResponseAndRequest, + expected_id: u64, shutdown: &Notify, ) -> Result { - let version = resp.response().protocol_version(); + let original_request = resp.original_request(); + let (original_header, _) = BinaryRequestHeader::from_bytes(original_request) + .map_err(|err| Error::EnvelopeDeserialization(err.to_string()))?; + let original_id = original_header.id(); + if original_id != expected_id { + return Err(Error::RequestResponseIdMismatch { + expected: expected_id, + got: original_id, + }); + } + let version = resp.response().protocol_version(); if version.is_compatible_with(&SUPPORTED_PROTOCOL_VERSION) { Ok(resp) } else { @@ -804,14 +835,6 @@ fn handle_response( } } -fn encode_request(req: &BinaryRequest, id: u64) -> Result, bytesrepr::Error> { - let header = BinaryRequestHeader::new(SUPPORTED_PROTOCOL_VERSION, req.tag(), id); - let mut bytes = Vec::with_capacity(header.serialized_length() + req.serialized_length()); - header.write_bytes(&mut bytes)?; - req.write_bytes(&mut bytes)?; - Ok(bytes) -} - fn parse_response(resp: &BinaryResponse) -> Result, Error> where A: FromBytes + PayloadEntity, @@ -887,7 +910,8 @@ where #[cfg(test)] mod tests { use crate::testing::{ - get_port, start_mock_binary_port, start_mock_binary_port_responding_with_stored_value, + 
get_dummy_request, get_dummy_request_payload, get_port, start_mock_binary_port, + start_mock_binary_port_responding_with_stored_value, }; use super::*; @@ -901,11 +925,14 @@ mod tests { let notify = Notify::::new(); let bad_version = ProtocolVersion::from_parts(10, 0, 0); + let request = get_dummy_request_payload(None); + let result = handle_response( BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, bad_version), - &[], + &request, ), + 0, ¬ify, ); @@ -921,11 +948,14 @@ mod tests { ..SUPPORTED_PROTOCOL_VERSION.value() }); + let request = get_dummy_request_payload(None); + let result = handle_response( BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), - &[], + &request, ), + 0, ¬ify, ); @@ -933,7 +963,7 @@ mod tests { result, Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), - &[], + &request )) ); assert_eq!(notify.notified().now_or_never(), None) @@ -947,11 +977,14 @@ mod tests { ..SUPPORTED_PROTOCOL_VERSION.value() }); + let request = get_dummy_request_payload(None); + let result = handle_response( BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), - &[], + &request, ), + 0, ¬ify, ); @@ -959,7 +992,7 @@ mod tests { result, Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), - &[], + &request )) ); assert_eq!(notify.notified().now_or_never(), None) @@ -983,7 +1016,8 @@ mod tests { let mut rng = TestRng::new(); let shutdown = Arc::new(tokio::sync::Notify::new()); let _mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)).await; + start_mock_binary_port_responding_with_stored_value(port, None, Arc::clone(&shutdown)) + .await; let config = NodeClientConfig::new_with_port_and_retries(port, 2); let (c, _) = FramedNodeClient::new(config).await.unwrap(); @@ -1001,9 +1035,12 @@ mod 
tests { let shutdown = Arc::new(tokio::sync::Notify::new()); tokio::spawn(async move { sleep(Duration::from_secs(5)).await; - let _mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)) - .await; + let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( + port, + None, + Arc::clone(&shutdown), + ) + .await; }); let config = NodeClientConfig::new_with_port_and_retries(port, 5); let (client, _) = FramedNodeClient::new(config).await.unwrap(); @@ -1037,12 +1074,17 @@ mod tests { let port = get_port(); let mut rng = TestRng::new(); let shutdown = Arc::new(tokio::sync::Notify::new()); - let mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)).await; + let mock_server_handle = start_mock_binary_port_responding_with_stored_value( + port, + Some(0), + Arc::clone(&shutdown), + ) + .await; let config = NodeClientConfig::new_with_port(port); let (c, reconnect_loop) = FramedNodeClient::new(config).await.unwrap(); let scenario = async { + // Request id = 0 assert!(query_global_state_for_string_value(&mut rng, &c) .await .is_ok()); @@ -1050,6 +1092,7 @@ mod tests { shutdown.notify_one(); let _ = mock_server_handle.await; + // Request id = 1 let err = query_global_state_for_string_value(&mut rng, &c) .await .unwrap_err(); @@ -1058,12 +1101,16 @@ mod tests { Error::RequestFailed(e) if e == "disconnected" )); - let _mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)) - .await; + let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( + port, + Some(2), + Arc::clone(&shutdown), + ) + .await; tokio::time::sleep(Duration::from_secs(2)).await; + // Request id = 2 assert!(query_global_state_for_string_value(&mut rng, &c) .await .is_ok()); @@ -1084,13 +1131,8 @@ mod tests { let (c, _) = FramedNodeClient::new(config).await.unwrap(); let generated_ids: Vec<_> = (0..10) - .map(|i| { - 
println!("{i}"); - let binary_message = - c.generate_payload(BinaryRequest::Get(GetRequest::Information { - info_type_tag: 0, - key: vec![], - })); + .map(|_| { + let (_, binary_message) = c.generate_payload(get_dummy_request()); let header = BinaryRequestHeader::from_bytes(&binary_message.payload()) .unwrap() .0; diff --git a/rpc_sidecar/src/testing/mod.rs b/rpc_sidecar/src/testing/mod.rs index 27f01e4d..d8f2d178 100644 --- a/rpc_sidecar/src/testing/mod.rs +++ b/rpc_sidecar/src/testing/mod.rs @@ -2,9 +2,10 @@ use std::sync::Arc; use std::time::Duration; use casper_binary_port::{ - BinaryMessage, BinaryMessageCodec, BinaryResponse, BinaryResponseAndRequest, - GlobalStateQueryResult, + BinaryMessage, BinaryMessageCodec, BinaryRequest, BinaryResponse, BinaryResponseAndRequest, + GetRequest, GlobalStateQueryResult, }; +use casper_types::bytesrepr; use casper_types::{bytesrepr::ToBytes, CLValue, ProtocolVersion, StoredValue}; use futures::{SinkExt, StreamExt}; use tokio::sync::Notify; @@ -15,6 +16,8 @@ use tokio::{ }; use tokio_util::codec::Framed; +use crate::encode_request; + const LOCALHOST: &str = "127.0.0.1"; const MESSAGE_SIZE: u32 = 1024 * 1024 * 10; @@ -74,18 +77,23 @@ pub fn get_port() -> u16 { pub async fn start_mock_binary_port_responding_with_stored_value( port: u16, + request_id: Option, shutdown: Arc, ) -> JoinHandle<()> { let value = StoredValue::CLValue(CLValue::from_t("Foo").unwrap()); let data = GlobalStateQueryResult::new(value, vec![]); let protocol_version = ProtocolVersion::from_parts(2, 0, 0); let val = BinaryResponse::from_value(data, protocol_version); - let request = []; + let request = get_dummy_request_payload(request_id); let response = BinaryResponseAndRequest::new(val, &request); start_mock_binary_port(port, response.to_bytes().unwrap(), shutdown).await } -pub async fn start_mock_binary_port(port: u16, data: Vec, shutdown: Arc) -> JoinHandle<()> { +pub async fn start_mock_binary_port( + port: u16, + data: Vec, + shutdown: Arc, +) -> 
JoinHandle<()> { let handler = tokio::spawn(async move { let binary_port = BinaryPortMock::new(port, data); binary_port.start(shutdown).await; @@ -93,3 +101,17 @@ pub async fn start_mock_binary_port(port: u16, data: Vec, shutdown: Arc BinaryRequest { + BinaryRequest::Get(GetRequest::Information { + info_type_tag: 0, + key: vec![], + }) +} + +pub(crate) fn get_dummy_request_payload(request_id: Option) -> bytesrepr::Bytes { + let dummy_request = get_dummy_request(); + encode_request(&dummy_request, request_id.unwrap_or_default()) + .unwrap() + .into() +} diff --git a/sidecar/src/component.rs b/sidecar/src/component.rs index aea1a451..ba2ee1e3 100644 --- a/sidecar/src/component.rs +++ b/sidecar/src/component.rs @@ -359,7 +359,8 @@ mod tests { let port = get_port(); let shutdown = Arc::new(tokio::sync::Notify::new()); let _mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port, Arc::clone(&shutdown)).await; + start_mock_binary_port_responding_with_stored_value(port, None, Arc::clone(&shutdown)) + .await; let component = RpcApiComponent::new(); let mut config = all_components_all_enabled(); config.rpc_server.as_mut().unwrap().node_client = From 7116330da9611b35a62b5aea3e2e98fc3a8bc743 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Fri, 24 May 2024 15:58:28 +0200 Subject: [PATCH 15/45] Add tests related to request id --- rpc_sidecar/src/node_client.rs | 57 +++++++++++++++++++++++++++++++--- 1 file changed, 52 insertions(+), 5 deletions(-) diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 1b21ea5c..b8b09493 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -699,7 +699,7 @@ impl FramedNodeClient { .payload(), ) .map_err(|err| Error::EnvelopeDeserialization(err.to_string()))?; - match handle_response(resp, request_id, &self.shutdown) { + match validate_response(resp, request_id, &self.shutdown) { Ok(response) => return Ok(response), Err(err) if matches!(err, 
Error::RequestResponseIdMismatch { expected, got } if expected > got) => { @@ -809,7 +809,7 @@ impl NodeClient for FramedNodeClient { } } -fn handle_response( +fn validate_response( resp: BinaryResponseAndRequest, expected_id: u64, shutdown: &Notify, @@ -927,7 +927,7 @@ mod tests { let request = get_dummy_request_payload(None); - let result = handle_response( + let result = validate_response( BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, bad_version), &request, @@ -950,7 +950,7 @@ mod tests { let request = get_dummy_request_payload(None); - let result = handle_response( + let result = validate_response( BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), &request, @@ -979,7 +979,7 @@ mod tests { let request = get_dummy_request_payload(None); - let result = handle_response( + let result = validate_response( BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), &request, @@ -1142,4 +1142,51 @@ mod tests { assert_eq!(generated_ids, (0..10).collect::>()); } + + #[test] + fn should_reject_mismatched_request_id() { + let notify = Notify::::new(); + + let expected_id = 1; + let actual_id = 2; + + let req = get_dummy_request_payload(Some(actual_id)); + let resp = BinaryResponse::new_empty(ProtocolVersion::V2_0_0); + let resp_and_req = BinaryResponseAndRequest::new(resp, &req); + + let result = validate_response(resp_and_req, expected_id, ¬ify); + assert!(matches!( + result, + Err(Error::RequestResponseIdMismatch { expected, got }) if expected == 1 && got == 2 + )); + + let expected_id = 2; + let actual_id = 1; + + let req = get_dummy_request_payload(Some(actual_id)); + let resp = BinaryResponse::new_empty(ProtocolVersion::V2_0_0); + let resp_and_req = BinaryResponseAndRequest::new(resp, &req); + + let result = validate_response(resp_and_req, expected_id, ¬ify); + assert!(matches!( + result, + Err(Error::RequestResponseIdMismatch { 
expected, got }) if expected == 2 && got == 1 + )); + } + + #[test] + fn should_accept_matching_request_id() { + let notify = Notify::::new(); + + let expected_id = 1; + let actual_id = 1; + + let req = get_dummy_request_payload(Some(actual_id)); + let resp = BinaryResponse::new_empty(ProtocolVersion::V2_0_0); + let resp_and_req = BinaryResponseAndRequest::new(resp, &req); + + let result = validate_response(resp_and_req, expected_id, ¬ify); + dbg!(&result); + assert!(result.is_ok()) + } } From 21a46c4575cabfe301a5d919bfc4c6e9df4b78da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Mon, 27 May 2024 12:55:18 +0200 Subject: [PATCH 16/45] Prevent deserialization of the original request when validating request id --- rpc_sidecar/src/lib.rs | 2 +- rpc_sidecar/src/node_client.rs | 19 ++++++++----------- rpc_sidecar/src/testing/mod.rs | 6 +++--- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/rpc_sidecar/src/lib.rs b/rpc_sidecar/src/lib.rs index 8059cc93..870d1625 100644 --- a/rpc_sidecar/src/lib.rs +++ b/rpc_sidecar/src/lib.rs @@ -118,7 +118,7 @@ fn resolve_address(address: &str) -> anyhow::Result { .ok_or_else(|| anyhow::anyhow!("failed to resolve address")) } -fn encode_request(req: &BinaryRequest, id: u64) -> Result, bytesrepr::Error> { +fn encode_request(req: &BinaryRequest, id: u16) -> Result, bytesrepr::Error> { let header = BinaryRequestHeader::new(SUPPORTED_PROTOCOL_VERSION, req.tag(), id); let mut bytes = Vec::with_capacity(header.serialized_length() + req.serialized_length()); header.write_bytes(&mut bytes)?; diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index b8b09493..407e0a98 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -7,7 +7,7 @@ use serde::de::DeserializeOwned; use std::{ convert::{TryFrom, TryInto}, sync::{ - atomic::{AtomicU64, Ordering}, + atomic::{AtomicU16, Ordering}, Arc, }, time::Duration, @@ -479,7 +479,7 @@ pub enum Error { 
#[error("request error: {0}")] RequestFailed(String), #[error("request id mismatch: expected {expected}, got {got}")] - RequestResponseIdMismatch { expected: u64, got: u64 }, + RequestResponseIdMismatch { expected: u16, got: u16 }, #[error("failed to deserialize the original request provided with the response: {0}")] OriginalRequestDeserialization(String), #[error("failed to deserialize the envelope of a response: {0}")] @@ -609,7 +609,7 @@ pub struct FramedNodeClient { shutdown: Arc>, config: NodeClientConfig, request_limit: Semaphore, - current_request_id: AtomicU64, + current_request_id: AtomicU16, } impl FramedNodeClient { @@ -634,13 +634,13 @@ impl FramedNodeClient { reconnect, shutdown, config, - current_request_id: AtomicU64::new(0), + current_request_id: AtomicU16::new(0), }, reconnect_loop, )) } - fn next_id(&self) -> u64 { + fn next_id(&self) -> u16 { self.current_request_id.fetch_add(1, Ordering::Relaxed) } @@ -716,7 +716,7 @@ impl FramedNodeClient { } } - fn generate_payload(&self, req: BinaryRequest) -> (u64, BinaryMessage) { + fn generate_payload(&self, req: BinaryRequest) -> (u16, BinaryMessage) { let next_id = self.next_id(); ( next_id, @@ -811,13 +811,10 @@ impl NodeClient for FramedNodeClient { fn validate_response( resp: BinaryResponseAndRequest, - expected_id: u64, + expected_id: u16, shutdown: &Notify, ) -> Result { - let original_request = resp.original_request(); - let (original_header, _) = BinaryRequestHeader::from_bytes(original_request) - .map_err(|err| Error::EnvelopeDeserialization(err.to_string()))?; - let original_id = original_header.id(); + let original_id = resp.original_request_id(); if original_id != expected_id { return Err(Error::RequestResponseIdMismatch { expected: expected_id, diff --git a/rpc_sidecar/src/testing/mod.rs b/rpc_sidecar/src/testing/mod.rs index d8f2d178..d3cac181 100644 --- a/rpc_sidecar/src/testing/mod.rs +++ b/rpc_sidecar/src/testing/mod.rs @@ -77,7 +77,7 @@ pub fn get_port() -> u16 { pub async fn 
start_mock_binary_port_responding_with_stored_value( port: u16, - request_id: Option, + request_id: Option, shutdown: Arc, ) -> JoinHandle<()> { let value = StoredValue::CLValue(CLValue::from_t("Foo").unwrap()); @@ -85,7 +85,7 @@ pub async fn start_mock_binary_port_responding_with_stored_value( let protocol_version = ProtocolVersion::from_parts(2, 0, 0); let val = BinaryResponse::from_value(data, protocol_version); let request = get_dummy_request_payload(request_id); - let response = BinaryResponseAndRequest::new(val, &request); + let response = BinaryResponseAndRequest::new(val, &request, request_id.unwrap_or_default()); start_mock_binary_port(port, response.to_bytes().unwrap(), shutdown).await } @@ -109,7 +109,7 @@ pub(crate) fn get_dummy_request() -> BinaryRequest { }) } -pub(crate) fn get_dummy_request_payload(request_id: Option) -> bytesrepr::Bytes { +pub(crate) fn get_dummy_request_payload(request_id: Option) -> bytesrepr::Bytes { let dummy_request = get_dummy_request(); encode_request(&dummy_request, request_id.unwrap_or_default()) .unwrap() From 927e06f7a7b52533d0761fab6835fc498ef86f79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Mon, 27 May 2024 13:07:29 +0200 Subject: [PATCH 17/45] Make tests compatible with the "request id" related change --- rpc_sidecar/src/node_client.rs | 20 +++++++++------ rpc_sidecar/src/rpcs/account.rs | 3 +++ rpc_sidecar/src/rpcs/chain.rs | 4 +++ rpc_sidecar/src/rpcs/info.rs | 1 + rpc_sidecar/src/rpcs/speculative_exec.rs | 2 ++ rpc_sidecar/src/rpcs/state.rs | 31 ++++++++++++++++++++++++ 6 files changed, 54 insertions(+), 7 deletions(-) diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 407e0a98..058fbcf6 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -15,8 +15,8 @@ use std::{ use tokio_util::codec::Framed; use casper_binary_port::{ - BalanceResponse, BinaryMessage, BinaryMessageCodec, BinaryRequest, BinaryRequestHeader, - 
BinaryResponse, BinaryResponseAndRequest, ConsensusValidatorChanges, DictionaryItemIdentifier, + BalanceResponse, BinaryMessage, BinaryMessageCodec, BinaryRequest, BinaryResponse, + BinaryResponseAndRequest, ConsensusValidatorChanges, DictionaryItemIdentifier, DictionaryQueryResult, ErrorCode, GetRequest, GetTrieFullResult, GlobalStateQueryResult, GlobalStateRequest, InformationRequest, KeyPrefix, NodeStatus, PayloadEntity, PurseIdentifier, RecordId, SpeculativeExecutionResult, TransactionWithExecutionInfo, @@ -912,6 +912,7 @@ mod tests { }; use super::*; + use casper_binary_port::BinaryRequestHeader; use casper_types::testing::TestRng; use casper_types::{CLValue, SemVer}; use futures::FutureExt; @@ -928,6 +929,7 @@ mod tests { BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, bad_version), &request, + 0, ), 0, ¬ify, @@ -951,6 +953,7 @@ mod tests { BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), &request, + 0, ), 0, ¬ify, @@ -960,7 +963,8 @@ mod tests { result, Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), - &request + &request, + 0 )) ); assert_eq!(notify.notified().now_or_never(), None) @@ -980,6 +984,7 @@ mod tests { BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), &request, + 0, ), 0, ¬ify, @@ -989,7 +994,8 @@ mod tests { result, Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(AvailableBlockRange::RANGE_0_0, version), - &request + &request, + 0 )) ); assert_eq!(notify.notified().now_or_never(), None) @@ -1149,7 +1155,7 @@ mod tests { let req = get_dummy_request_payload(Some(actual_id)); let resp = BinaryResponse::new_empty(ProtocolVersion::V2_0_0); - let resp_and_req = BinaryResponseAndRequest::new(resp, &req); + let resp_and_req = BinaryResponseAndRequest::new(resp, &req, actual_id); let result = validate_response(resp_and_req, expected_id, ¬ify); 
assert!(matches!( @@ -1162,7 +1168,7 @@ mod tests { let req = get_dummy_request_payload(Some(actual_id)); let resp = BinaryResponse::new_empty(ProtocolVersion::V2_0_0); - let resp_and_req = BinaryResponseAndRequest::new(resp, &req); + let resp_and_req = BinaryResponseAndRequest::new(resp, &req, actual_id); let result = validate_response(resp_and_req, expected_id, ¬ify); assert!(matches!( @@ -1180,7 +1186,7 @@ mod tests { let req = get_dummy_request_payload(Some(actual_id)); let resp = BinaryResponse::new_empty(ProtocolVersion::V2_0_0); - let resp_and_req = BinaryResponseAndRequest::new(resp, &req); + let resp_and_req = BinaryResponseAndRequest::new(resp, &req, actual_id); let result = validate_response(resp_and_req, expected_id, ¬ify); dbg!(&result); diff --git a/rpc_sidecar/src/rpcs/account.rs b/rpc_sidecar/src/rpcs/account.rs index 26db1720..bc175457 100644 --- a/rpc_sidecar/src/rpcs/account.rs +++ b/rpc_sidecar/src/rpcs/account.rs @@ -172,6 +172,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::new_empty(SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } _ => unimplemented!(), @@ -213,6 +214,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::new_empty(SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } _ => unimplemented!(), @@ -257,6 +259,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } _ => unimplemented!(), diff --git a/rpc_sidecar/src/rpcs/chain.rs b/rpc_sidecar/src/rpcs/chain.rs index 38290a26..0a7ee4bc 100644 --- a/rpc_sidecar/src/rpcs/chain.rs +++ b/rpc_sidecar/src/rpcs/chain.rs @@ -704,6 +704,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(self.block.clone(), SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::Information { info_type_tag, .. 
}) @@ -716,6 +717,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::Record { @@ -758,6 +760,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -778,6 +781,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), diff --git a/rpc_sidecar/src/rpcs/info.rs b/rpc_sidecar/src/rpcs/info.rs index 72973459..8cbe625d 100644 --- a/rpc_sidecar/src/rpcs/info.rs +++ b/rpc_sidecar/src/rpcs/info.rs @@ -757,6 +757,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(transaction, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), diff --git a/rpc_sidecar/src/rpcs/speculative_exec.rs b/rpc_sidecar/src/rpcs/speculative_exec.rs index f2ecddda..83884a1b 100644 --- a/rpc_sidecar/src/rpcs/speculative_exec.rs +++ b/rpc_sidecar/src/rpcs/speculative_exec.rs @@ -246,6 +246,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::TrySpeculativeExec { .. 
} => Ok(BinaryResponseAndRequest::new( @@ -254,6 +255,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )), req => unimplemented!("unexpected request: {:?}", req), } diff --git a/rpc_sidecar/src/rpcs/state.rs b/rpc_sidecar/src/rpcs/state.rs index 0fa0f506..94f7ee23 100644 --- a/rpc_sidecar/src/rpcs/state.rs +++ b/rpc_sidecar/src/rpcs/state.rs @@ -1260,6 +1260,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1280,6 +1281,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(bids, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1300,6 +1302,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(bids, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1321,6 +1324,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(result, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1339,6 +1343,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(result, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -1405,6 +1410,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1425,6 +1431,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(bids, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1445,6 +1452,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(bids, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1468,6 +1476,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(result, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1483,6 +1492,7 @@ mod tests { 
Ok(BinaryResponseAndRequest::new( BinaryResponse::new_empty(SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1502,6 +1512,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(result, SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -1559,6 +1570,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::new_empty(SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } BinaryRequest::Get(GetRequest::Information { info_type_tag, .. }) @@ -1571,6 +1583,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -1624,6 +1637,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1644,6 +1658,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1669,6 +1684,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1690,6 +1706,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1707,6 +1724,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -1834,6 +1852,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1848,6 +1867,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::new_empty(SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -1936,6 +1956,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -1959,6 +1980,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -2010,6 +2032,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } 
BinaryRequest::Get(GetRequest::State(req)) @@ -2024,6 +2047,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::new_empty(SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -2220,6 +2244,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -2242,6 +2267,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(self.0.clone(), SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -2271,6 +2297,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -2279,6 +2306,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(self.result.clone(), SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -2308,6 +2336,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } BinaryRequest::Get(GetRequest::State(req)) @@ -2328,6 +2357,7 @@ mod tests { SUPPORTED_PROTOCOL_VERSION, ), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), @@ -2350,6 +2380,7 @@ mod tests { Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value(self.0.clone(), SUPPORTED_PROTOCOL_VERSION), &[], + 0, )) } req => unimplemented!("unexpected request: {:?}", req), From 3e403715da2a50cb4aa191d62cabdd8fd91544cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Wed, 29 May 2024 17:05:16 +0200 Subject: [PATCH 18/45] Prevent potential infinite loop when retrying to get a response from binary port --- rpc_sidecar/src/node_client.rs | 90 ++++++++++++++++++++++++++++++---- rpc_sidecar/src/testing/mod.rs | 29 ++++++++--- sidecar/src/component.rs | 10 ++-- 3 files changed, 109 insertions(+), 20 deletions(-) diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 058fbcf6..185ba00a 100644 --- 
a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -37,6 +37,12 @@ use tokio::{ }; use tracing::{error, field, info, warn}; +const MAX_MISMATCHED_ID_RETRIES: u8 = 100; +#[cfg(not(test))] +const INITIAL_REQUEST_ID: u16 = 0; +#[cfg(test)] +const INITIAL_REQUEST_ID: u16 = 1; + #[async_trait] pub trait NodeClient: Send + Sync { async fn send_request(&self, req: BinaryRequest) -> Result; @@ -480,6 +486,8 @@ pub enum Error { RequestFailed(String), #[error("request id mismatch: expected {expected}, got {got}")] RequestResponseIdMismatch { expected: u16, got: u16 }, + #[error("failed to get a response with correct id {max} times, giving up")] + TooManyMismatchedResponses { max: u8 }, #[error("failed to deserialize the original request provided with the response: {0}")] OriginalRequestDeserialization(String), #[error("failed to deserialize the envelope of a response: {0}")] @@ -634,7 +642,7 @@ impl FramedNodeClient { reconnect, shutdown, config, - current_request_id: AtomicU16::new(0), + current_request_id: AtomicU16::new(INITIAL_REQUEST_ID), }, reconnect_loop, )) @@ -682,7 +690,7 @@ impl FramedNodeClient { return Err(Error::RequestFailed(err.to_string())); }; - loop { + for _ in 0..MAX_MISMATCHED_ID_RETRIES { let Ok(maybe_response) = tokio::time::timeout( Duration::from_secs(self.config.message_timeout_secs), client.next(), @@ -714,6 +722,10 @@ impl FramedNodeClient { return Err(Error::RequestFailed("disconnected".to_owned())); } } + + Err(Error::TooManyMismatchedResponses { + max: MAX_MISMATCHED_ID_RETRIES, + }) } fn generate_payload(&self, req: BinaryRequest) -> (u16, BinaryMessage) { @@ -1018,9 +1030,13 @@ mod tests { let port = get_port(); let mut rng = TestRng::new(); let shutdown = Arc::new(tokio::sync::Notify::new()); - let _mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port, None, Arc::clone(&shutdown)) - .await; + let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( + port, + 
Some(INITIAL_REQUEST_ID), + None, + Arc::clone(&shutdown), + ) + .await; let config = NodeClientConfig::new_with_port_and_retries(port, 2); let (c, _) = FramedNodeClient::new(config).await.unwrap(); @@ -1040,6 +1056,7 @@ mod tests { sleep(Duration::from_secs(5)).await; let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( port, + Some(INITIAL_REQUEST_ID), None, Arc::clone(&shutdown), ) @@ -1079,7 +1096,8 @@ mod tests { let shutdown = Arc::new(tokio::sync::Notify::new()); let mock_server_handle = start_mock_binary_port_responding_with_stored_value( port, - Some(0), + Some(INITIAL_REQUEST_ID), + None, Arc::clone(&shutdown), ) .await; @@ -1106,7 +1124,8 @@ mod tests { let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( port, - Some(2), + Some(INITIAL_REQUEST_ID + 2), + None, Arc::clone(&shutdown), ) .await; @@ -1130,10 +1149,10 @@ mod tests { let port = get_port(); let config = NodeClientConfig::new_with_port(port); let shutdown = Arc::new(tokio::sync::Notify::new()); - let _ = start_mock_binary_port(port, vec![], Arc::clone(&shutdown)).await; + let _ = start_mock_binary_port(port, vec![], 1, Arc::clone(&shutdown)).await; let (c, _) = FramedNodeClient::new(config).await.unwrap(); - let generated_ids: Vec<_> = (0..10) + let generated_ids: Vec<_> = (INITIAL_REQUEST_ID..INITIAL_REQUEST_ID + 10) .map(|_| { let (_, binary_message) = c.generate_payload(get_dummy_request()); let header = BinaryRequestHeader::from_bytes(&binary_message.payload()) @@ -1143,7 +1162,10 @@ mod tests { }) .collect(); - assert_eq!(generated_ids, (0..10).collect::>()); + assert_eq!( + generated_ids, + (INITIAL_REQUEST_ID..INITIAL_REQUEST_ID + 10).collect::>() + ); } #[test] @@ -1192,4 +1214,52 @@ mod tests { dbg!(&result); assert!(result.is_ok()) } + + #[tokio::test] + async fn should_keep_retrying_to_get_response_up_to_the_limit() { + const LIMIT: u8 = MAX_MISMATCHED_ID_RETRIES - 1; + + let port = get_port(); + let mut rng = TestRng::new(); + let 
shutdown = Arc::new(tokio::sync::Notify::new()); + let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( + port, + Some(0), + Some(LIMIT), + Arc::clone(&shutdown), + ) + .await; + let config = NodeClientConfig::new_with_port_and_retries(port, 2); + let (c, _) = FramedNodeClient::new(config).await.unwrap(); + + let res = query_global_state_for_string_value(&mut rng, &c) + .await + .unwrap_err(); + // Expect error different than 'TooManyMismatchResponses' + assert!(!matches!(res, Error::TooManyMismatchedResponses { .. })); + } + + #[tokio::test] + async fn should_quit_retrying_to_get_response_over_the_retry_limit() { + const LIMIT: u8 = MAX_MISMATCHED_ID_RETRIES; + + let port = get_port(); + let mut rng = TestRng::new(); + let shutdown = Arc::new(tokio::sync::Notify::new()); + let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( + port, + Some(0), + Some(LIMIT), + Arc::clone(&shutdown), + ) + .await; + let config = NodeClientConfig::new_with_port_and_retries(port, 2); + let (c, _) = FramedNodeClient::new(config).await.unwrap(); + + let res = query_global_state_for_string_value(&mut rng, &c) + .await + .unwrap_err(); + // Expect 'TooManyMismatchResponses' error + assert!(matches!(res, Error::TooManyMismatchedResponses { max } if max == LIMIT)); + } } diff --git a/rpc_sidecar/src/testing/mod.rs b/rpc_sidecar/src/testing/mod.rs index d3cac181..119a4a2c 100644 --- a/rpc_sidecar/src/testing/mod.rs +++ b/rpc_sidecar/src/testing/mod.rs @@ -24,11 +24,16 @@ const MESSAGE_SIZE: u32 = 1024 * 1024 * 10; pub struct BinaryPortMock { port: u16, response: Vec, + number_of_responses: u8, } impl BinaryPortMock { - pub fn new(port: u16, response: Vec) -> Self { - Self { port, response } + pub fn new(port: u16, response: Vec, number_of_responses: u8) -> Self { + Self { + port, + response, + number_of_responses, + } } pub async fn start(&self, shutdown: Arc) { @@ -46,7 +51,7 @@ impl BinaryPortMock { match val { Ok((stream, _addr)) => { 
let response_payload = self.response.clone(); - tokio::spawn(handle_client(stream, response_payload)); + tokio::spawn(handle_client(stream, response_payload, self.number_of_responses)); } Err(io_err) => { println!("acceptance failure: {:?}", io_err); @@ -58,14 +63,16 @@ impl BinaryPortMock { } } -async fn handle_client(stream: TcpStream, response: Vec) { +async fn handle_client(stream: TcpStream, response: Vec, number_of_responses: u8) { let mut client = Framed::new(stream, BinaryMessageCodec::new(MESSAGE_SIZE)); let next_message = client.next().await; if next_message.is_some() { tokio::spawn({ async move { - let _ = client.send(BinaryMessage::new(response)).await; + for _ in 0..number_of_responses { + let _ = client.send(BinaryMessage::new(response.clone())).await; + } } }); } @@ -78,6 +85,7 @@ pub fn get_port() -> u16 { pub async fn start_mock_binary_port_responding_with_stored_value( port: u16, request_id: Option, + number_of_responses: Option, shutdown: Arc, ) -> JoinHandle<()> { let value = StoredValue::CLValue(CLValue::from_t("Foo").unwrap()); @@ -86,16 +94,23 @@ pub async fn start_mock_binary_port_responding_with_stored_value( let val = BinaryResponse::from_value(data, protocol_version); let request = get_dummy_request_payload(request_id); let response = BinaryResponseAndRequest::new(val, &request, request_id.unwrap_or_default()); - start_mock_binary_port(port, response.to_bytes().unwrap(), shutdown).await + start_mock_binary_port( + port, + response.to_bytes().unwrap(), + number_of_responses.unwrap_or(1), // Single response by default + shutdown, + ) + .await } pub async fn start_mock_binary_port( port: u16, data: Vec, + number_of_responses: u8, shutdown: Arc, ) -> JoinHandle<()> { let handler = tokio::spawn(async move { - let binary_port = BinaryPortMock::new(port, data); + let binary_port = BinaryPortMock::new(port, data, number_of_responses); binary_port.start(shutdown).await; }); sleep(Duration::from_secs(3)).await; // This should be handled 
differently, preferably the mock binary port should inform that it already bound to the port diff --git a/sidecar/src/component.rs b/sidecar/src/component.rs index ba2ee1e3..ef277643 100644 --- a/sidecar/src/component.rs +++ b/sidecar/src/component.rs @@ -358,9 +358,13 @@ mod tests { async fn given_rpc_api_server_component_when_config_should_return_some() { let port = get_port(); let shutdown = Arc::new(tokio::sync::Notify::new()); - let _mock_server_handle = - start_mock_binary_port_responding_with_stored_value(port, None, Arc::clone(&shutdown)) - .await; + let _mock_server_handle = start_mock_binary_port_responding_with_stored_value( + port, + None, + None, + Arc::clone(&shutdown), + ) + .await; let component = RpcApiComponent::new(); let mut config = all_components_all_enabled(); config.rpc_server.as_mut().unwrap().node_client = From 6369daa5d9dad38c3f7d647aea6024118fb70720 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Mon, 3 Jun 2024 17:12:32 +0200 Subject: [PATCH 19/45] Resolved questions and using full labels --- LEGACY_SSE_EMULATION.md | 109 ++++++++++++++++------------------------ 1 file changed, 42 insertions(+), 67 deletions(-) diff --git a/LEGACY_SSE_EMULATION.md b/LEGACY_SSE_EMULATION.md index c88db0d9..542e1a01 100644 --- a/LEGACY_SSE_EMULATION.md +++ b/LEGACY_SSE_EMULATION.md @@ -1,28 +1,5 @@ # The Legacy SSE Emulation - Casper node versions 2.0 or greater (2.x) produce different SSE events than 1.x versions. Also, 1.x Casper nodes used 3 SSE endpoints (`/events/sigs`, `/events/deploys`, `/events/main`), while 2.x nodes expose all the SSE events on one endpoint (`/events`). @@ -30,7 +7,7 @@ Generally, the changes in 2.x regarding SSE are somewhat backward-incompatible. SSE emulation is off by default. To enable it, follow the steps below and read the main [README.md](./README.md#sse-server-configuration) file describing how to configure the SSE server. 
-> **Note**: This document refers to legacy events as V1 events, and to events streamed by nodes with version 2.x as V2 events. +> **Note**: 2.x node versions label new block events with `Version2`. In the rare case that a 2.x node sees a legacy block, it will label events coming from this block with `Version1`. The notion of Version1 and Version2 is new to 2.x, and wasn't present in 1.x node versions. So, for the legacy SSE emulation, both Version1 and Version2 BlockAdded events will be transformed to the old BlockAdded event format from 1.x. **LIMITATIONS:** @@ -42,7 +19,7 @@ The legacy SSE emulation does not map 2.x events to 1.x events in a 1-to-1 fashi ## Configuration -Currently, the only possible emulation is the V1 SSE API. To enable the emulation, set the `emulate_legacy_sse_apis` setting to `["V1"]`: +To enable the legacy SSE emulation, set the `emulate_legacy_sse_apis` setting to `["V1"]`. Currently, this is the only possible value: ``` [sse_server] @@ -57,7 +34,7 @@ This setting will expose three legacy SSE endpoints with the following events st - `/events/deploys`- `ApiVersion`, `DeployAccepted` and `Shutdown` - `/events/sigs` - `ApiVersion`, `FinalitySignature` and `Shutdown` -Those endpoints will emit events in the same format as the V1 SSE API of the Casper node. +Those endpoints will emit events in the same format as the legacy SSE API of the Casper node. ## Event Mapping @@ -69,14 +46,12 @@ The legacy SSE ApiVersion event is the same as the current version. ### The `BlockAdded` event - +The Sidecar can emit a legacy `BlockAdded` event by unwrapping the 2.x event structure and creating a 1.x emulated event structure. -A V1 `BlockAdded` event will be unwrapped and passed as a legacy `BlockAdded` event on the 2.x `events` endpoint. For instance, the V1 `BlockAdded` event will be translated to a 1.x emulated event as shown below. +A Version1 `BlockAdded` event will be unwrapped and passed as a legacy `BlockAdded` event as shown below.
-V1 BlockAdded in 2.x +Version1 BlockAdded in 2.x ```json { @@ -141,7 +116,7 @@ A V1 `BlockAdded` event will be unwrapped and passed as a legacy `BlockAdded` ev
-Emulated 1.x BlockAdded (from V1 BlockAdded) +Emulated 1.x BlockAdded (from Version1) ```json { @@ -205,25 +180,27 @@ A V1 `BlockAdded` event will be unwrapped and passed as a legacy `BlockAdded` ev When the 2.x event stream emits a legacy `BlockAdded` event, the following mapping rules apply: -- `block_hash` will be copied from V2 to V1. -- `block.block_hash` will be copied from V2 to V1. +- `block_hash` will be copied from Version2 to Version1. +- `block.block_hash` will be copied from Version2 to Version1. - `block.header.era_end`: - - If the `era_end` is a V1 variety - it will be copied. - - If the `era_end` is a V2 variety: - - V2 `next_era_validator_weights` will be copied from V2 `next_era_validator_weights`. - - V1 `era_report` will be assembled from the V2 `era_end.equivocators`, `era_end.rewards` and `era_end.inactive_validators` fields. - - If one of the `rewards` contains a reward that doesn't fit in a u64 (because V2 has U512 type in rewards values) - the whole `era_end` **WILL BE OMITTED** from the legacy V1 block (value None). - - V2 field `next_era_gas_price` has no equivalent in V1 and will be omitted. -- `block.header.current_gas_price` this field only exists in V2 and will be omitted from the V1 block header. -- `block.header.proposer` will be copied from V2 to V1 `block.body.proposer`. -- other `block.header.*` fields will be copied from V2 to V1. -- `block.body.deploy_hashes` will be based on V2 `block.body.standard` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.deploy_hashes` array. -- `block.body.transfer_hashes` will be based on V2 `block.body.mint` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to V1 `block.body.transfer_hashes` array. + - If the `era_end` is a Version1 variety - it will be copied. 
+ - If the `era_end` is a Version2 variety: + - Version2 `next_era_validator_weights` will be copied from Version2 `next_era_validator_weights`. + - Version1 `era_report` will be assembled from the Version2 `era_end.equivocators`, `era_end.rewards` and `era_end.inactive_validators` fields. + - If one of the `rewards` contains a reward that doesn't fit in a u64 (because Version2 has U512 type in rewards values) - the whole `era_end` **WILL BE OMITTED** from the legacy Version1 block (value None). + - Version2 field `next_era_gas_price` has no equivalent in Version1 and will be omitted. +- `block.header.current_gas_price` this field only exists in Version2 and will be omitted from the Version1 block header. +- `block.header.proposer` will be copied from Version2 to Version1 `block.body.proposer`. +- other `block.header.*` fields will be copied from Version2 to Version1. +- `block.body.deploy_hashes` will be based on Version2 `block.body.standard` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to Version1 `block.body.deploy_hashes` array. +- `block.body.transfer_hashes` will be based on Version2 `block.body.mint` transactions. Bear in mind, that only values of transactions of type `Deploy` will be copied to Version1 `block.body.transfer_hashes` array. Here is an example mapping demonstrating the rules above: + + -All V2 events will be omitted from legacy SSE event streams. For example, the following event will not be streamed. +Version1 events will be omitted from legacy SSE event streams. For example, the following event will not be streamed. ```json "TransactionAccepted": { @@ -573,9 +550,7 @@ A 2.x `TransactionExpired` event will be mapped to a `DeployExpired` event.


- - -All V1 variants will be omitted from legacy SSE streams. For example, a 2.x V1 `TransactionExpired` event will not be streamed. +All Version1 variants will be omitted from legacy SSE streams. For example, the following Version1 `TransactionExpired` event will not be streamed: ```json { @@ -606,27 +581,27 @@ When translating the `ExecutionResultV2` (`ex_v2`) to a legacy `ExecutionResult` - If the `ex_v2.error_message` is not empty, the `ExecutionResult` will be of type `Failure`, and the `ex_v1.error_message` will be set to that value. Otherwise, `ex_v1` will be of type `Success`. - The `ex_v1.cost` will be set to the `ex_v2.cost`. - The `ex_v1.transfers` list will always be empty since the 2.x node no longer uses a' TransferAddr' notion. -- The `ex_v1.effect` will be populated based on the `ex_v2.effects` field, applying the rules from [Translating Effects from V2](#translating-effects-from-v2). +- The `ex_v1.effect` will be populated based on the `ex_v2.effects` field, applying the rules from [Translating Effects from Version2](#translating-effects-from-v2). -#### Translating `Effects` from V2 +#### Translating `Effects` from Version2 -When translating the `Effects` from V2 to V1, the following rules apply: +When translating the `Effects` from Version2 to Version1, the following rules apply: - The output `operations` field will always be an empty list since the 2.x node no longer uses this concept for execution results. - For `transforms`, the objects will be constructed based on the `ex_v2.effects` with the following exceptions: - - The V2 `AddKeys` transform will be translated to the V1 `NamedKeys` transform. - - The V2 `Write` transform will be translated by applying the rules from paragraph [Translating Write transforms from V2](#translating-write-transform-from-v2). If at least one `Write` transform is not translatable (yielding a `None` value), the transform will be an empty array. 
+ - The Version2 `AddKeys` transform will be translated to the Version1 `NamedKeys` transform. + - The Version2 `Write` transform will be translated by applying the rules from paragraph [Translating Write transforms from Version2](#translating-write-transform-from-v2). If at least one `Write` transform is not translatable (yielding a `None` value), the transform will be an empty array. -#### Translating `Write` transforms from V2 +#### Translating `Write` transforms from Version2 -When translating `Write` transforms from V2 to V1, the following rules apply: +When translating `Write` transforms from Version2 to Version1, the following rules apply: - `CLValue`: will be copied to the `WriteCLValue` transform. -- `Account`: will be copied to the `WriteAccount` transform, assigning the V2 `account_hash` as the value for `WriteAccount`. -- `ContractWasm`: a `WriteContractWasm` transform will be created. Please note that the `WriteContractWasm` will not contain data, so the V2 details will be omitted. -- `Contract`: a `WriteContract` transform will be created. Please note that the `WriteContract` will not contain data, so the V2 details will be omitted. - -- `ContractPackage`: a `WriteContractPackage` transform will be created. Please note that the `WriteContractPackage` will not contain data, so the V2 details will be omitted. +- `Account`: will be copied to the `WriteAccount` transform, assigning the Version2 `account_hash` as the value for `WriteAccount`. +- `ContractWasm`: a `WriteContractWasm` transform will be created. Please note that the `WriteContractWasm` will not contain data, so the Version2 details will be omitted. +- `Contract`: a `WriteContract` transform will be created. Please note that the `WriteContract` will not contain data, so the Version2 details will be omitted. + +- `ContractPackage`: a `WriteContractPackage` transform will be created. Please note that the `WriteContractPackage` will not contain data, so the Version2 details will be omitted. 
- `LegacyTransfer`: a `WriteTransfer` transform will be created. Data will be copied. - `DeployInfo`: a `WriteDeployInfo` transform will be created. Data will be copied. - `EraInfo`: an `EraInfo` transform will be created. Data will be copied. From ab19737795f7300f82042bd7538a75c87a7cb968 Mon Sep 17 00:00:00 2001 From: jacek-casper <145967538+jacek-casper@users.noreply.github.com> Date: Mon, 3 Jun 2024 18:02:01 +0100 Subject: [PATCH 20/45] Validate config to check whether any server is enabled (#320) * Validate config to check whether any server is enabled * Modify to check for runnable components --- sidecar/src/run.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sidecar/src/run.rs b/sidecar/src/run.rs index 63f509d1..d6aea8d1 100644 --- a/sidecar/src/run.rs +++ b/sidecar/src/run.rs @@ -29,10 +29,6 @@ async fn do_run( config: SidecarConfig, components: Vec>, ) -> Result { - if components.is_empty() { - info!("No sidecar components are defined/enabled. Exiting"); - return Ok(ExitCode::SUCCESS); - } let mut component_futures = Vec::new(); for component in components.iter() { let maybe_future = component.prepare_component_task(&config).await?; @@ -40,5 +36,9 @@ async fn do_run( component_futures.push(future); } } + if component_futures.is_empty() { + info!("No runnable sidecar components are defined/enabled. Exiting"); + return Ok(ExitCode::SUCCESS); + } futures::future::select_all(component_futures).await.0 } From 764e64cffca8985c0100c9c514decb591e434431 Mon Sep 17 00:00:00 2001 From: Iulia Popescu Date: Mon, 3 Jun 2024 20:52:44 +0200 Subject: [PATCH 21/45] Review feedback Co-authored-by: Adam Stone <97986246+ACStoneCL@users.noreply.github.com> --- LEGACY_SSE_EMULATION.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LEGACY_SSE_EMULATION.md b/LEGACY_SSE_EMULATION.md index aafcd296..642e8e2f 100644 --- a/LEGACY_SSE_EMULATION.md +++ b/LEGACY_SSE_EMULATION.md @@ -12,7 +12,7 @@ SSE emulation is off by default. 
To enable it, follow the steps below and read t Before enabling the legacy SSE emulation, consider its limitations: - The legacy SSE emulation is a temporary solution and may be removed in a future major release of the node software. -The legacy SSE emulation does not map 2.x events to 1.x events in a 1-to-1 fashion. Some events are omitted, some are transformed, and some are passed through. Below are more details on the emulation's limitations. +- The legacy SSE emulation does not map 2.x events to 1.x events in a 1-to-1 fashion. Some events are omitted, some are transformed, and some are passed through. Below are more details on the emulation's limitations. - The legacy SSE emulation places an extra burden on resources. It will consume more resources than the native 2.x SSE API. - The legacy SSE emulation will consume more resources than the "native" 2.x SSE API. From dfbe60afc6fbc787a9453d57303e9d48e6998a1d Mon Sep 17 00:00:00 2001 From: Iulia Popescu Date: Mon, 3 Jun 2024 20:53:14 +0200 Subject: [PATCH 22/45] Review feedback Co-authored-by: Adam Stone <97986246+ACStoneCL@users.noreply.github.com> --- rpc_sidecar/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rpc_sidecar/README.md b/rpc_sidecar/README.md index 324e4d1a..fda4d167 100644 --- a/rpc_sidecar/README.md +++ b/rpc_sidecar/README.md @@ -22,7 +22,7 @@ The Sidecar maintains a TCP connection with the node and communicates using a cu ## Discovering the JSON RPC API -Once setup and running as described [here](../README.md), the Sidecar can be queried for its JSON RPC API using the `rpc.discover` method, as shown below. The result will be a list of RPC methods and their parameters. +Once setup and running as described [here](../README.md), the Sidecar can be queried for its JSON-RPC API using the `rpc.discover` method, as shown below. The result will be a list of RPC methods and their parameters. 
```bash curl -X POST http://localhost:/rpc -H 'Content-Type: application/json' -d '{"jsonrpc": "2.0", "method": "rpc.discover", "id": 1}' From e38a2a2cef7568a1e2ba907188e6c08024101985 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Tue, 4 Jun 2024 17:49:51 +0200 Subject: [PATCH 23/45] Update diagram labels for consistency --- README.md | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index 2a011248..b51479ee 100644 --- a/README.md +++ b/README.md @@ -55,18 +55,21 @@ The Casper Sidecar provides the following functionalities: The Sidecar has the following components and external dependencies: ```mermaid +--- +title: The Casper Sidecar Components +--- graph LR; - subgraph CASPER-SIDECAR + subgraph CASPER_SIDECAR SSE_SERVER["SSE server"] - RPC_API_SERVER["RPC API server (json)"] + RPC_API_SERVER["RPC API server (JSON)"] REST_API["Rest API server"] ADMIN_API["Admin API server"] end - CONFIG{{"Config file (toml)"}} - CONFIG --> CASPER-SIDECAR + CONFIG{{"Config file (TOML)"}} + CONFIG --> CASPER_SIDECAR STORAGE[(Storage)] - NODE_SSE(("Casper Node SSE port")) - NODE_BINARY(("Casper Node binary port")) + NODE_SSE(("Casper node SSE port")) + NODE_BINARY(("Casper node binary port")) RPC_API_SERVER --> NODE_BINARY SSE_SERVER --> NODE_SSE SSE_SERVER --> STORAGE @@ -82,21 +85,21 @@ The SSE Server has these components: CLIENT{Client} CLIENT --> SSE_SERVER_API STORAGE[("Storage")] - CONFIG{{"Config file (toml)"}} + CONFIG{{"Config file (TOML)"}} MAIN --1.reads--> CONFIG NODE_SSE{Node SSE port} SSE_LISTENER --2--> STORAGE NODE_SSE --1--> SSE_LISTENER - subgraph "Casper Sidecar" + subgraph CASPER_SIDECAR MAIN[main.rs] - MAIN --2.spawns---> SSE-SERVER - subgraph SSE-SERVER + MAIN --2.spawns---> SSE_SERVER + subgraph SSE_SERVER SSE_SERVER_API["SSE API"] RING_BUFFER["Events buffer"] SSE_SERVER_API --> RING_BUFFER SSE_LISTENER --3--> RING_BUFFER - subgraph "For connection in connections" - SSE_LISTENER["SSE 
Listener"] + subgraph "connection" + SSE_LISTENER["SSE listener"] end end end @@ -126,9 +129,9 @@ The Sidecar offers an optional REST API that allows clients to query the events CLIENT --> REST_API STORAGE[("Storage")] REST_API --> STORAGE - CONFIG{{"Config file (toml)"}} + CONFIG{{"Config file (TOML)"}} MAIN --1.reads--> CONFIG - subgraph "Casper Sidecar" + subgraph CASPER_SIDECAR MAIN[main.rs] MAIN --2.spawns--> REST_API REST_API["REST API"] @@ -145,7 +148,7 @@ The Sidecar offers an administrative API to allow an operator to check its curre CLIENT --> ADMIN_API CONFIG{{Config file}} MAIN --1.reads--> CONFIG - subgraph "Casper Sidecar" + subgraph CASPER_SIDECAR MAIN[main.rs] MAIN --2.spawns--> ADMIN_API ADMIN_API["ADMIN API"] From 286fef428e584b7f76027eb899ac51b9b643f28e Mon Sep 17 00:00:00 2001 From: ipopescu Date: Tue, 4 Jun 2024 23:00:09 +0200 Subject: [PATCH 24/45] Add a brief introduction --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b51479ee..21c96cb8 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,8 @@ ## Summary of Purpose -The Casper Sidecar application runs in tandem with the node process, and its primary purpose is to: +The Casper Sidecar is an application running in tandem with the node process. It allows subscribers to monitor a node's event stream, query stored events, and query the node's JSON RPC API, thus receiving faster responses and reducing the load placed on the node. Its primary purpose is to: + * Offload the node from broadcasting SSE events to multiple clients. * Provide client features that aren't part of the nodes' functionality, nor should they be. 
From 58dfb905c1eb5d26997410b1d5906fc1704efbab Mon Sep 17 00:00:00 2001 From: ipopescu Date: Tue, 4 Jun 2024 23:33:07 +0200 Subject: [PATCH 25/45] Minor error fix --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 21c96cb8..e10b64d8 100644 --- a/README.md +++ b/README.md @@ -111,7 +111,7 @@ The SSE Listener processes events in this order: 2. Store the event. 3. Publish the event to the SSE API. -Casper nodes offer an event stream API that returns server-sent events (SSEs) with JSON-encoded data. The Sidecar reads the event stream of all connected nodes, acting as a passthrough and replicating the SSE interface of the connected nodes. The Sidecar can: +Casper nodes offer an event stream API that returns server-sent events (SSEs) with JSON-encoded data. The Sidecar reads the event stream of all connected nodes, acting as a passthrough and replicating the SSE interface of the connected nodes. The Sidecar can: * Republish the current events from the node to clients listening to Sidecar's SSE API. @@ -122,7 +122,7 @@ Enabling and configuring the SSE Server of the Sidecar is optional. ### The REST API server -The Sidecar offers an optional REST API that allows clients to query the events stored in external storage. Node operators can discover the specific endpoints of the REST API using [OpenAPI] (#openapi-specification) and [Swagger] (#swagger-documentation). The [usage instructions](USAGE.md) provide more details. +The Sidecar offers an optional REST API that allows clients to query the events stored in external storage. You can discover the specific endpoints of the REST API using [OpenAPI](#openapi-specification) and [Swagger](#swagger-documentation). The [usage instructions](USAGE.md) provide more details. 
```mermaid graph LR; From c1af7cfd7480502ad8d7a38e3177d9f12b528fe9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Wed, 5 Jun 2024 14:41:17 +0200 Subject: [PATCH 26/45] Temporarily point to forked node repo with necessary changes --- Cargo.lock | 2 ++ Cargo.toml | 10 +++------- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e0d6eb04..eee9cdc7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,6 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" +source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#caae87b0473987f31d965ca8e59cc3cac9b79ff2" dependencies = [ "bincode", "bytes", @@ -669,6 +670,7 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" +source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#caae87b0473987f31d965ca8e59cc3cac9b79ff2" dependencies = [ "base16", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index cf583ec0..3133d674 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ members = [ anyhow = "1" async-stream = "0.3.4" async-trait = "0.1.77" -casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } -casper-binary-port = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } +casper-types = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } +casper-binary-port = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-event-types = { path = "./types", version = "1.0.0" } casper-rpc-sidecar = { path = "./rpc_sidecar", version = "1.0.0" } @@ -30,8 +30,4 @@ tokio = "1.23.1" toml = "0.5.8" tracing = { version = "0", default-features = false } tracing-subscriber = "0" -serde = { version = "1", default-features = false } - 
-[patch.'https://github.com/casper-network/casper-node.git'] -casper-binary-port = { path = "../casper-node/binary_port" } -casper-types = { path = "../casper-node/types" } \ No newline at end of file +serde = { version = "1", default-features = false } \ No newline at end of file From e5de10a75dfa734c97985dab12244101499dccff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Thu, 6 Jun 2024 15:44:14 +0200 Subject: [PATCH 27/45] Promote binary port error from `u8` to `u16` --- rpc_sidecar/src/node_client.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 185ba00a..dfdd57c6 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -515,11 +515,11 @@ pub enum Error { #[error("received a response with an unsupported protocol version: {0}")] UnsupportedProtocolVersion(ProtocolVersion), #[error("received an unexpected node error: {message} ({code})")] - UnexpectedNodeError { message: String, code: u8 }, + UnexpectedNodeError { message: String, code: u16 }, } impl Error { - fn from_error_code(code: u8) -> Self { + fn from_error_code(code: u16) -> Self { match ErrorCode::try_from(code) { Ok(ErrorCode::FunctionDisabled) => Self::FunctionIsDisabled, Ok(ErrorCode::RootNotFound) => Self::UnknownStateRootHash, From 7a72933de02ae0d4410254a626ca87efbc37f571 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Thu, 6 Jun 2024 16:49:07 +0200 Subject: [PATCH 28/45] Update `casper-binary-port` and `casper-types` dependencies --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eee9cdc7..9e5280b2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#caae87b0473987f31d965ca8e59cc3cac9b79ff2" +source = 
"git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#6b3041c49f177d97dd338c450f98f65f640cc34a" dependencies = [ "bincode", "bytes", @@ -670,7 +670,7 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#caae87b0473987f31d965ca8e59cc3cac9b79ff2" +source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#6b3041c49f177d97dd338c450f98f65f640cc34a" dependencies = [ "base16", "base64 0.13.1", From a2801e82e117a6b57ada2221fb3bce24136c7612 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Thu, 6 Jun 2024 17:02:12 +0200 Subject: [PATCH 29/45] Satisfy clippy --- rpc_sidecar/src/node_client.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index dfdd57c6..e48db132 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -1149,13 +1149,13 @@ mod tests { let port = get_port(); let config = NodeClientConfig::new_with_port(port); let shutdown = Arc::new(tokio::sync::Notify::new()); - let _ = start_mock_binary_port(port, vec![], 1, Arc::clone(&shutdown)).await; + let _mock_server_handle = start_mock_binary_port(port, vec![], 1, Arc::clone(&shutdown)).await; let (c, _) = FramedNodeClient::new(config).await.unwrap(); let generated_ids: Vec<_> = (INITIAL_REQUEST_ID..INITIAL_REQUEST_ID + 10) .map(|_| { let (_, binary_message) = c.generate_payload(get_dummy_request()); - let header = BinaryRequestHeader::from_bytes(&binary_message.payload()) + let header = BinaryRequestHeader::from_bytes(binary_message.payload()) .unwrap() .0; header.id() From 598593c2f9ca7f7c538546c0ddd4331b61e01be1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Thu, 6 Jun 2024 17:15:41 +0200 Subject: [PATCH 30/45] Update formatting --- rpc_sidecar/src/node_client.rs | 3 ++- 1 file changed, 2 insertions(+), 1 
deletion(-) diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index e48db132..72022394 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -1149,7 +1149,8 @@ mod tests { let port = get_port(); let config = NodeClientConfig::new_with_port(port); let shutdown = Arc::new(tokio::sync::Notify::new()); - let _mock_server_handle = start_mock_binary_port(port, vec![], 1, Arc::clone(&shutdown)).await; + let _mock_server_handle = + start_mock_binary_port(port, vec![], 1, Arc::clone(&shutdown)).await; let (c, _) = FramedNodeClient::new(config).await.unwrap(); let generated_ids: Vec<_> = (INITIAL_REQUEST_ID..INITIAL_REQUEST_ID + 10) From 5dd23af92487c0ceda14322165e56a6e900d781d Mon Sep 17 00:00:00 2001 From: ipopescu Date: Fri, 7 Jun 2024 13:02:50 +0200 Subject: [PATCH 31/45] Review feedback incl. updated highlighting --- README.md | 36 ++++++++++++++-------------- USAGE.md | 53 ++++++++++++++++++++--------------------- resources/ETC_README.md | 2 +- rpc_sidecar/README.md | 2 +- 4 files changed, 46 insertions(+), 47 deletions(-) diff --git a/README.md b/README.md index e10b64d8..aef4af3f 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,7 @@ The Casper Sidecar provides the following functionalities: * A server-sent events (SSE) server with an `/events` endpoint that streams all the events received from all connected nodes. The Sidecar also stores these events. * A REST API server that allows clients to query stored events. * A JSON RPC bridge between end users and a Casper node's binary port. +* Legacy emulation for clients using older versions of the SSE API. The Sidecar has the following components and external dependencies: @@ -177,7 +178,6 @@ The Sidecar also offers an RPC JSON API server that can be enabled and configure ## Configuring the Sidecar - The Sidecar service must be configured using a `.toml` file specified at runtime. 
This repository contains several sample configuration files that can be used as examples and adjusted according to your scenario: @@ -192,7 +192,7 @@ Once you create the configuration file and are ready to run the Sidecar service, Here is an example configuration for the RPC API server: -``` +```toml [rpc_server.main_server] enable_server = true address = '0.0.0.0:7777' @@ -251,7 +251,7 @@ max_attempts = 30 The Sidecar SSE server is used to connect to Casper nodes, listen to events from them, store them locally and re-broadcast them to clients. Here is a sample configuration for the SSE server: -``` +```toml [sse_server] enable_server = true emulate_legacy_sse_apis = ["V1"] @@ -272,7 +272,7 @@ The Sidecar's SSE component can connect to Casper nodes' SSE endpoints with vers The `node_connections` option configures the node (or multiple nodes) to which the Sidecar will connect and the parameters under which it will operate with that node. Connecting to multiple nodes requires multiple `[[sse_server.connections]]` sections. -``` +```toml [sse_server] enable_server = true @@ -328,7 +328,7 @@ sleep_between_keep_alive_checks_in_seconds = 30 Applications using version 1 of a Casper node's event stream server can still function using an emulated V1 SSE API for a limited time. Enabling the V1 SSE API emulation requires the `emulate_legacy_sse_apis` setting to be `["V1"]`: -``` +```toml [sse_server] enable_server = true emulate_legacy_sse_apis = ["V1"] @@ -345,7 +345,7 @@ See the [Legacy SSE Emulation](./LEGACY_SSE_EMULATION.md) page for more details. To configure the Sidecar's event stream server, specify the following settings: -``` +```toml [sse_server.event_stream_server] port = 19999 max_concurrent_subscribers = 100 @@ -360,7 +360,7 @@ event_stream_buffer_length = 5000 The following section determines outbound connection criteria for the Sidecar's REST server. 
-``` +```toml [rest_api_server] enable_server = true port = 18888 @@ -379,7 +379,7 @@ request_timeout_in_seconds = 10 This directory stores the SSE cache and an SQLite database if the Sidecar was configured to use SQLite. -``` +```toml [storage] storage_path = "./target/storage" ``` @@ -392,7 +392,7 @@ The Sidecar can connect to different types of databases. The current options are This section includes configurations for the SQLite database. -``` +```toml [storage.sqlite_config] file_name = "sqlite_database.db3" max_connections_in_pool = 100 @@ -445,7 +445,7 @@ However, DB connectivity can also be configured using the Sidecar configuration It is possible to completely omit the PostgreSQL configuration from the Sidecar's configuration file. In this case, the Sidecar will attempt to connect to the PostgreSQL using the database environment variables or use some default values for non-critical variables. -``` +```toml [storage.postgresql_config] database_name = "event_sidecar" host = "localhost" @@ -458,7 +458,7 @@ max_connections_in_pool = 30 This optional section configures the Sidecar's administrative server. If this section is not specified, the Sidecar will not start an admin server. -``` +```toml [admin_api_server] enable_server = true port = 18887 @@ -489,7 +489,7 @@ To compile, test, and run the Sidecar, install the following software first: After creating the configuration file, run the Sidecar using `cargo` and point to the configuration file using the `--path-to-config` option, as shown below. The command needs to run with `root` privileges. -```shell +```sh sudo cargo run -- --path-to-config ./resources/example_configs/EXAMPLE_NODE_CONFIG.toml ``` @@ -497,7 +497,7 @@ The Sidecar application leverages tracing, which can be controlled by setting th The following command will run the Sidecar application with the `INFO` log level. 
-``` +```sh RUST_LOG=info cargo run -p casper-sidecar -- --path-to-config ./resources/example_configs/EXAMPLE_NCTL_CONFIG.toml ``` @@ -515,13 +515,13 @@ Further details about log levels can be found [here](https://docs.rs/env_logger/ You can run the unit and integration tests included in this repository with the following command: -``` +```sh cargo test ``` You can also run the performance tests using this command: -``` +```sh cargo test -- --include-ignored ``` @@ -569,7 +569,7 @@ curl http://SIDECAR_URL:SIDECAR_ADMIN_PORT/metrics **Sample output**: -``` +```sh # HELP node_statuses Current status of node to which the Sidecar is connected. Numbers mean: 0 - preparing; 1 - connecting; 2 - connected; 3 - reconnecting; -1 - connections_exhausted -> used up all connection attempts ; -2 - incompatible -> node is in an incompatible version # TYPE node_statuses gauge node_statuses{node="35.180.42.211:9999"} 2 @@ -591,7 +591,7 @@ In the above `node_statuses`, you can see which nodes are connecting, which are To diagnose errors, look for `error` logs and check the `error_counts` on the metrics page, `http://SIDECAR_URL:SIDECAR_ADMIN_PORT/metrics`, where most of the errors related to data flow will be stored: -``` +```sh # HELP error_counts Error counts # TYPE error_counts counter error_counts{category="connection_manager",description="fetching_from_stream_failed"} 6 @@ -601,7 +601,7 @@ error_counts{category="connection_manager",description="fetching_from_stream_fai To monitor the Sidecar's memory consumption, observe the metrics page, `http://SIDECAR_URL:SIDECAR_ADMIN_PORT/metrics`. Search for `process_resident_memory_bytes`: -``` +```sh # HELP process_resident_memory_bytes Resident memory size in bytes. 
# TYPE process_resident_memory_bytes gauge process_resident_memory_bytes 292110336 diff --git a/USAGE.md b/USAGE.md index e56ed605..896d3d63 100644 --- a/USAGE.md +++ b/USAGE.md @@ -26,7 +26,7 @@ It is possible to monitor the Sidecar event stream using *cURL*, depending on ho The Sidecar can connect to Casper nodes with versions greater or equal to `2.0.0`. -```json +```sh curl -s http:///events ``` @@ -35,15 +35,15 @@ curl -s http:///events Given this [example configuration](./resources/example_configs/EXAMPLE_NODE_CONFIG.toml), here are the commands for each endpoint: - ```json - curl -sN http://127.0.0.1:19999/events - ``` +```sh +curl -sN http://127.0.0.1:19999/events +``` Also, the Sidecar exposes an endpoint for Sidecar-generated events: - ```json - curl -sN http://127.0.0.1:19999/events/sidecar - ``` +```sh +curl -sN http://127.0.0.1:19999/events/sidecar +``` ### Node events versioning @@ -53,7 +53,7 @@ If the node goes offline, the `ApiVersion` may differ when it restarts (i.e., in Here is an example of what the API version would look like while listening on the Sidecar’s event stream. The colons represent "keep-alive" messages. -``` +```sh curl -sN http://127.0.0.1:19999/events data:{"ApiVersion":"2.0.0"} @@ -74,7 +74,7 @@ id:21821471 When a client connects to the `events/sidecar` endpoint, it will receive a message containing the version of the Sidecar software. Release version `1.1.0` would look like this: -``` +```sh curl -sN http://127.0.0.1:19999/events/sidecar data:{"SidecarVersion":"1.1.0"} @@ -82,7 +82,6 @@ data:{"SidecarVersion":"1.1.0"} : : - ``` Note that the SidecarVersion differs from the APIVersion emitted by the node event streams. You will also see the keep-alive messages as colons, ensuring the connection is active. @@ -95,7 +94,7 @@ The Sidecar does not expose Shutdown events via its REST API. 
Here is an example of how the stream might look like if the node went offline for an upgrade and came back online after a Shutdown event with a new `ApiVersion`: -``` +```sh curl -sN http://127.0.0.1:19999/events data:{"ApiVersion":"2.0.0"} @@ -122,7 +121,6 @@ id:3 : : - ``` Note that the Sidecar can emit another type of shutdown event on the `events/sidecar` endpoint, as described below. @@ -133,7 +131,7 @@ If the Sidecar attempts to connect to a node that does not come back online with The message structure of the Sidecar shutdown event is the same as the [node shutdown event](#the-node-shutdown-event). The Sidecar event stream would look like this: -``` +```sh curl -sN http://127.0.0.1:19999/events/sidecar data:{"SidecarVersion":"1.1.0"} @@ -160,7 +158,7 @@ The path URL is `/block`. Example: -```json +```sh curl -s http://127.0.0.1:18888/block ``` @@ -182,7 +180,7 @@ The path URL is `/block/`. Enter a valid block hash. Example: -```json +```sh curl -s http://127.0.0.1:18888/block/bd2e0c36150a74f50d9884e38a0955f8b1cba94821b9828c5f54d8929d6151bc ``` @@ -203,7 +201,7 @@ The path URL is `/block/`. 
Enter a valid number represe Example: -```json +```sh curl -s http://127.0.0.1:18888/block/336460 ``` @@ -226,7 +224,7 @@ The output differs depending on the transaction's status, which changes over tim Example: -```json +```sh curl -s http://127.0.0.1:18888//transaction/version1/3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a ``` @@ -236,7 +234,8 @@ The sample output below is for a transaction that was accepted but has yet to be Transaction accepted but not processed yet ```json -{"transaction_hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","transaction_accepted": {"header": {"api_version": "2.0.0","network_name": "casper-net-1"},"payload": {"transaction": {"Version1": {"hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","header": {"chain_name": "casper-net-1","timestamp": "2024-03-20T13:31:59.772Z","ttl": "30m","body_hash": "40c7476a175fb97656ec6da1ace2f1900a9d353f1637943a30edd5385494b345","pricing_mode": {"Fixed": {"gas_price_tolerance": 1000}},"initiator_addr": {"PublicKey": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973"}},"body": {"args": [],"target": {"Session": {"kind": "Standard","module_bytes":"","runtime": "VmCasperV1"}},"entry_point": {"Custom": "test"},"scheduling": "Standard"},"approvals": [{"signer": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973","signature": "0154fd295f5d4d62544f63d70470de28b2bf2cddecac2a237b6a2a78d25ee14b21ea2861d711a51f57b3f9f74e247a8d26861eceead6569f233949864a9d5fa100"}]}}}},"transaction_processed": ,"transaction_expired": false}``` +{"transaction_hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","transaction_accepted": {"header": {"api_version": "2.0.0","network_name": "casper-net-1"},"payload": {"transaction": {"Version1": {"hash": "3141e85f8075c3a75c2a1abcc79810c07d103ff97c03200ab0d0baf91995fe4a","header": {"chain_name": "casper-net-1","timestamp": "2024-03-20T13:31:59.772Z","ttl": 
"30m","body_hash": "40c7476a175fb97656ec6da1ace2f1900a9d353f1637943a30edd5385494b345","pricing_mode": {"Fixed": {"gas_price_tolerance": 1000}},"initiator_addr": {"PublicKey": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973"}},"body": {"args": [],"target": {"Session": {"kind": "Standard","module_bytes":"","runtime": "VmCasperV1"}},"entry_point": {"Custom": "test"},"scheduling": "Standard"},"approvals": [{"signer": "01d848e225db95e34328ca1c64d73ecda50f5070fd6b21037453e532d085a81973","signature": "0154fd295f5d4d62544f63d70470de28b2bf2cddecac2a237b6a2a78d25ee14b21ea2861d711a51f57b3f9f74e247a8d26861eceead6569f233949864a9d5fa100"}]}}}},"transaction_processed": ,"transaction_expired": false} +```

@@ -260,7 +259,7 @@ The path URL is `/transaction/accepted///transaction/expired///transaction/expired/version1/`. E Example: -```json +```sh curl -s http://127.0.0.1:18888/transaction/processed/version1/8204af872d7d19ef8da947bce67c7a55449bc4e2aa12d2756e9ec7472b4854f7 ``` @@ -322,7 +321,7 @@ The path URL is `/faults/`. Enter a valid hexadecimal rep Example: -```json +```sh curl -s http://127.0.0.1:18888/faults/01a601840126a0363a6048bfcbb0492ab5a313a1a19dc4c695650d8f3b51302703 ``` @@ -333,7 +332,7 @@ The path URL is: `/faults/`. Enter an era identifier. Example: -```json +```sh curl -s http://127.0.0.1:18888/faults/2304 ``` @@ -345,7 +344,7 @@ The path URL is: `/signatures/`. Enter a valid block hash Example: -```json +```sh curl -s http://127.0.0.1:18888/signatures/85aa2a939bc3a4afc6d953c965bab333bb5e53185b96bb07b52c295164046da2 ``` @@ -357,7 +356,7 @@ The path URL is: `/step/`. Enter a valid era identifier. Example: -```json +```sh curl -s http://127.0.0.1:18888/step/7268 ``` @@ -367,7 +366,7 @@ If no filter URL was specified after the root address (HOST:PORT), an error mess Example: -```json +```sh curl http://127.0.0.1:18888 {"code":400,"message":"Invalid request path provided"} ``` @@ -378,7 +377,7 @@ If an invalid filter was specified, an error message will be returned. Example: -```json +```sh curl http://127.0.0.1:18888/other {"code":400,"message":"Invalid request path provided"} ``` diff --git a/resources/ETC_README.md b/resources/ETC_README.md index 2e5d1020..92a49c33 100644 --- a/resources/ETC_README.md +++ b/resources/ETC_README.md @@ -19,7 +19,7 @@ For more information, including how to setup the SSE, RPC, REST, and Admin serve This directory stores the SSE cache and a database if the Sidecar was configured to use one. 
-``` +```toml [storage] storage_path = "/var/lib/casper-sidecar" ``` diff --git a/rpc_sidecar/README.md b/rpc_sidecar/README.md index fda4d167..423d4c93 100644 --- a/rpc_sidecar/README.md +++ b/rpc_sidecar/README.md @@ -24,7 +24,7 @@ The Sidecar maintains a TCP connection with the node and communicates using a cu Once setup and running as described [here](../README.md), the Sidecar can be queried for its JSON-RPC API using the `rpc.discover` method, as shown below. The result will be a list of RPC methods and their parameters. -```bash +```sh curl -X POST http://localhost:/rpc -H 'Content-Type: application/json' -d '{"jsonrpc": "2.0", "method": "rpc.discover", "id": 1}' ``` From 7346fe3e8cbc931642141802e1867cbfd864d7c8 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Fri, 7 Jun 2024 13:03:12 +0200 Subject: [PATCH 32/45] casper-json-rpc updated highlighting and cleanup --- json_rpc/README.md | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/json_rpc/README.md b/json_rpc/README.md index 9b16ca2d..b0c5cc91 100644 --- a/json_rpc/README.md +++ b/json_rpc/README.md @@ -1,4 +1,4 @@ -# `casper-json-rpc` +# The `casper-json-rpc` Library [![LOGO](https://raw.githubusercontent.com/casper-network/casper-node/master/images/casper-association-logo-primary.svg)](https://casper.network/) @@ -7,16 +7,15 @@ [![Documentation](https://docs.rs/casper-node/badge.svg)](https://docs.rs/casper-json-rpc) [![License](https://img.shields.io/badge/license-Apache-blue)](https://github.com/casper-network/casper-node/blob/master/LICENSE) -A library suitable for use as the framework for a JSON-RPC server. +The `casper-json-rpc` library described here can be used as the framework for a JSON-RPC server. 
# Usage

-Normally usage will involve two steps:

- * construct a set of request handlers using a
-   [`RequestHandlersBuilder`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/struct.RequestHandlersBuilder.html)
- * call [`casper_json_rpc::route`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/fn.route.html) to construct a
-   boxed warp filter ready to be passed to [`warp::service`](https://docs.rs/warp/latest/warp/fn.service.html) for
-   example
+Typical usage of this library involves two steps:
+
+* Construct a set of request handlers using a
+[`RequestHandlersBuilder`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/struct.RequestHandlersBuilder.html).
+* Call [`casper_json_rpc::route`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/fn.route.html) to construct a boxed warp filter ready to be passed to [`warp::service`](https://docs.rs/warp/latest/warp/fn.service.html).

# Example

@@ -61,15 +60,15 @@ async fn main() {
}
```

-If this receives a request such as
+The following is a sample request:

-```
+```sh
curl -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":"id","method":"get"}' http://127.0.0.1:3030/rpc
```

-then the server will respond with
+Here is a sample response:

-```json
+```json
{"jsonrpc":"2.0","id":"id","result":"got it"}
```

@@ -77,13 +76,12 @@ then the server will respond with

To return a JSON-RPC response indicating an error, use
[`Error::new`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/struct.Error.html#method.new). Most error
-conditions which require returning a reserved error are already handled in the provided warp filters. The only
+conditions that require returning a reserved error are already handled in the provided warp filters. 
The only exception is -[`ReservedErrorCode::InvalidParams`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/enum.ReservedErrorCode.html#variant.InvalidParams) -which should be returned by any RPC handler which deems the provided `params: Option` to be invalid for any +[`ReservedErrorCode::InvalidParams`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/enum.ReservedErrorCode.html#variant.InvalidParams), which should be returned by any RPC handler that deems the provided `params: Option` to be invalid for any reason. -Generally a set of custom error codes should be provided. These should all implement +Generally, a set of custom error codes should be provided. These should all implement [`ErrorCodeT`](https://docs.rs/casper-json-rpc/latest/casper_json_rpc/trait.ErrorCodeT.html). ## Example custom error code From 4aa3748123fa0505443e2dff4462986f66b3601c Mon Sep 17 00:00:00 2001 From: ipopescu Date: Fri, 7 Jun 2024 13:30:22 +0200 Subject: [PATCH 33/45] Review feedback --- LEGACY_SSE_EMULATION.md | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/LEGACY_SSE_EMULATION.md b/LEGACY_SSE_EMULATION.md index 642e8e2f..51144c95 100644 --- a/LEGACY_SSE_EMULATION.md +++ b/LEGACY_SSE_EMULATION.md @@ -198,8 +198,6 @@ When the 2.x event stream emits a legacy `BlockAdded` event, the following mappi Here is an example mapping demonstrating the rules above: - - -Version1 events will be omitted from legacy SSE event streams. For example, the following event will not be streamed. +All Version1 variants will be omitted from legacy SSE streams. For example, the following Version1 `TransactionAccepted` event will not be streamed: ```json "TransactionAccepted": { @@ -601,7 +596,6 @@ When translating `Write` transforms from Version2 to Version1, the following rul - `Account`: will be copied to the `WriteAccount` transform, assigning the Version2 `account_hash` as the value for `WriteAccount`. 
- `ContractWasm`: a `WriteContractWasm` transform will be created. Please note that the `WriteContractWasm` will not contain data, so the Version2 details will be omitted. - `Contract`: a `WriteContract` transform will be created. Please note that the `WriteContract` will not contain data, so the Version2 details will be omitted. - - `ContractPackage`: a `WriteContractPackage` transform will be created. Please note that the `WriteContractPackage` will not contain data, so the Version2 details will be omitted. - `LegacyTransfer`: a `WriteTransfer` transform will be created. Data will be copied. - `DeployInfo`: a `WriteDeployInfo` transform will be created. Data will be copied. From 74c42b08d58b582b372d921fbd9e997d92a10ad6 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Fri, 7 Jun 2024 14:12:05 +0200 Subject: [PATCH 34/45] Review feedback - remove HTML --- LEGACY_SSE_EMULATION.md | 618 +++++++++++++++++++--------------------- 1 file changed, 300 insertions(+), 318 deletions(-) diff --git a/LEGACY_SSE_EMULATION.md b/LEGACY_SSE_EMULATION.md index 51144c95..9cd94933 100644 --- a/LEGACY_SSE_EMULATION.md +++ b/LEGACY_SSE_EMULATION.md @@ -1,6 +1,5 @@ # The Legacy SSE Emulation - Casper node versions 2.0 or greater (2.x) produce different SSE events than 1.x versions. Also, 1.x Casper nodes used 3 SSE endpoints (`/events/sigs`, `/events/deploys`, `/events/main`), while 2.x nodes expose all the SSE events on one endpoint (`/events`). Generally, the changes in 2.x regarding SSE are somewhat backward-incompatible. To collect all the data, clients should adopt the new SSE API. However, if some clients are not ready or do not need to adopt the new SSE API, they can use the legacy SSE emulation. @@ -22,7 +21,7 @@ Before enabling the legacy SSE emulation, consider its limitations: To enable the legacy SSE emulation, set the `emulate_legacy_sse_apis` setting to `["V1"]`. Currently, this is the only possible value: -``` +```toml [sse_server] (...) 
emulate_legacy_sse_apis = ["V1"] @@ -41,89 +40,30 @@ Those endpoints will emit events in the same format as the legacy SSE API of the There are limitations to what the Casper Sidecar can and will do. Below, you will find a list of mapping assumptions between 2.x events and 1.x events. -### The `ApiVersion` event +- [`ApiVersion` events](#the-apiversion-event) +- [`BlockAdded` events](#the-blockadded-event) +- [`TransactionAccepted` events](#the-transactionaccepted-event) +- [`TransactionExpired` events](#the-transactionexpired-event) +- [`TransactionProcessed` events](#the-transactionprocessed-event) + +### `ApiVersion` events The legacy SSE ApiVersion event is the same as the current version. -### The `BlockAdded` event +### `BlockAdded` events The Sidecar can emit a legacy `BlockAdded` event by unwrapping the 2.x event structure and creating a 1.x emulated event structure. A Version1 `BlockAdded` event will be unwrapped and passed as a legacy `BlockAdded` event as shown below. -
-Version1 BlockAdded in 2.x - - ```json - { - "BlockAdded": { - "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", - "block": { - "Version1": { - "hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", - "header": { - "parent_hash": "90ca56a697f8b1b19cba08c642fd7f04669b8cd49bb9d652fca989f8a9f8bcea", - "state_root_hash": "9cce223fdbeab41dbbcf0b62f3fd857373131378d51776de26bb9f4fefe1e849", - "body_hash": "5f37be399c15b2394af48243ce10a62a7d12769dc5f7740b18ad3bf55bde5271", - "random_bit": true, - "accumulated_seed": "b3e1930565a80a874a443eaadefa1a340927fb8b347729bbd93e93935a47a9e4", - "era_end": { - "era_report": { - "equivocators": [ - "0203c9da857cfeccf001ce00720ae2e0d083629858b60ac05dd285ce0edae55f0c8e", - "02026fb7b629a2ec0132505cdf036f6ffb946d03a1c9b5da57245af522b842f145be" - ], - "rewards": [ - { - "validator": "01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25", - "amount": 129457537 - } - ], - "inactive_validators": [] - }, - "next_era_validator_weights": [ - { - "validator": "0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b", - "weight": "1" - }, - { - "validator": "02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c", - "weight": "2" - } - ] - }, - "timestamp": "2024-04-25T20:00:35.640Z", - "era_id": 601701, - "height": 6017012, - "protocol_version": "1.0.0" - }, - "body": { - "proposer": "0203426736da2554ebf1f8ee1d2ce4ab11b1e33419d7dfc1ce2fe1945faf00bacc9e", - "deploy_hashes": [ - "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", - "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" - ], - "transfer_hashes": [ - "3e30b6c1c5dbca9277425846b42dc832cd3d8ce889c38d6bfc8bd95b3e1c403e", - "c990ba47146270655eaacc53d4115cbd980697f3d4e9c76bccfdfce82af6ce08" - ] - } - } - } - } - } - ``` - -
- -
-Emulated 1.x BlockAdded (from Version1) +**Version1 BlockAdded in 2.x:** - ```json - { - "BlockAdded": { - "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", - "block": { +```json +{ + "BlockAdded": { + "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "block": { + "Version1": { "hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", "header": { "parent_hash": "90ca56a697f8b1b19cba08c642fd7f04669b8cd49bb9d652fca989f8a9f8bcea", @@ -175,9 +115,68 @@ A Version1 `BlockAdded` event will be unwrapped and passed as a legacy `BlockAdd } } } - ``` -


+} +``` +**Emulated 1.x BlockAdded (from Version1):** + +```json +{ + "BlockAdded": { + "block_hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "block": { + "hash": "d59359690ca5a251b513185da0767f744e77645adec82bb6ff785a89edc7591c", + "header": { + "parent_hash": "90ca56a697f8b1b19cba08c642fd7f04669b8cd49bb9d652fca989f8a9f8bcea", + "state_root_hash": "9cce223fdbeab41dbbcf0b62f3fd857373131378d51776de26bb9f4fefe1e849", + "body_hash": "5f37be399c15b2394af48243ce10a62a7d12769dc5f7740b18ad3bf55bde5271", + "random_bit": true, + "accumulated_seed": "b3e1930565a80a874a443eaadefa1a340927fb8b347729bbd93e93935a47a9e4", + "era_end": { + "era_report": { + "equivocators": [ + "0203c9da857cfeccf001ce00720ae2e0d083629858b60ac05dd285ce0edae55f0c8e", + "02026fb7b629a2ec0132505cdf036f6ffb946d03a1c9b5da57245af522b842f145be" + ], + "rewards": [ + { + "validator": "01235b932586ae5cc3135f7a0dc723185b87e5bd3ae0ac126a92c14468e976ff25", + "amount": 129457537 + } + ], + "inactive_validators": [] + }, + "next_era_validator_weights": [ + { + "validator": "0198957673ad060503e2ec7d98dc71af6f90ad1f854fe18025e3e7d0d1bbe5e32b", + "weight": "1" + }, + { + "validator": "02022d6bc4e3012cc4ae467b5525111cf7ed65883b05a1d924f1e654c64fad3a027c", + "weight": "2" + } + ] + }, + "timestamp": "2024-04-25T20:00:35.640Z", + "era_id": 601701, + "height": 6017012, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "0203426736da2554ebf1f8ee1d2ce4ab11b1e33419d7dfc1ce2fe1945faf00bacc9e", + "deploy_hashes": [ + "06950e4374dc88685634ec30bcddd68e6b46c109ccf6d29e2dfcf5367df75571", + "27a89dd58e6297a5244342b68b117afe2555131b896ad6ed4321edcd4130ae7b" + ], + "transfer_hashes": [ + "3e30b6c1c5dbca9277425846b42dc832cd3d8ce889c38d6bfc8bd95b3e1c403e", + "c990ba47146270655eaacc53d4115cbd980697f3d4e9c76bccfdfce82af6ce08" + ] + } + } + } +} +``` When the 2.x event stream emits a legacy `BlockAdded` event, the following mapping rules apply: @@ -198,261 +197,189 @@ When the 2.x event stream 
emits a legacy `BlockAdded` event, the following mappi Here is an example mapping demonstrating the rules above: -
-Version2 BlockAdded in 2.x - - ```json - { - "BlockAdded": { - "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", - "block": { - "Version2": { - "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", - "header": { - "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", - "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", - "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", - "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", - "random_bit": false, - "accumulated_seed": "a0e424710f4fba036ba450b40f2bd7a842b176cf136f3af1952a2a13eb02616c", - "era_end": { - "equivocators": [ - "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", - "0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", - "020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" - ], - "inactive_validators": [ - "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", - "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56" - ], - "next_era_validator_weights": [ - { - "validator": "02038b238d774c3c4228a0430e3a078e1a2533f9c87cccbcf695637502d8d6057a63", - "weight": "1" - }, - { - "validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", - "weight": "2" - } - ], - "rewards": { - "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc": "749546792", - "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2": "788342677", - "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec": "86241635", - "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c": "941794198" - }, - "next_era_gas_price": 1 - }, - "timestamp": "2024-04-25T20:31:39.895Z", - "era_id": 419571, - "height": 4195710, - "protocol_version": "2.0.0", - "current_gas_price": 1 - }, - "body": { - "transactions": { - "0": [{ - 
"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80" - }, - { - "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81" - }, - { - "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e82" - }], - "1": [{ - "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e83" - }, - { - "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e84" - }, - { - "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e85" - }], - "2": [{ - "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e86" - }, - { - "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e87" - }, - { - "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e88" - }], - "3": [{ - "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89" - }, - { - "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90" - }, - { - "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e91" - }] - } - "rewarded_signatures": [[240], [0], [0]] - } - } - } - } - } - ``` +**Version2 BlockAdded in 2.x:** -
- -
-Emulated 1.x BlockAdded (from Version2 BlockAdded) - - ```json - { - "BlockAdded": { - "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", - "block": { +```json +{ + "BlockAdded": { + "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "block": { + "Version2": { "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", "header": { + "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", "random_bit": false, "accumulated_seed": "a0e424710f4fba036ba450b40f2bd7a842b176cf136f3af1952a2a13eb02616c", "era_end": { - "era_report": { - "equivocators": [ - "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", - "0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", - "020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" - ], - "rewards": [ - { - "validator": "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c", - "amount": 941794198 - }, - { - "validator": "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2", - "amount": 788342677 - }, - { - "validator": "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc", - "amount": 749546792 - }, - { - "validator": "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec", - "amount": 86241635 - } - ], - "inactive_validators": [ - "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", - "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56" - ] - }, + "equivocators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", + "0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", + 
"020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" + ], + "inactive_validators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56" + ], "next_era_validator_weights": [ - { - "validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", - "weight": "2" - }, { "validator": "02038b238d774c3c4228a0430e3a078e1a2533f9c87cccbcf695637502d8d6057a63", "weight": "1" + }, + { + "validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", + "weight": "2" } - ] + ], + "rewards": { + "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc": "749546792", + "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2": "788342677", + "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec": "86241635", + "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c": "941794198" + }, + "next_era_gas_price": 1 }, "timestamp": "2024-04-25T20:31:39.895Z", "era_id": 419571, "height": 4195710, - "protocol_version": "1.0.0" + "protocol_version": "2.0.0", + "current_gas_price": 1 }, "body": { - "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", - "deploy_hashes": [ - "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89", - "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90" - ], - "transfer_hashes": [ - "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80", - "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81" - ] + "transactions": { + "0": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80" + }, + { + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e82" + }], + "1": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e83" + }, + { + 
"Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e84" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e85" + }], + "2": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e86" + }, + { + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e87" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e88" + }], + "3": [{ + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89" + }, + { + "Deploy": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90" + }, + { + "Version1": "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e91" + }] + } + "rewarded_signatures": [[240], [0], [0]] } } } } - ``` - -
- - -### The `TransactionAccepted` event - -Version1 `TransactionAccepted` events will be unwrapped and translated to legacy `DeployAccepted` events on the legacy SSE stream. +} +``` -
-Version1 TransactionAccepted in 2.x +**Emulated 1.x BlockAdded (from Version2 BlockAdded):** - ```json - { - "TransactionAccepted": { - "Deploy": { - "hash": "5a7709969c210db93d3c21bf49f8bf705d7c75a01609f606d04b0211af171d43", - "header": { - "account": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", - "timestamp": "2020-08-07T01:28:27.360Z", - "ttl": "4m 22s", - "gas_price": 72, - "body_hash": "aa2a111c086628a161001160756c5884e32fde0356bb85f484a3e55682ad089f", - "dependencies": [], - "chain_name": "casper-example" - }, - "payment": { - "StoredContractByName": { - "name": "casper-example", - "entry_point": "example-entry-point", - "args": [ - [ - "amount", - { - "cl_type": "U512", - "bytes": "0400f90295", - "parsed": "2500000000" - } - ] - ] - } - }, - "session": { - "StoredContractByHash": { - "hash": "dfb621e7012df48fe1d40fd8015b5e2396c477c9587e996678551148a06d3a89", - "entry_point": "8sY9fUUCwoiFZmxKo8kj", - "args": [ - [ - "YbZWtEuL4D6oMTJmUWvj", - { - "cl_type": { - "List": "U8" - }, - "bytes": "5a000000909ffe7807b03a5db0c3c183648710db16d408d8425a4e373fc0422a4efed1ab0040bc08786553fcac4521528c9fafca0b0fb86f4c6e9fb9db7a1454dda8ed612c4ea4c9a6378b230ae1e3c236e37d6ebee94339a56cb4be582a", - "parsed": [144, 159, 254, 120, 7] - } - ] +```json +{ + "BlockAdded": { + "block_hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "block": { + "hash": "2df9fb8909443fba928ed0536a79780cdb4557d0c05fdf762a1fd61141121422", + "header": { + "parent_hash": "b8f5e9afd2e54856aa1656f962d07158f0fdf9cfac0f9992875f31f6bf2623a2", + "state_root_hash": "cbf02d08bb263aa8915507c172b5f590bbddcd68693fb1c71758b5684b011730", + "body_hash": "6041ab862a1e14a43a8e8a9a42dad27091915a337d18060c22bd3fe7b4f39607", + "random_bit": false, + "accumulated_seed": "a0e424710f4fba036ba450b40f2bd7a842b176cf136f3af1952a2a13eb02616c", + "era_end": { + "era_report": { + "equivocators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc54", + 
"0203e4532e401326892aa8ebc16b6986bd35a6c96a1f16c28db67fd7e87cb6913817", + "020318a52d5b2d545def8bf0ee5ea7ddea52f1fbf106c8b69848e40c5460e20c9f62" + ], + "rewards": [ + { + "validator": "01f6bbd4a6fd10534290c58edb6090723d481cea444a8e8f70458e5136ea8c733c", + "amount": 941794198 + }, + { + "validator": "02028002c063228ff4e9d22d69154c499b86a4f7fdbf1d1e20f168b62da537af64c2", + "amount": 788342677 + }, + { + "validator": "02028b18c949d849b377988ea5191b39340975db25f8b80f37cc829c9f79dbfb19fc", + "amount": 749546792 + }, + { + "validator": "02038efa405f648c72f36b0e5f37db69ab213d44404591b24de21383d8cc161101ec", + "amount": 86241635 + } + ], + "inactive_validators": [ + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc55", + "01cc718e9dea652577bffad3471d0db7d03ba30923780a2a8fd1e3dd9b4e72dc56" ] - } + }, + "next_era_validator_weights": [ + { + "validator": "0102ffd4d2812d68c928712edd012fbcad54367bc6c5c254db22cf696772856566", + "weight": "2" + }, + { + "validator": "02038b238d774c3c4228a0430e3a078e1a2533f9c87cccbcf695637502d8d6057a63", + "weight": "1" + } + ] }, - "approvals": [ - { - "signer": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", - "signature": "025d0a7ba37bebe6774681ca5adecb70fa4eef56821eb344bf0f6867e171a899a87edb2b8bf70f2cb47a1670a6baf2cded1fad535ee53a2f65da91c82ebf30945b" - } + "timestamp": "2024-04-25T20:31:39.895Z", + "era_id": 419571, + "height": 4195710, + "protocol_version": "1.0.0" + }, + "body": { + "proposer": "01d3eec0445635f136ae560b43e9d8f656a6ba925f01293eaf2610b39ebe0fc28d", + "deploy_hashes": [ + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e89", + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e90" + ], + "transfer_hashes": [ + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e80", + "58aca0009fc41bd045d303db9e9f07416ff1fd8c76ecd98545eedf86f9459e81" ] } } } - ``` +} +``` -
+### `TransactionAccepted` events +Version1 `TransactionAccepted` events will be unwrapped and translated to legacy `DeployAccepted` events on the legacy SSE stream. -
-Emulated 1.x DeployAccepted (from Version1) +**Version1 TransactionAccepted in 2.x:** - ```json - { - "DeployAccepted": { +```json +{ + "TransactionAccepted": { + "Deploy": { "hash": "5a7709969c210db93d3c21bf49f8bf705d7c75a01609f606d04b0211af171d43", "header": { "account": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", @@ -505,9 +432,67 @@ Version1 `TransactionAccepted` events will be unwrapped and translated to legacy ] } } - ``` +} +``` + +**Emulated 1.x DeployAccepted (from Version1):** -


+```json +{ + "DeployAccepted": { + "hash": "5a7709969c210db93d3c21bf49f8bf705d7c75a01609f606d04b0211af171d43", + "header": { + "account": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "timestamp": "2020-08-07T01:28:27.360Z", + "ttl": "4m 22s", + "gas_price": 72, + "body_hash": "aa2a111c086628a161001160756c5884e32fde0356bb85f484a3e55682ad089f", + "dependencies": [], + "chain_name": "casper-example" + }, + "payment": { + "StoredContractByName": { + "name": "casper-example", + "entry_point": "example-entry-point", + "args": [ + [ + "amount", + { + "cl_type": "U512", + "bytes": "0400f90295", + "parsed": "2500000000" + } + ] + ] + } + }, + "session": { + "StoredContractByHash": { + "hash": "dfb621e7012df48fe1d40fd8015b5e2396c477c9587e996678551148a06d3a89", + "entry_point": "8sY9fUUCwoiFZmxKo8kj", + "args": [ + [ + "YbZWtEuL4D6oMTJmUWvj", + { + "cl_type": { + "List": "U8" + }, + "bytes": "5a000000909ffe7807b03a5db0c3c183648710db16d408d8425a4e373fc0422a4efed1ab0040bc08786553fcac4521528c9fafca0b0fb86f4c6e9fb9db7a1454dda8ed612c4ea4c9a6378b230ae1e3c236e37d6ebee94339a56cb4be582a", + "parsed": [144, 159, 254, 120, 7] + } + ] + ] + } + }, + "approvals": [ + { + "signer": "02022c07e061d6e0b43bbaa25717b021c2ddc0f701a223946a0883b57ae842917438", + "signature": "025d0a7ba37bebe6774681ca5adecb70fa4eef56821eb344bf0f6867e171a899a87edb2b8bf70f2cb47a1670a6baf2cded1fad535ee53a2f65da91c82ebf30945b" + } + ] + } +} +``` All Version1 variants will be omitted from legacy SSE streams. For example, the following Version1 `TransactionAccepted` event will not be streamed: @@ -517,34 +502,31 @@ All Version1 variants will be omitted from legacy SSE streams. For example, the ... ``` -### The `TransactionExpired` event +### `TransactionExpired` events Other transaction types will be unwrapped and sent as legacy deploy types. A 2.x `TransactionExpired` event will be mapped to a `DeployExpired` event. -
-TransactionExpired mapped to DeployExpired +**TransactionExpired mapped to DeployExpired:** - ```json - { - "TransactionExpired": { - "transaction_hash": { - "Deploy": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" - } +```json +{ + "TransactionExpired": { + "transaction_hash": { + "Deploy": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" } } - ``` +} +``` - ```json - { - "DeployExpired": { - "deploy_hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" - } +```json +{ + "DeployExpired": { + "deploy_hash": "565d7147e28be402c34208a133fd59fde7ac785ae5f0298cb5fb7adfb1b054a8" } - ``` - -


+} +``` All Version1 variants will be omitted from legacy SSE streams. For example, the following Version1 `TransactionExpired` event will not be streamed: @@ -558,7 +540,7 @@ All Version1 variants will be omitted from legacy SSE streams. For example, the } ``` -### The `TransactionProcessed` event +### `TransactionProcessed` events When translating a `TransactionProcessed` event to a legacy `DeployProcessed` event, the following rules apply: From 8f8a786113e05ededca4ee4298a2f3e97f1b2ca0 Mon Sep 17 00:00:00 2001 From: jacek-casper <145967538+jacek-casper@users.noreply.github.com> Date: Fri, 7 Jun 2024 18:54:23 +0100 Subject: [PATCH 35/45] Implement the reward endpoint (#321) * Implement a reward endpoint Signed-off-by: Jacek Malec <145967538+jacek-casper@users.noreply.github.com> * Map new errors * Error code update * Update error handling * Make errors more consistent --------- Signed-off-by: Jacek Malec <145967538+jacek-casper@users.noreply.github.com> --- Cargo.lock | 4 +- Cargo.toml | 4 +- resources/test/rpc_schema.json | 129 +++++++++++++++++++++++ rpc_sidecar/src/http_server.rs | 3 +- rpc_sidecar/src/node_client.rs | 38 ++++++- rpc_sidecar/src/rpcs/docs.rs | 8 +- rpc_sidecar/src/rpcs/error.rs | 12 +++ rpc_sidecar/src/rpcs/error_code.rs | 17 ++++ rpc_sidecar/src/rpcs/info.rs | 158 ++++++++++++++++++++++++++++- 9 files changed, 357 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9afe610a..804eeff4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#f803ee53db31edd5f7f3c1fa1e0ec0ea59550158" +source = "git+https://github.com/jacek-casper/casper-node.git?branch=reward-binary-request#41aea404afba337a4ef89fef6089a802228e5680" dependencies = [ "bincode", "bytes", @@ -670,7 +670,7 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = 
"git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#f803ee53db31edd5f7f3c1fa1e0ec0ea59550158" +source = "git+https://github.com/jacek-casper/casper-node.git?branch=reward-binary-request#41aea404afba337a4ef89fef6089a802228e5680" dependencies = [ "base16", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index 4a8f6c46..bd8c00aa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ members = [ anyhow = "1" async-stream = "0.3.4" async-trait = "0.1.77" -casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } -casper-binary-port = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } +casper-types = { git = "https://github.com/jacek-casper/casper-node.git", branch = "reward-binary-request" } +casper-binary-port = { git = "https://github.com/jacek-casper/casper-node.git", branch = "reward-binary-request" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-event-types = { path = "./types", version = "1.0.0" } casper-rpc-sidecar = { path = "./rpc_sidecar", version = "1.0.0" } diff --git a/resources/test/rpc_schema.json b/resources/test/rpc_schema.json index a8d13a23..6cb616c8 100644 --- a/resources/test/rpc_schema.json +++ b/resources/test/rpc_schema.json @@ -1513,6 +1513,106 @@ } ] }, + { + "name": "info_get_reward", + "summary": "returns the reward for a given era and a validator or a delegator", + "params": [ + { + "name": "validator", + "schema": { + "description": "The public key of the validator.", + "$ref": "#/components/schemas/PublicKey" + }, + "required": true + }, + { + "name": "era_identifier", + "schema": { + "description": "The era identifier. If `None`, the last finalized era is used.", + "anyOf": [ + { + "$ref": "#/components/schemas/EraIdentifier" + }, + { + "type": "null" + } + ] + }, + "required": false + }, + { + "name": "delegator", + "schema": { + "description": "The public key of the delegator. 
If `Some`, the rewards for the delegator are returned. If `None`, the rewards for the validator are returned.", + "anyOf": [ + { + "$ref": "#/components/schemas/PublicKey" + }, + { + "type": "null" + } + ] + }, + "required": false + } + ], + "result": { + "name": "info_get_reward_result", + "schema": { + "description": "Result for \"info_get_reward\" RPC response.", + "type": "object", + "required": [ + "api_version", + "era_id", + "reward_amount" + ], + "properties": { + "api_version": { + "description": "The RPC API version.", + "type": "string" + }, + "reward_amount": { + "description": "The total reward amount in the requested era.", + "$ref": "#/components/schemas/U512" + }, + "era_id": { + "description": "The era for which the reward was calculated.", + "$ref": "#/components/schemas/EraId" + } + }, + "additionalProperties": false + } + }, + "examples": [ + { + "name": "info_get_reward_example", + "params": [ + { + "name": "era_identifier", + "value": { + "Era": 1 + } + }, + { + "name": "validator", + "value": "01d9bf2148748a85c89da5aad8ee0b0fc2d105fd39d41a4c796536354f0ae2900c" + }, + { + "name": "delegator", + "value": "01d9bf2148748a85c89da5aad8ee0b0fc2d105fd39d41a4c796536354f0ae2900c" + } + ], + "result": { + "name": "info_get_reward_example_result", + "value": { + "api_version": "2.0.0", + "reward_amount": "42", + "era_id": 1 + } + } + } + ] + }, { "name": "info_get_validator_changes", "summary": "returns status changes of active validators", @@ -7784,6 +7884,35 @@ }, "additionalProperties": false }, + "EraIdentifier": { + "description": "Identifier for an era.", + "oneOf": [ + { + "type": "object", + "required": [ + "Era" + ], + "properties": { + "Era": { + "$ref": "#/components/schemas/EraId" + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": [ + "Block" + ], + "properties": { + "Block": { + "$ref": "#/components/schemas/BlockIdentifier" + } + }, + "additionalProperties": false + } + ] + }, "JsonValidatorChanges": { 
"description": "The changes in a validator's status.", "type": "object", diff --git a/rpc_sidecar/src/http_server.rs b/rpc_sidecar/src/http_server.rs index 4ceb9ed2..43f93bcf 100644 --- a/rpc_sidecar/src/http_server.rs +++ b/rpc_sidecar/src/http_server.rs @@ -6,7 +6,7 @@ use casper_json_rpc::{CorsOrigin, RequestHandlersBuilder}; use crate::{ rpcs::{ - info::{GetPeers, GetStatus, GetTransaction}, + info::{GetPeers, GetReward, GetStatus, GetTransaction}, state::{GetAddressableEntity, QueryBalanceDetails}, }, NodeClient, @@ -54,6 +54,7 @@ pub async fn run( GetTransaction::register_as_handler(node.clone(), &mut handlers); GetPeers::register_as_handler(node.clone(), &mut handlers); GetStatus::register_as_handler(node.clone(), &mut handlers); + GetReward::register_as_handler(node.clone(), &mut handlers); GetEraInfoBySwitchBlock::register_as_handler(node.clone(), &mut handlers); GetEraSummary::register_as_handler(node.clone(), &mut handlers); GetAuctionInfo::register_as_handler(node.clone(), &mut handlers); diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 27503408..293b130f 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -14,15 +14,16 @@ use tokio_util::codec::Framed; use casper_binary_port::{ BalanceResponse, BinaryMessage, BinaryMessageCodec, BinaryRequest, BinaryRequestHeader, BinaryResponse, BinaryResponseAndRequest, ConsensusValidatorChanges, DictionaryItemIdentifier, - DictionaryQueryResult, ErrorCode, GetRequest, GetTrieFullResult, GlobalStateQueryResult, - GlobalStateRequest, InformationRequest, KeyPrefix, NodeStatus, PayloadEntity, PurseIdentifier, - RecordId, SpeculativeExecutionResult, TransactionWithExecutionInfo, + DictionaryQueryResult, EraIdentifier, ErrorCode, GetRequest, GetTrieFullResult, + GlobalStateQueryResult, GlobalStateRequest, InformationRequest, KeyPrefix, NodeStatus, + PayloadEntity, PurseIdentifier, RecordId, RewardResponse, SpeculativeExecutionResult, + 
TransactionWithExecutionInfo, }; use casper_types::{ bytesrepr::{self, FromBytes, ToBytes}, AvailableBlockRange, BlockHash, BlockHeader, BlockIdentifier, ChainspecRawBytes, Digest, - GlobalStateIdentifier, Key, KeyTag, Peers, ProtocolVersion, SignedBlock, StoredValue, - Transaction, TransactionHash, Transfer, + GlobalStateIdentifier, Key, KeyTag, Peers, ProtocolVersion, PublicKey, SignedBlock, + StoredValue, Transaction, TransactionHash, Transfer, }; use std::{ fmt::{self, Display, Formatter}, @@ -238,6 +239,24 @@ pub trait NodeClient: Send + Sync { let resp = self.read_info(InformationRequest::NodeStatus).await?; parse_response::(&resp.into())?.ok_or(Error::EmptyEnvelope) } + + async fn read_reward( + &self, + era_identifier: Option, + validator: PublicKey, + delegator: Option, + ) -> Result, Error> { + let validator = validator.into(); + let delegator = delegator.map(Into::into); + let resp = self + .read_info(InformationRequest::Reward { + era_identifier, + validator, + delegator, + }) + .await?; + parse_response::(&resp.into()) + } } #[derive(Debug, thiserror::Error, PartialEq, Eq)] @@ -497,6 +516,12 @@ pub enum Error { InvalidTransaction(InvalidTransactionOrDeploy), #[error("speculative execution has failed: {0}")] SpecExecutionFailed(String), + #[error("the switch block for the requested era was not found")] + SwitchBlockNotFound, + #[error("the parent of the switch block for the requested era was not found")] + SwitchBlockParentNotFound, + #[error("cannot serve rewards stored in V1 format")] + UnsupportedRewardsV1Request, #[error("received a response with an unsupported protocol version: {0}")] UnsupportedProtocolVersion(ProtocolVersion), #[error("received an unexpected node error: {message} ({code})")] @@ -509,6 +534,9 @@ impl Error { Ok(ErrorCode::FunctionDisabled) => Self::FunctionIsDisabled, Ok(ErrorCode::RootNotFound) => Self::UnknownStateRootHash, Ok(ErrorCode::FailedQuery) => Self::QueryFailedToExecute, + Ok(ErrorCode::SwitchBlockNotFound) => 
Self::SwitchBlockNotFound, + Ok(ErrorCode::SwitchBlockParentNotFound) => Self::SwitchBlockParentNotFound, + Ok(ErrorCode::UnsupportedRewardsV1Request) => Self::UnsupportedRewardsV1Request, Ok( err @ (ErrorCode::InvalidDeployChainName | ErrorCode::InvalidDeployDependenciesNoLongerSupported diff --git a/rpc_sidecar/src/rpcs/docs.rs b/rpc_sidecar/src/rpcs/docs.rs index cb6bbb84..772f892e 100644 --- a/rpc_sidecar/src/rpcs/docs.rs +++ b/rpc_sidecar/src/rpcs/docs.rs @@ -18,7 +18,10 @@ use super::{ chain::{ GetBlock, GetBlockTransfers, GetEraInfoBySwitchBlock, GetEraSummary, GetStateRootHash, }, - info::{GetChainspec, GetDeploy, GetPeers, GetStatus, GetTransaction, GetValidatorChanges}, + info::{ + GetChainspec, GetDeploy, GetPeers, GetReward, GetStatus, GetTransaction, + GetValidatorChanges, + }, state::{ GetAccountInfo, GetAddressableEntity, GetAuctionInfo, GetBalance, GetDictionaryItem, GetItem, QueryBalance, QueryBalanceDetails, QueryGlobalState, @@ -86,6 +89,9 @@ pub(crate) static OPEN_RPC_SCHEMA: Lazy = Lazy::new(|| { ); schema.push_without_params::("returns a list of peers connected to the node"); schema.push_without_params::("returns the current status of the node"); + schema.push_with_params::( + "returns the reward for a given era and a validator or a delegator", + ); schema .push_without_params::("returns status changes of active validators"); schema.push_without_params::( diff --git a/rpc_sidecar/src/rpcs/error.rs b/rpc_sidecar/src/rpcs/error.rs index fa6853c0..9444bf57 100644 --- a/rpc_sidecar/src/rpcs/error.rs +++ b/rpc_sidecar/src/rpcs/error.rs @@ -37,6 +37,8 @@ pub enum Error { AccountNotFound, #[error("the requested addressable entity was not found")] AddressableEntityNotFound, + #[error("the requested reward was not found")] + RewardNotFound, #[error("the requested account has been migrated to an addressable entity")] AccountMigratedToEntity, #[error("the provided dictionary value is {0} instead of a URef")] @@ -82,11 +84,21 @@ impl Error { 
Error::NodeRequest(_, NodeClientError::FunctionIsDisabled) => { Some(ErrorCode::FunctionIsDisabled) } + Error::NodeRequest(_, NodeClientError::SwitchBlockNotFound) => { + Some(ErrorCode::SwitchBlockNotFound) + } + Error::NodeRequest(_, NodeClientError::SwitchBlockParentNotFound) => { + Some(ErrorCode::SwitchBlockParentNotFound) + } + Error::NodeRequest(_, NodeClientError::UnsupportedRewardsV1Request) => { + Some(ErrorCode::UnsupportedRewardsV1Request) + } Error::InvalidPurseURef(_) => Some(ErrorCode::FailedToParseGetBalanceURef), Error::InvalidDictionaryKey(_) => Some(ErrorCode::FailedToParseQueryKey), Error::MainPurseNotFound => Some(ErrorCode::NoSuchMainPurse), Error::AccountNotFound => Some(ErrorCode::NoSuchAccount), Error::AddressableEntityNotFound => Some(ErrorCode::NoSuchAddressableEntity), + Error::RewardNotFound => Some(ErrorCode::NoRewardsFound), Error::AccountMigratedToEntity => Some(ErrorCode::AccountMigratedToEntity), Error::InvalidTypeUnderDictionaryKey(_) | Error::DictionaryKeyNotFound diff --git a/rpc_sidecar/src/rpcs/error_code.rs b/rpc_sidecar/src/rpcs/error_code.rs index 9e222bdb..085c08d5 100644 --- a/rpc_sidecar/src/rpcs/error_code.rs +++ b/rpc_sidecar/src/rpcs/error_code.rs @@ -53,6 +53,14 @@ pub enum ErrorCode { NoSuchAddressableEntity = -32020, /// The requested account has been migrated to an addressable entity. AccountMigratedToEntity = -32021, + /// The requested reward was not found. + NoRewardsFound = -32022, + /// The switch block for the requested era was not found. + SwitchBlockNotFound = -32023, + /// The parent of the switch block for the requested era was not found. 
+ SwitchBlockParentNotFound = -32024, + /// Cannot serve rewards stored in V1 format + UnsupportedRewardsV1Request = -32025, } impl From for (i64, &'static str) { @@ -92,6 +100,15 @@ impl From for (i64, &'static str) { error_code as i64, "Account migrated to an addressable entity", ), + ErrorCode::NoRewardsFound => (error_code as i64, "No rewards found"), + ErrorCode::SwitchBlockNotFound => (error_code as i64, "Switch block not found"), + ErrorCode::SwitchBlockParentNotFound => { + (error_code as i64, "Switch block parent not found") + } + ErrorCode::UnsupportedRewardsV1Request => ( + error_code as i64, + "Cannot serve rewards stored in V1 format", + ), } } } diff --git a/rpc_sidecar/src/rpcs/info.rs b/rpc_sidecar/src/rpcs/info.rs index 72973459..16f1aa1b 100644 --- a/rpc_sidecar/src/rpcs/info.rs +++ b/rpc_sidecar/src/rpcs/info.rs @@ -3,16 +3,17 @@ use std::{collections::BTreeMap, str, sync::Arc}; use async_trait::async_trait; -use casper_binary_port::MinimalBlockInfo; +use casper_binary_port::{EraIdentifier as PortEraIdentifier, MinimalBlockInfo}; use once_cell::sync::Lazy; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use casper_types::{ execution::{ExecutionResult, ExecutionResultV2}, - ActivationPoint, AvailableBlockRange, Block, BlockHash, BlockSynchronizerStatus, - ChainspecRawBytes, Deploy, DeployHash, Digest, EraId, ExecutionInfo, NextUpgrade, Peers, - ProtocolVersion, PublicKey, TimeDiff, Timestamp, Transaction, TransactionHash, ValidatorChange, + ActivationPoint, AvailableBlockRange, Block, BlockHash, BlockIdentifier, + BlockSynchronizerStatus, ChainspecRawBytes, Deploy, DeployHash, Digest, EraId, ExecutionInfo, + NextUpgrade, Peers, ProtocolVersion, PublicKey, TimeDiff, Timestamp, Transaction, + TransactionHash, ValidatorChange, U512, }; use super::{ @@ -92,6 +93,16 @@ static GET_STATUS_RESULT: Lazy = Lazy::new(|| GetStatusResult { #[cfg(test)] build_version: String::from("1.0.0-xxxxxxxxx@DEBUG"), }); +static GET_REWARD_PARAMS: Lazy = 
Lazy::new(|| GetRewardParams { + era_identifier: Some(EraIdentifier::Era(EraId::new(1))), + validator: PublicKey::example().clone(), + delegator: Some(PublicKey::example().clone()), +}); +static GET_REWARD_RESULT: Lazy = Lazy::new(|| GetRewardResult { + api_version: DOCS_EXAMPLE_API_VERSION, + reward_amount: U512::from(42), + era_id: EraId::new(1), +}); /// Params for "info_get_deploy" RPC request. #[derive(Serialize, Deserialize, Debug, JsonSchema)] @@ -495,6 +506,84 @@ impl RpcWithoutParams for GetStatus { } } +/// Params for "info_get_reward" RPC request. +#[derive(Serialize, Deserialize, Debug, JsonSchema)] +#[serde(deny_unknown_fields)] +pub struct GetRewardParams { + /// The era identifier. If `None`, the last finalized era is used. + pub era_identifier: Option, + /// The public key of the validator. + pub validator: PublicKey, + /// The public key of the delegator. If `Some`, the rewards for the delegator are returned. + /// If `None`, the rewards for the validator are returned. + pub delegator: Option, +} + +impl DocExample for GetRewardParams { + fn doc_example() -> &'static Self { + &GET_REWARD_PARAMS + } +} + +/// Identifier for an era. +#[derive(Serialize, Deserialize, Debug, JsonSchema)] +pub enum EraIdentifier { + Era(EraId), + Block(BlockIdentifier), +} + +/// Result for "info_get_reward" RPC response. +#[derive(PartialEq, Eq, Serialize, Deserialize, Debug, JsonSchema)] +#[serde(deny_unknown_fields)] +pub struct GetRewardResult { + /// The RPC API version. + #[schemars(with = "String")] + pub api_version: ApiVersion, + /// The total reward amount in the requested era. + pub reward_amount: U512, + /// The era for which the reward was calculated. + pub era_id: EraId, +} + +impl DocExample for GetRewardResult { + fn doc_example() -> &'static Self { + &GET_REWARD_RESULT + } +} + +/// "info_get_reward" RPC. 
+pub struct GetReward {} + +#[async_trait] +impl RpcWithParams for GetReward { + const METHOD: &'static str = "info_get_reward"; + type RequestParams = GetRewardParams; + type ResponseResult = GetRewardResult; + + async fn do_handle_request( + node_client: Arc, + params: Self::RequestParams, + ) -> Result { + let identifier = match params.era_identifier { + Some(EraIdentifier::Era(era_id)) => Some(PortEraIdentifier::Era(era_id)), + Some(EraIdentifier::Block(block_id)) => Some(PortEraIdentifier::Block(block_id)), + None => None, + }; + + let result = node_client + .read_reward(identifier, params.validator, params.delegator) + .await + .map_err(|err| Error::NodeRequest("rewards", err))? + .ok_or(Error::RewardNotFound)?; + + Ok(Self::ResponseResult { + api_version: CURRENT_API_VERSION, + reward_amount: result.amount(), + era_id: result.era_id(), + }) + } +} + #[cfg(not(test))] fn version_string() -> String { use std::env; @@ -526,7 +615,7 @@ mod tests { use crate::{rpcs::ErrorCode, ClientError, SUPPORTED_PROTOCOL_VERSION}; use casper_binary_port::{ BinaryRequest, BinaryResponse, BinaryResponseAndRequest, GetRequest, InformationRequest, - InformationRequestTag, TransactionWithExecutionInfo, + InformationRequestTag, RewardResponse, TransactionWithExecutionInfo, }; use casper_types::{ bytesrepr::{FromBytes, ToBytes}, @@ -715,6 +804,38 @@ mod tests { assert_eq!(err.code(), ErrorCode::VariantMismatch as i64); } + #[tokio::test] + async fn should_return_rewards() { + let rng = &mut TestRng::new(); + let reward_amount = U512::from(rng.gen_range(0..1000)); + let era_id = EraId::new(rng.gen_range(0..1000)); + let validator = PublicKey::random(rng); + let delegator = rng.gen::().then(|| PublicKey::random(rng)); + + let resp = GetReward::do_handle_request( + Arc::new(RewardMock { + reward_amount, + era_id, + }), + GetRewardParams { + era_identifier: Some(EraIdentifier::Era(era_id)), + validator: validator.clone(), + delegator, + }, + ) + .await + .expect("should handle 
request"); + + assert_eq!( + resp, + GetRewardResult { + api_version: CURRENT_API_VERSION, + reward_amount, + era_id, + } + ); + } + struct ValidTransactionMock { transaction_bytes: Vec, should_request_approvals: bool, @@ -763,4 +884,31 @@ mod tests { } } } + + struct RewardMock { + reward_amount: U512, + era_id: EraId, + } + + #[async_trait] + impl NodeClient for RewardMock { + async fn send_request( + &self, + req: BinaryRequest, + ) -> Result { + match req { + BinaryRequest::Get(GetRequest::Information { info_type_tag, .. }) + if InformationRequestTag::try_from(info_type_tag) + == Ok(InformationRequestTag::Reward) => + { + let resp = RewardResponse::new(self.reward_amount, self.era_id); + Ok(BinaryResponseAndRequest::new( + BinaryResponse::from_value(resp, SUPPORTED_PROTOCOL_VERSION), + &[], + )) + } + req => unimplemented!("unexpected request: {:?}", req), + } + } + } } From d362961641725c9451107fa7aae7544a54672e7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Wed, 12 Jun 2024 11:32:57 +0200 Subject: [PATCH 36/45] Update expected schemas for min/max delegation amounts --- resources/test/rpc_schema.json | 14 ++++++++++++++ resources/test/speculative_rpc_schema.json | 14 ++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/resources/test/rpc_schema.json b/resources/test/rpc_schema.json index 6cb616c8..c43a220e 100644 --- a/resources/test/rpc_schema.json +++ b/resources/test/rpc_schema.json @@ -4972,6 +4972,8 @@ "bonding_purse", "delegation_rate", "inactive", + "maximum_delegation_amount", + "minimum_delegation_amount", "staked_amount", "validator_public_key" ], @@ -5020,6 +5022,18 @@ "inactive": { "description": "`true` if validator has been \"evicted\"", "type": "boolean" + }, + "minimum_delegation_amount": { + "description": "Minimum allowed delegation amount in motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 + }, + "maximum_delegation_amount": { + "description": "Maximum allowed delegation amount in 
motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 } }, "additionalProperties": false diff --git a/resources/test/speculative_rpc_schema.json b/resources/test/speculative_rpc_schema.json index a35dbdb0..bb038eed 100644 --- a/resources/test/speculative_rpc_schema.json +++ b/resources/test/speculative_rpc_schema.json @@ -2947,6 +2947,8 @@ "bonding_purse", "delegation_rate", "inactive", + "maximum_delegation_amount", + "minimum_delegation_amount", "staked_amount", "validator_public_key" ], @@ -2995,6 +2997,18 @@ "inactive": { "description": "`true` if validator has been \"evicted\"", "type": "boolean" + }, + "minimum_delegation_amount": { + "description": "Minimum allowed delegation amount in motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 + }, + "maximum_delegation_amount": { + "description": "Maximum allowed delegation amount in motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 } }, "additionalProperties": false From 3818015881e5616f70c8ee89ed798f7b256fb469 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Wed, 12 Jun 2024 15:43:35 +0200 Subject: [PATCH 37/45] Point `casper-types` and `casper-binary-port` back to `feat-2.0` --- Cargo.lock | 4 ++-- Cargo.toml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 804eeff4..399502b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/jacek-casper/casper-node.git?branch=reward-binary-request#41aea404afba337a4ef89fef6089a802228e5680" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#d15d2a15c95594cbe84016cd87db5f726c3349a1" dependencies = [ "bincode", "bytes", @@ -670,7 +670,7 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = 
"git+https://github.com/jacek-casper/casper-node.git?branch=reward-binary-request#41aea404afba337a4ef89fef6089a802228e5680" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#d15d2a15c95594cbe84016cd87db5f726c3349a1" dependencies = [ "base16", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index bd8c00aa..0c64330a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ members = [ anyhow = "1" async-stream = "0.3.4" async-trait = "0.1.77" -casper-types = { git = "https://github.com/jacek-casper/casper-node.git", branch = "reward-binary-request" } -casper-binary-port = { git = "https://github.com/jacek-casper/casper-node.git", branch = "reward-binary-request" } +casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } +casper-binary-port = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-event-types = { path = "./types", version = "1.0.0" } casper-rpc-sidecar = { path = "./rpc_sidecar", version = "1.0.0" } From 843d24b3d9891877e84e2f500f0201c1a4ac5729 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Wed, 12 Jun 2024 11:32:57 +0200 Subject: [PATCH 38/45] Update expected schemas for min/max delegation amounts --- resources/test/rpc_schema.json | 14 ++++++++++++++ resources/test/speculative_rpc_schema.json | 14 ++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/resources/test/rpc_schema.json b/resources/test/rpc_schema.json index 6cb616c8..c43a220e 100644 --- a/resources/test/rpc_schema.json +++ b/resources/test/rpc_schema.json @@ -4972,6 +4972,8 @@ "bonding_purse", "delegation_rate", "inactive", + "maximum_delegation_amount", + "minimum_delegation_amount", "staked_amount", "validator_public_key" ], @@ -5020,6 +5022,18 @@ "inactive": { "description": "`true` if validator has been \"evicted\"", "type": "boolean" + }, + "minimum_delegation_amount": 
{ + "description": "Minimum allowed delegation amount in motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 + }, + "maximum_delegation_amount": { + "description": "Maximum allowed delegation amount in motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 } }, "additionalProperties": false diff --git a/resources/test/speculative_rpc_schema.json b/resources/test/speculative_rpc_schema.json index a35dbdb0..bb038eed 100644 --- a/resources/test/speculative_rpc_schema.json +++ b/resources/test/speculative_rpc_schema.json @@ -2947,6 +2947,8 @@ "bonding_purse", "delegation_rate", "inactive", + "maximum_delegation_amount", + "minimum_delegation_amount", "staked_amount", "validator_public_key" ], @@ -2995,6 +2997,18 @@ "inactive": { "description": "`true` if validator has been \"evicted\"", "type": "boolean" + }, + "minimum_delegation_amount": { + "description": "Minimum allowed delegation amount in motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 + }, + "maximum_delegation_amount": { + "description": "Maximum allowed delegation amount in motes", + "type": "integer", + "format": "uint64", + "minimum": 0.0 } }, "additionalProperties": false From 91972f4215091f2855b455d43703873b6fabe9f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Thu, 13 Jun 2024 17:30:50 +0200 Subject: [PATCH 39/45] Update dependencies --- Cargo.lock | 4 ++-- Cargo.toml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 67296d9b..399502b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#ae8a555ac677005402cb6d7688bb1faee03f9b4b" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#d15d2a15c95594cbe84016cd87db5f726c3349a1" dependencies = [ "bincode", "bytes", @@ -670,7 +670,7 @@ 
dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#ae8a555ac677005402cb6d7688bb1faee03f9b4b" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#d15d2a15c95594cbe84016cd87db5f726c3349a1" dependencies = [ "base16", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index 3133d674..8b978ef7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ members = [ anyhow = "1" async-stream = "0.3.4" async-trait = "0.1.77" -casper-types = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } -casper-binary-port = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } +casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } +casper-binary-port = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-event-types = { path = "./types", version = "1.0.0" } casper-rpc-sidecar = { path = "./rpc_sidecar", version = "1.0.0" } From 5346eb3e45885fbb829acde888957dcd49d8b56e Mon Sep 17 00:00:00 2001 From: ipopescu Date: Fri, 14 Jun 2024 14:40:37 +0200 Subject: [PATCH 40/45] Add installation steps --- resources/ETC_README.md | 41 ++++++++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/resources/ETC_README.md b/resources/ETC_README.md index 92a49c33..9ac25ceb 100644 --- a/resources/ETC_README.md +++ b/resources/ETC_README.md @@ -7,7 +7,8 @@ This page contains specific instructions for node operators. Before proceeding, - [Running and testing the Sidecar](../README.md#running-and-testing-the-sidecar) - [Troubleshooting tips](../README.md#troubleshooting-tips) -## Sidecar Configuration on the Node + +## Configuring the Sidecar The file `/etc/casper-sidecar/config.toml` holds a default configuration. 
This should work if installed on a Casper node. @@ -15,16 +16,30 @@ If you install the Sidecar on an external server, you must update the `ip-addres For more information, including how to setup the SSE, RPC, REST, and Admin servers, read the [configuration options](../README.md#configuring-the-sidecar) in the main README. -## Storage on the Node -This directory stores the SSE cache and a database if the Sidecar was configured to use one. +## Installing the Sidecar on a Node -```toml -[storage] -storage_path = "/var/lib/casper-sidecar" +The following command will install the Debian package for the Casper Sidecar service on various flavors of Linux. + + + +```bash +sudo apt install ./casper-sidecar_0.1.0-0_amd64.deb ``` -The DB setup is described [here](../README#database-connectivity-setup). +Check the service status: + +```bash +systemctl status casper-sidecar +``` + +Check the logs and make sure the service is running as expected. + +```bash +journalctl --no-pager -u casper-sidecar +``` + +If you see any errors, you may need to [update the configuration](#configuring-the-service) and restart the service with the commands below. ## Running the Sidecar on a Node @@ -38,9 +53,17 @@ The `casper-sidecar` service starts after installation, using the systemd servic `sudo systemctl start casper-sidecar.service` -### Logs -`journalctl --no-pager -u casper-sidecar` +## Sidecar Storage + +This directory stores the SSE cache and a database if the Sidecar was configured to use one. + +```toml +[storage] +storage_path = "/var/lib/casper-sidecar" +``` + +The DB setup is described [here](../README#database-connectivity-setup). 
## Swagger Documentation From 4eaa6a369fe0b6042ab82574d40425444c54efe1 Mon Sep 17 00:00:00 2001 From: ipopescu Date: Fri, 14 Jun 2024 16:20:50 +0200 Subject: [PATCH 41/45] Add usage steps for replaying events --- USAGE.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/USAGE.md b/USAGE.md index 896d3d63..66cb5dd0 100644 --- a/USAGE.md +++ b/USAGE.md @@ -146,6 +146,24 @@ data:"Shutdown" id:8 ``` +## Replaying the Event Stream + +This command will replay the event stream from an old event onward. The server will replay all the cached events if the ID is 0 or if you specify an event ID already purged from the node's cache. + +Replace the `HOST`, `PORT`, and `ID` fields with the values needed. + +```sh +curl -sN http://HOST:PORT/events?start_from=ID +``` + +**Example:** + +```sh +curl -sN http://65.21.235.219:9999/events?start_from=29267508 +``` + +Note that certain shells like `zsh` may require an escape character before the question mark. + ## The REST Server The Sidecar provides a RESTful endpoint for useful queries about the state of the network. 
From 4127a853f5a0162485eedbf564e36eef5a9c5e7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Tue, 18 Jun 2024 16:09:25 +0200 Subject: [PATCH 42/45] Temporarily point to repo with binary port changes --- Cargo.lock | 4 ++-- Cargo.toml | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 399502b5..686b1685 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#d15d2a15c95594cbe84016cd87db5f726c3349a1" +source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#62e9eee0a3b5aeaa3debac06bde76419c0b0ac89" dependencies = [ "bincode", "bytes", @@ -670,7 +670,7 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#d15d2a15c95594cbe84016cd87db5f726c3349a1" +source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#62e9eee0a3b5aeaa3debac06bde76419c0b0ac89" dependencies = [ "base16", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index 8b978ef7..1a2b8da0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ members = [ anyhow = "1" async-stream = "0.3.4" async-trait = "0.1.77" -casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } -casper-binary-port = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } +casper-types = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } +casper-binary-port = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-event-types = { path = "./types", version = "1.0.0" } casper-rpc-sidecar = { path = "./rpc_sidecar", version = "1.0.0" } @@ -30,4 
+30,4 @@ tokio = "1.23.1" toml = "0.5.8" tracing = { version = "0", default-features = false } tracing-subscriber = "0" -serde = { version = "1", default-features = false } \ No newline at end of file +serde = { version = "1", default-features = false } From 8201b54f7adf83571322e9a0c302df0e6d457c85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Tue, 18 Jun 2024 17:48:19 +0200 Subject: [PATCH 43/45] Bring back dependencies to `feat-2.0` --- Cargo.lock | 4 ++-- Cargo.toml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2f799256..43f4883e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "casper-binary-port" version = "1.0.0" -source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#298518cae099c67a09b76c532ed3a9c09ff75296" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#93974105e0ee2ce152891465e0f7661c701c0396" dependencies = [ "bincode", "bytes", @@ -670,7 +670,7 @@ dependencies = [ [[package]] name = "casper-types" version = "5.0.0" -source = "git+https://github.com/rafal-ch/casper-node.git?branch=binary_port_fixes#298518cae099c67a09b76c532ed3a9c09ff75296" +source = "git+https://github.com/casper-network/casper-node.git?branch=feat-2.0#93974105e0ee2ce152891465e0f7661c701c0396" dependencies = [ "base16", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index 1a2b8da0..0c64330a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ members = [ anyhow = "1" async-stream = "0.3.4" async-trait = "0.1.77" -casper-types = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } -casper-binary-port = { git = "https://github.com/rafal-ch/casper-node.git", branch = "binary_port_fixes" } +casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } +casper-binary-port = { git = 
"https://github.com/casper-network/casper-node.git", branch = "feat-2.0" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-event-types = { path = "./types", version = "1.0.0" } casper-rpc-sidecar = { path = "./rpc_sidecar", version = "1.0.0" } From c617b17552352aac8d81af5eea849ab0a5448eb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Tue, 18 Jun 2024 17:48:42 +0200 Subject: [PATCH 44/45] Update schemas to cover `transaction_category` --- resources/test/rpc_schema.json | 52 +++------------------- resources/test/speculative_rpc_schema.json | 50 ++------------------- 2 files changed, 9 insertions(+), 93 deletions(-) diff --git a/resources/test/rpc_schema.json b/resources/test/rpc_schema.json index c43a220e..0e384132 100644 --- a/resources/test/rpc_schema.json +++ b/resources/test/rpc_schema.json @@ -222,7 +222,7 @@ ], "target": "Native", "entry_point": "Transfer", - "transaction_kind": 0, + "transaction_category": 0, "scheduling": "Standard" }, "approvals": [ @@ -557,7 +557,7 @@ ], "target": "Native", "entry_point": "Transfer", - "transaction_kind": 0, + "transaction_category": 0, "scheduling": "Standard" }, "approvals": [ @@ -3259,7 +3259,7 @@ "additionalProperties": false }, { - "description": "The cost of the transaction is determined by the cost table, per the transaction kind.", + "description": "The cost of the transaction is determined by the cost table, per the transaction category.", "type": "object", "required": [ "Fixed" @@ -3355,7 +3355,7 @@ "entry_point", "scheduling", "target", - "transaction_kind" + "transaction_category" ], "properties": { "args": { @@ -3367,7 +3367,7 @@ "entry_point": { "$ref": "#/components/schemas/TransactionEntryPoint" }, - "transaction_kind": { + "transaction_category": { "type": "integer", "format": "uint8", "minimum": 0.0 @@ -3434,19 +3434,10 @@ "Session": { "type": "object", "required": [ - "kind", "module_bytes", "runtime" ], "properties": { - "kind": { - "description": 
"The kind of session.", - "allOf": [ - { - "$ref": "#/components/schemas/TransactionSessionKind" - } - ] - }, "module_bytes": { "description": "The compiled Wasm.", "allOf": [ @@ -3585,39 +3576,6 @@ } ] }, - "TransactionSessionKind": { - "description": "Session kind of a Transaction.", - "oneOf": [ - { - "description": "A standard (non-special-case) session.\n\nThis kind of session is not allowed to install or upgrade a stored contract, but can call stored contracts.", - "type": "string", - "enum": [ - "Standard" - ] - }, - { - "description": "A session which installs a stored contract.", - "type": "string", - "enum": [ - "Installer" - ] - }, - { - "description": "A session which upgrades a previously-installed stored contract. Such a session must have \"package_id: PackageIdentifier\" runtime arg present.", - "type": "string", - "enum": [ - "Upgrader" - ] - }, - { - "description": "A session which doesn't call any stored contracts.\n\nThis kind of session is not allowed to install or upgrade a stored contract.", - "type": "string", - "enum": [ - "Isolated" - ] - } - ] - }, "TransactionEntryPoint": { "description": "Entry point of a Transaction.", "oneOf": [ diff --git a/resources/test/speculative_rpc_schema.json b/resources/test/speculative_rpc_schema.json index bb038eed..391d0eac 100644 --- a/resources/test/speculative_rpc_schema.json +++ b/resources/test/speculative_rpc_schema.json @@ -230,7 +230,7 @@ ], "target": "Native", "entry_point": "Transfer", - "transaction_kind": 0, + "transaction_category": 0, "scheduling": "Standard" }, "approvals": [ @@ -3777,7 +3777,7 @@ "additionalProperties": false }, { - "description": "The cost of the transaction is determined by the cost table, per the transaction kind.", + "description": "The cost of the transaction is determined by the cost table, per the transaction category.", "type": "object", "required": [ "Fixed" @@ -3838,7 +3838,7 @@ "entry_point", "scheduling", "target", - "transaction_kind" + "transaction_category" ], 
"properties": { "args": { @@ -3850,7 +3850,7 @@ "entry_point": { "$ref": "#/components/schemas/TransactionEntryPoint" }, - "transaction_kind": { + "transaction_category": { "type": "integer", "format": "uint8", "minimum": 0.0 @@ -3917,19 +3917,10 @@ "Session": { "type": "object", "required": [ - "kind", "module_bytes", "runtime" ], "properties": { - "kind": { - "description": "The kind of session.", - "allOf": [ - { - "$ref": "#/components/schemas/TransactionSessionKind" - } - ] - }, "module_bytes": { "description": "The compiled Wasm.", "allOf": [ @@ -4049,39 +4040,6 @@ } ] }, - "TransactionSessionKind": { - "description": "Session kind of a Transaction.", - "oneOf": [ - { - "description": "A standard (non-special-case) session.\n\nThis kind of session is not allowed to install or upgrade a stored contract, but can call stored contracts.", - "type": "string", - "enum": [ - "Standard" - ] - }, - { - "description": "A session which installs a stored contract.", - "type": "string", - "enum": [ - "Installer" - ] - }, - { - "description": "A session which upgrades a previously-installed stored contract. 
Such a session must have \"package_id: PackageIdentifier\" runtime arg present.", - "type": "string", - "enum": [ - "Upgrader" - ] - }, - { - "description": "A session which doesn't call any stored contracts.\n\nThis kind of session is not allowed to install or upgrade a stored contract.", - "type": "string", - "enum": [ - "Isolated" - ] - } - ] - }, "TransactionEntryPoint": { "description": "Entry point of a Transaction.", "oneOf": [ From bab93a8b7f8e4e668703f66faff8a074b2c16ae3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Chabowski?= Date: Tue, 18 Jun 2024 17:58:05 +0200 Subject: [PATCH 45/45] Update test fixture to cover `transaction_category` --- types/src/legacy_sse_data/fixtures.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/types/src/legacy_sse_data/fixtures.rs b/types/src/legacy_sse_data/fixtures.rs index ba0d1bd5..b51c4d8f 100644 --- a/types/src/legacy_sse_data/fixtures.rs +++ b/types/src/legacy_sse_data/fixtures.rs @@ -425,7 +425,7 @@ const RAW_TRANSACTION_ACCEPTED: &str = r#" "scheduling": { "FutureTimestamp": "2020-08-07T01:32:59.428Z" }, - "transaction_kind": 0 + "transaction_category": 0 }, "approvals": [ {