diff --git a/.github/workflows/deno_test.yml b/.github/workflows/deno_test.yml index ceba1d77d..82656e1f9 100644 --- a/.github/workflows/deno_test.yml +++ b/.github/workflows/deno_test.yml @@ -5,20 +5,6 @@ on: workflow_call: jobs: - formatting: - runs-on: ubuntu-latest - timeout-minutes: 5 - - steps: - - uses: actions/checkout@v4 - with: - sparse-checkout: indexer - - uses: denoland/setup-deno@v1 - with: - deno-version: vx.x.x - - name: Run formatting - run: deno fmt --check indexer/ - test: runs-on: ubuntu-latest timeout-minutes: 30 @@ -29,7 +15,10 @@ jobs: deno-version: v1.x # Checkout target branch and run tests - name: Run Tests with Coverage on target branch - run: DEFAULT_BLOCK_GAS_LIMIT=7000000 STARKNET_NETWORK=http://0.0.0.0:1010 KAKAROT_ADDRESS=0x11c5faab8a76b3caff6e243b8d13059a7fb723a0ca12bbaadde95fb9e501bda deno test --allow-env --allow-read --coverage=cov_profile + run: + DEFAULT_BLOCK_GAS_LIMIT=7000000 STARKNET_NETWORK=http://0.0.0.0:1010 + KAKAROT_ADDRESS=0x11c5faab8a76b3caff6e243b8d13059a7fb723a0ca12bbaadde95fb9e501bda + deno test --allow-env --allow-read --coverage=cov_profile - name: Generate Coverage Report on target branch run: deno coverage cov_profile --html - name: Install bc @@ -47,7 +36,10 @@ jobs: ref: ${{ github.base_ref }} clean: false - name: Run Tests with Coverage on base branch - run: DEFAULT_BLOCK_GAS_LIMIT=7000000 STARKNET_NETWORK=http://0.0.0.0:1010 KAKAROT_ADDRESS=0x11c5faab8a76b3caff6e243b8d13059a7fb723a0ca12bbaadde95fb9e501bda deno test --allow-env --allow-read --coverage=cov_profile_main + run: + DEFAULT_BLOCK_GAS_LIMIT=7000000 STARKNET_NETWORK=http://0.0.0.0:1010 + KAKAROT_ADDRESS=0x11c5faab8a76b3caff6e243b8d13059a7fb723a0ca12bbaadde95fb9e501bda + deno test --allow-env --allow-read --coverage=cov_profile_main - name: Generate HTML report from for the base branch run: deno coverage cov_profile_main --html - name: Extract coverage percentage from HTML for the base branch @@ -65,4 +57,4 @@ jobs: if (( $(echo "$change > 5.0" | bc -l) )); then echo "Coverage dropped by more than 5%!" 
exit 1 - fi \ No newline at end of file + fi diff --git a/.github/workflows/kakarot_release.yml b/.github/workflows/kakarot_release.yml index 848eee8a3..73bca5852 100644 --- a/.github/workflows/kakarot_release.yml +++ b/.github/workflows/kakarot_release.yml @@ -3,7 +3,7 @@ name: release on: release: - types: [ published ] + types: [published] env: REGISTRY_IMAGE: ghcr.io/${{ github.repository }}/node @@ -162,7 +162,9 @@ jobs: platforms: ${{ matrix.platform }} file: ./docker/indexer/Dockerfile labels: ${{ steps.meta.outputs.labels }} - outputs: type=image,name=${{ env.INDEXER_IMAGE }},push-by-digest=true,name-canonical=true,push=true + outputs: + type=image,name=${{ env.INDEXER_IMAGE + }},push-by-digest=true,name-canonical=true,push=true build-args: | APIBARA_DOWNLOAD_URL=${{ matrix.apibara_download_url }} - name: Export digest diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 3fcd873aa..61973b773 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -1,7 +1,7 @@ --- name: Workflow - Pull Request -on: [pull_request] +on: [pull_request, merge_group] concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -19,3 +19,10 @@ jobs: deno-tests: name: Deno tests uses: ./.github/workflows/deno_test.yml + + trunk-check: + name: Trunk check + permissions: + checks: write + contents: read + uses: ./.github/workflows/trunk_check.yml diff --git a/.github/workflows/rust_test.yml b/.github/workflows/rust_test.yml index 277ffd3eb..a98bb948e 100644 --- a/.github/workflows/rust_test.yml +++ b/.github/workflows/rust_test.yml @@ -5,17 +5,6 @@ on: workflow_call: jobs: - fmt: - name: fmt - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@nightly - with: - components: rustfmt - - run: cargo fmt --all --check - test: runs-on: ubuntu-latest-16-cores timeout-minutes: 30 @@ -64,10 +53,6 @@ jobs: # Create dump - name: Create dump run: ./scripts/make_with_env.sh katana-genesis - - name: Lint - run: | - cargo check && - cargo clippy --workspace --all-features --all-targets -- -D warnings - name: Test code run: make test @@ -124,7 +109,7 @@ jobs: strategy: fail-fast: false matrix: - sim: [ ethereum/rpc ] + sim: [ethereum/rpc] include: - sim: ethereum/rpc-compat include: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index b43e62ed8..5a8cb8671 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -46,7 +46,8 @@ jobs: scarb-version: 0.7.0 - name: Run staging script env: - KAKAROT_STAGING_PRIVATE_KEY: ${{ secrets.KAKAROT_STAGING_PRIVATE_KEY }} + KAKAROT_STAGING_PRIVATE_KEY: + ${{ secrets.KAKAROT_STAGING_PRIVATE_KEY }} run: | cd scripts sh upgrade.sh deploy test --staging diff --git a/.github/workflows/trunk_check.yml b/.github/workflows/trunk_check.yml new file mode 100644 index 000000000..aba94d01f --- /dev/null +++ b/.github/workflows/trunk_check.yml @@ -0,0 +1,26 @@ +name: Trunk + +on: + workflow_call: + +permissions: + checks: write # For trunk to post annotations + contents: read # For repo checkout + +jobs: + trunk-check: + name: Trunk + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - run: rustup update + - name: Rust cache + uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: "true" + + - name: Trunk Check + uses: trunk-io/trunk-action@v1 diff --git a/.hooks/pre-push b/.hooks/pre-push deleted file mode 100755 index 234df5b6e..000000000 --- a/.hooks/pre-push +++ 
/dev/null @@ -1,21 +0,0 @@ -#!/bin/sh - -cargo check --all-features -if [ $? -ne 0 ]; then - echo "Pre-push hook failed: cargo check detected errors." - exit 1 -fi - -cargo clippy --workspace --all-features --all-targets -- -D warnings -if [ $? -ne 0 ]; then - echo "Pre-push hook failed: cargo clippy detected warnings or errors." - exit 1 -fi - -cargo +nightly fmt --all --check -if [ $? -ne 0 ]; then - echo "Pre-push hook failed: cargo fmt detected formatting issues." - exit 1 -fi - -exit 0 diff --git a/.trunk/configs/.markdownlint.yaml b/.trunk/configs/.markdownlint.yaml new file mode 100644 index 000000000..b40ee9d7a --- /dev/null +++ b/.trunk/configs/.markdownlint.yaml @@ -0,0 +1,2 @@ +# Prettier friendly markdownlint config (all formatting rules disabled) +extends: markdownlint/style/prettier diff --git a/.trunk/configs/.prettierrc.json b/.trunk/configs/.prettierrc.json new file mode 100644 index 000000000..5b5bd9933 --- /dev/null +++ b/.trunk/configs/.prettierrc.json @@ -0,0 +1,3 @@ +{ + "proseWrap": "always" +} diff --git a/.trunk/configs/.yamllint.yaml b/.trunk/configs/.yamllint.yaml index 03deb40a5..184e251f8 100644 --- a/.trunk/configs/.yamllint.yaml +++ b/.trunk/configs/.yamllint.yaml @@ -1,10 +1,7 @@ rules: quoted-strings: required: only-when-needed - extra-allowed: ['{|}'] - empty-values: - forbid-in-block-mappings: true - forbid-in-flow-mappings: true + extra-allowed: ["{|}"] key-duplicates: {} octal-values: forbid-implicit-octal: true diff --git a/.trunk/trunk.yaml b/.trunk/trunk.yaml index a167c7350..a162ed189 100644 --- a/.trunk/trunk.yaml +++ b/.trunk/trunk.yaml @@ -13,19 +13,30 @@ runtimes: - go@1.21.0 - node@18.12.1 - python@3.10.8 + definitions: + - type: rust + system_version: allowed +# This is the section where you manage your linters. 
(https://docs.trunk.io/check/configuration) lint: disabled: - checkov + definitions: + - name: clippy + commands: + - name: lint + run: + cargo clippy --message-format json --locked --all-targets + --all-features -- --cap-lints=warn --no-deps enabled: - actionlint@1.7.1 - - clippy@1.65.0 + - clippy@SYSTEM + - rustfmt@SYSTEM - git-diff-check - hadolint@2.12.0 - markdownlint@0.41.0 - osv-scanner@1.8.2 - oxipng@9.1.1 - prettier@3.3.2 - - rustfmt@1.65.0 - shellcheck@0.10.0 - shfmt@3.6.0 - taplo@0.8.1 @@ -44,7 +55,7 @@ lint: actions: disabled: - trunk-announce - - trunk-check-pre-push - - trunk-fmt-pre-commit enabled: - trunk-upgrade-available + - trunk-fmt-pre-commit + - trunk-check-pre-push diff --git a/Cargo.toml b/Cargo.toml index 08c6c7468..ca56d2b37 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -98,8 +98,16 @@ serde_json = { version = "1", default-features = false } tracing = { version = "0.1", default-features = false } tracing-subscriber = { version = "0.3", features = ["env-filter"] } opentelemetry = { version = "0.23.0", features = ["metrics"] } -opentelemetry-otlp = { version = "0.16.0", features = ["trace", "tonic", "metrics"] } -opentelemetry_sdk = { version = "0.23.0", features = ["trace", "metrics", "rt-tokio"] } +opentelemetry-otlp = { version = "0.16.0", features = [ + "trace", + "tonic", + "metrics", +] } +opentelemetry_sdk = { version = "0.23.0", features = [ + "trace", + "metrics", + "rt-tokio", +] } tracing-opentelemetry = "0.24.0" diff --git a/Makefile b/Makefile index e210f5048..eed92cd2d 100644 --- a/Makefile +++ b/Makefile @@ -30,8 +30,6 @@ setup: .gitmodules cd lib/kakarot && make setup && make build && make build-sol && \ mv build/ssj/contracts_Cairo1Helpers.contract_class.json build/cairo1_helpers.json && rm -fr build/ssj ./scripts/extract_abi.sh - chmod +x ./scripts/install_hooks.sh - ./scripts/install_hooks.sh deploy-kakarot: cd lib/kakarot && STARKNET_NETWORK=$(STARKNET_NETWORK) poetry run python ./kakarot_scripts/deploy_kakarot.py && cd .. diff --git a/README.md b/README.md index 50bcd4df6..4a90f6227 100644 --- a/README.md +++ b/README.md @@ -66,23 +66,27 @@ - [Benchmarks](#benchmarks) - [Contributors ✨](#contributors-) - --- ## About -Kakarot RPC fits in the three-part architecture of the Kakarot zkEVM rollup ([Kakarot EVM Cairo Programs](https://github.com/kkrt-labs/kakarot), Kakarot RPC, [Kakarot Indexer](indexer/README.md)). It is the implementation of the Ethereum JSON-RPC specification made to interact with Kakarot zkEVM in a fully Ethereum-compatible way. +Kakarot RPC fits in the three-part architecture of the Kakarot zkEVM rollup +([Kakarot EVM Cairo Programs](https://github.com/kkrt-labs/kakarot), Kakarot +RPC, [Kakarot Indexer](indexer/README.md)). It is the implementation of the +Ethereum JSON-RPC specification made to interact with Kakarot zkEVM in a fully +Ethereum-compatible way. ![Kakarot zkEVM architecture](./docs/images/Kakarot%20zkEVM.png) -The Kakarot RPC layer's goal is to receive and output EVM-compatible -payloads & calls while interacting with an underlying StarknetOS client. This enables +The Kakarot RPC layer's goal is to receive and output EVM-compatible payloads & +calls while interacting with an underlying StarknetOS client. This enables Kakarot zkEVM to interact with the usual Ethereum tooling: Metamask, Hardhat, Foundry, etc. -Note that this is necessary because Kakarot zkEVM is implemented as a set of Cairo Programs that run on an underlying CairoVM (so-called StarknetOS) chain. 
+Note that this is necessary because Kakarot zkEVM is implemented as a set of +Cairo Programs that run on an underlying CairoVM (so-called StarknetOS) chain. This adapter layer is based on: @@ -111,7 +115,8 @@ TL;DR: - Run `cargo build` to build Kakarot RPC. - Test with `make test`. - Run Kakarot RPC in dev mode: - - Run dev RPC: `make run-dev` (you'll need a StarknetOS instance running in another process and Kakarot contracts deployed) + - Run dev RPC: `make run-dev` (you'll need a StarknetOS instance running in + another process and Kakarot contracts deployed) - Run with Docker Compose: - `make local-rpc-up` - To kill these processes, `make docker-down` @@ -120,19 +125,27 @@ TL;DR: ### Prerequisites -- [Rust](https://www.rust-lang.org/tools/install): The codebase is written in Rust to ensure high performance, maintainability, and a developer-friendly experience. -- [Docker](https://docs.docker.com/engine/install): Required for containerizing and running the various services and components in a consistent environment. -- [Python](https://www.python.org/): Used primarily for interacting with and building our Kakarot programs. -- [Poetry](https://python-poetry.org/docs/): A Python dependency management tool used for managing the dependencies of our Kakarot programs. -- [Deno](https://docs.deno.com/runtime/manual/): A JavaScript runtime used for our indexing service, based on the [Apibara](https://www.apibara.com/docs) third-party service. -- Make: Utilized to interact with the `Makefile` for running commands such as building the project or executing tests. +- [Rust](https://www.rust-lang.org/tools/install): The codebase is written in + Rust to ensure high performance, maintainability, and a developer-friendly + experience. +- [Docker](https://docs.docker.com/engine/install): Required for containerizing + and running the various services and components in a consistent environment. +- [Python](https://www.python.org/): Used primarily for interacting with and + building our Kakarot programs. +- [Poetry](https://python-poetry.org/docs/): A Python dependency management tool + used for managing the dependencies of our Kakarot programs. +- [Deno](https://docs.deno.com/runtime/manual/): A JavaScript runtime used for + our indexing service, based on the [Apibara](https://www.apibara.com/docs) + third-party service. +- Make: Utilized to interact with the `Makefile` for running commands such as + building the project or executing tests. ## Installation ### Setup the project -To set up the repository (pulling git submodule and building Cairo dependencies), -run: +To set up the repository (pulling git submodule and building Cairo +dependencies), run: ```console make setup @@ -142,12 +155,14 @@ Caveats: 1. the `setup` make command uses linux (MacOs compatible) commands to allow running the `./scripts/extract_abi.sh`. This script is used to use strongly - typed Rust bindings for Cairo programs. If you encounter problems when building - the project, try running `./scripts/extract_abi.sh`. -2. the [kakarot](https://github.com/kkrt-labs/kakarot) submodule uses Python to build - and deploy Kakarot contracts. If you don't have the right version available, we - recommend to use [pyenv](https://github.com/pyenv/pyenv) to install it. -3. We use a pre-commit hook to ensure code quality and consistency. The hook will be installed automatically by running `make setup`, otherwise you can install it manually by running the script `scripts/install_hooks.sh`. + typed Rust bindings for Cairo programs. 
If you encounter problems when + building the project, try running `./scripts/extract_abi.sh`. +2. the [kakarot](https://github.com/kkrt-labs/kakarot) submodule uses Python to + build and deploy Kakarot contracts. If you don't have the right version + available, we recommend using [pyenv](https://github.com/pyenv/pyenv) to + install it. +3. We use a pre-commit hook to ensure code quality and consistency. The hooks are + managed and automatically installed by trunk. ### Build from source @@ -157,7 +172,9 @@ To build the project from source (in release mode): cargo build --release ``` -Note that there are sometimes issues with some dependencies (notably scarb or cairo related packages, there are sometimes needs to `cargo clean` and `cargo build`) +Note that there are sometimes issues with some dependencies (notably scarb or +cairo related packages); you may sometimes need to run `cargo clean` and then +`cargo build`. ### Environment variables @@ -177,13 +194,21 @@ The binaries will be located in `target/release/`. ### Dev mode with [Katana](https://github.com/dojoengine/dojo/tree/main/crates/katana) -To run a local Starknet sequencer, you can use Katana. Katana, developed by the Dojo team, is a sequencer designed to aid in local development. It allows you to perform all Starknet-related activities in a local environment, making it an efficient platform for development and testing. To run Katana and deploy the Kakarot zkEVM (a set of Cairo smart contracts implementing the EVM): +To run a local Starknet sequencer, you can use Katana. Katana, developed by the +Dojo team, is a sequencer designed to aid in local development. It allows you to +perform all Starknet-related activities in a local environment, making it an +efficient platform for development and testing. To run Katana and deploy the +Kakarot zkEVM (a set of Cairo smart contracts implementing the EVM): ```console make run-katana ``` -This command will install Katana and generate a genesis file at `.katana/genesis.json`. Katana's genesis configuration feature is a way to define the initial state and settings of the Kakarot blockchain network locally, providing a customizable starting point for the chain. Among other things, it allows you to: +This command will install Katana and generate a genesis file at +`.katana/genesis.json`. Katana's genesis configuration feature is a way to +define the initial state and settings of the Kakarot blockchain network locally, +providing a customizable starting point for the chain. Among other things, it +allows you to: - Specify the token used for network fees. - Allocate initial token balances to accounts. @@ -205,7 +230,8 @@ STARKNET_NETWORK=katana make run-dev Some notes on this local devnet: - this will run a devnet by running katana, **with contracts automatically - deployed**, so you don't have to do them manually (see in `./lib/kakarot/kakarot_scripts/deploy_kakarot.py` for the list of contracts). + deployed**, so you don't have to do them manually (see in + `./lib/kakarot/kakarot_scripts/deploy_kakarot.py` for the list of contracts). - the deployments and declarations for the devnet will be written to the `deployments/katana` folder inside your project root after a successful run of @@ -231,22 +257,31 @@ forge script scripts/PlainOpcodes.s.sol --broadcast --legacy --slow ### Configuration -Kakarot RPC is configurable through environment variables. -Check out `.env.example` file to see the environment variables. +Kakarot RPC is configurable through environment variables.
Check out +`.env.example` file to see the environment variables. ## Running a Node in Various Environments -This section outlines how to run a complete node in different environments: local, staging, and production. Running a node involves several critical components to ensure the system operates effectively: +This section outlines how to run a complete node in different environments: +local, staging, and production. Running a node involves several critical +components to ensure the system operates effectively: -- **Starknet Engine**: Interacts with the Starknet ecosystem and processes transactions. +- **Starknet Engine**: Interacts with the Starknet ecosystem and processes + transactions. - **Kakarot Programs**: Implement the EVM logic using Cairo. -- **RPC Node**: Manages the Ethereum RPC logic, facilitating smooth interaction with the Kakarot chain. +- **RPC Node**: Manages the Ethereum RPC logic, facilitating smooth interaction + with the Kakarot chain. - **Apibara Service**: Monitors the Kakarot chain and indexes its data. -- **MongoDB**: Serves as the database for storing transactions after indexing and acts as the core component for fetching information. +- **MongoDB**: Serves as the database for storing transactions after indexing + and acts as the core component for fetching information. -By correctly configuring these components, you can ensure that the node functions as a robust part of the system. +By correctly configuring these components, you can ensure that the node +functions as a robust part of the system. -In the following sections we have tried to provide the most important parameters useful for understanding and configuring the node. However for the sake of brevity, certain parameters deemed less important are omitted and can all be found in the corresponding Docker compose files: +In the following sections we have tried to provide the most important parameters +useful for understanding and configuring the node. However for the sake of +brevity, certain parameters deemed less important are omitted and can all be +found in the corresponding Docker compose files: - Local: `docker-compose.yaml` - Staging: `docker-compose.staging.yaml` @@ -260,7 +295,8 @@ To start the entire infrastructure locally, use the following command: make local-rpc-up ``` -This command will use the `docker-compose.yaml` file to set up the whole infrastructure locally utilizing the following elements: +This command will use the `docker-compose.yaml` file to set up the whole +infrastructure locally utilizing the following elements: - **Katana (local sequencer)**: @@ -272,20 +308,27 @@ This command will use the `docker-compose.yaml` file to set up the whole infrast - **Kakarot EVM Programs**: - Prefunded Katana account with: - - Account address: `0xb3ff441a68610b30fd5e2abbf3a1548eb6ba6f3559f2862bf2dc757e5828ca`. - - Private key: `0x2bbf4f9fd0bbb2e60b0316c1fe0b76cf7a4d0198bd493ced9b8df2a3a24d68a`. + - Account address: + `0xb3ff441a68610b30fd5e2abbf3a1548eb6ba6f3559f2862bf2dc757e5828ca`. + - Private key: + `0x2bbf4f9fd0bbb2e60b0316c1fe0b76cf7a4d0198bd493ced9b8df2a3a24d68a`. - Anvil (local Ethereum node): - - Private key: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`. + - Private key: + `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`. - Katana RPC URL: `http://starknet:5050`. - Network: `STARKNET_NETWORK=katana`. - **Kakarot RPC Node** on port 3030: - - MongoDB connection string: `MONGO_CONNECTION_STRING=mongodb://mongo:mongo@mongo:27017`. 
+ - MongoDB connection string: + `MONGO_CONNECTION_STRING=mongodb://mongo:mongo@mongo:27017`. - Database name: `MONGO_DATABASE_NAME=kakarot-local`. - Max calldata felts: 30,000. - - Pending transactions stored in MongoDB, with a retry service running every second. - - Currently, Kakarot does not support pre-EIP-155 transactions, except for a whitelist of specific transaction hashes that can be found in the corresponding Docker compose file. + - Pending transactions stored in MongoDB, with a retry service running every + second. + - Currently, Kakarot does not support pre-EIP-155 transactions, except for a + whitelist of specific transaction hashes that can be found in the + corresponding Docker compose file. - **Apibara Indexer Service** on port 7171: @@ -293,25 +336,31 @@ This command will use the `docker-compose.yaml` file to set up the whole infrast - Configured with MongoDB and Kakarot addresses. - **MongoDB** with Mongo Express on port 27017 for data management. -- **Blockscout** on port 4000, provides a web interface for exploring and analyzing blockchain data. +- **Blockscout** on port 4000, provides a web interface for exploring and + analyzing blockchain data. ### Staging Environment -To start the entire infrastructure in the staging environment, use the following command: +To start the entire infrastructure in the staging environment, use the following +command: ```console make staging-rpc-up ``` -This command will use the `docker-compose.staging.yaml` file to set up the whole infrastructure in the staging configuration utilizing the following elements: +This command will use the `docker-compose.staging.yaml` file to set up the whole +infrastructure in the staging configuration utilizing the following elements: - **Starknet Full-Node (Juno)** on port 6060: - Pending block is synced to the head of the chain every second. - - Ethereum node websocket endpoint to be specified by env variable `ETH_NODE_WS` (for example `ETH_NODE_WS=wss://eth-sepolia.g.alchemy.com/v2/YOUR_API_KEY`). + - Ethereum node websocket endpoint to be specified by env variable + `ETH_NODE_WS` (for example + `ETH_NODE_WS=wss://eth-sepolia.g.alchemy.com/v2/YOUR_API_KEY`). - Network configuration: - Network name: `KKRT_BETA`. - - Network feeder URL: `https://gateway-beta.kakarot.sw-dev.io/feeder_gateway/`. + - Network feeder URL: + `https://gateway-beta.kakarot.sw-dev.io/feeder_gateway/`. - Network gateway URL: `https://gateway-beta.kakarot.sw-dev.io/gateway/`. - L1 chain ID: `11155111` (Ethereum Sepolia). - L2 chain ID: `kkrt`. @@ -332,11 +381,15 @@ This command will use the `docker-compose.staging.yaml` file to set up the whole - Starknet network URL: `http://starknet:6060`. - MongoDB connection string: `mongodb://mongo:mongo@mongo:27017`. - Database name: `kakarot-local`. - - Kakarot address: `0x2824d6ed6759ac4c4a54a39b78d04c0e48be8937237026bf8c3bf46a8bea722`. - - Uninitialized account class hash: `0x600f6862938312a05a0cfecba0dcaf37693efc9e4075a6adfb62e196022678e`. + - Kakarot address: + `0x2824d6ed6759ac4c4a54a39b78d04c0e48be8937237026bf8c3bf46a8bea722`. + - Uninitialized account class hash: + `0x600f6862938312a05a0cfecba0dcaf37693efc9e4075a6adfb62e196022678e`. - Max calldata felts: 30,000. - - Pending transactions stored in MongoDB, with a retry service running every 10 second. + - Pending transactions stored in MongoDB, with a retry service running every + 10 seconds.
+ - Whitelisted pre-EIP-155 transaction hashes (see the corresponding Docker + compose file). - **Apibara DNA Indexer Service** on port 7171: @@ -344,22 +397,26 @@ This command will use the `docker-compose.staging.yaml` file to set up the whole - Configured with MongoDB and Kakarot addresses. - **MongoDB** with Mongo Express on port 27017 for data management. -- **Blockscout** on port 4001, provides a web interface for exploring and analyzing blockchain data. +- **Blockscout** on port 4001, provides a web interface for exploring and + analyzing blockchain data. ### Production Environment -To start the entire infrastructure in the production environment, use the following command: +To start the entire infrastructure in the production environment, use the +following command: ```console make testnet-rpc-up ``` -This command will use the `docker-compose.prod.yaml` file to set up the whole infrastructure in the production configuration utilizing the following elements: +This command will use the `docker-compose.prod.yaml` file to set up the whole +infrastructure in the production configuration utilizing the following elements: - **Starknet Full-Node (Juno)** on port 6060: - Synchronizes pending blocks to the head of the chain every second. - - Ethereum node websocket endpoint specified by `ETH_NODE_WS` (for example `ETH_NODE_WS=wss://eth-sepolia.g.alchemy.com/v2/YOUR_API_KEY`). + - Ethereum node websocket endpoint specified by `ETH_NODE_WS` (for example + `ETH_NODE_WS=wss://eth-sepolia.g.alchemy.com/v2/YOUR_API_KEY`). - Network configuration: - Network name: `kakarot-sepolia`. - Network feeder URL: `https://gateway.kakarot.sw-dev.io/feeder_gateway/`. @@ -383,11 +440,15 @@ This command will use the `docker-compose.prod.yaml` file to set up the whole in - Starknet network URL: `http://starknet:6060`. - MongoDB connection string: `mongodb://mongo:mongo@mongo:27017`. - Database name: `kakarot-local`. - - Kakarot address: `0x11c5faab8a76b3caff6e243b8d13059a7fb723a0ca12bbaadde95fb9e501bda`. - - Uninitialized account class hash: `0x600f6862938312a05a0cfecba0dcaf37693efc9e4075a6adfb62e196022678e`. - - Account contract class hash: `0x1276d0b017701646f8646b69de6c3b3584edce71879678a679f28c07a9971cf`. + - Kakarot address: + `0x11c5faab8a76b3caff6e243b8d13059a7fb723a0ca12bbaadde95fb9e501bda`. + - Uninitialized account class hash: + `0x600f6862938312a05a0cfecba0dcaf37693efc9e4075a6adfb62e196022678e`. + - Account contract class hash: + `0x1276d0b017701646f8646b69de6c3b3584edce71879678a679f28c07a9971cf`. - Max calldata felts: 30,000. - - Pending transactions stored in MongoDB, with a retry service running every 10 seconds. + - Pending transactions stored in MongoDB, with a retry service running every + 10 seconds. - Whitelisted pre-EIP-155 transaction hashes (see local environment). - **Apibara DNA Indexer Service** on port 7171: @@ -396,29 +457,45 @@ This command will use the `docker-compose.prod.yaml` file to set up the whole in - Configured with MongoDB and Kakarot addresses. - **MongoDB** with Mongo Express on port 27017 for data management. -- **Blockscout** on port 4001, provides a web interface for exploring and analyzing blockchain data. +- **Blockscout** on port 4001, provides a web interface for exploring and + analyzing blockchain data. 
### Potential Pitfalls, Caveats, and Requirements -When setting up the Kakarot node in any environment, it's important to be aware of the following: +When setting up the Kakarot node in any environment, it's important to be aware +of the following: #### Requirements -- **Hardware**: Ensure your system meets the necessary hardware requirements for running Docker containers efficiently. A modern multi-core CPU, at least 16GB of RAM, and ample storage space are recommended. -- **Software**: Install the latest versions of Docker and Docker Compose to ensure compatibility with the provided configuration. -- **Network**: Stable internet connection for downloading images and communicating with remote services if needed. We have noticed difficulties on networks with low bandwidth. +- **Hardware**: Ensure your system meets the necessary hardware requirements for + running Docker containers efficiently. A modern multi-core CPU, at least 16GB + of RAM, and ample storage space are recommended. +- **Software**: Install the latest versions of Docker and Docker Compose to + ensure compatibility with the provided configuration. +- **Network**: Stable internet connection for downloading images and + communicating with remote services if needed. We have noticed difficulties on + networks with low bandwidth. #### Potential Pitfalls -- **Resource Limits**: Docker containers might consume significant system resources. Monitor system performance and consider adjusting container resource limits if necessary. -- **Network Configuration**: Ensure no port conflicts on your local machine, especially with ports 3030, 5050, 6060, 7171, 27017... used by the services. -- **Volume Persistence**: Docker volumes are used for data persistence. Ensure they are properly managed and backed up to prevent data loss. +- **Resource Limits**: Docker containers might consume significant system + resources. Monitor system performance and consider adjusting container + resource limits if necessary. +- **Network Configuration**: Ensure no port conflicts on your local machine, + especially with ports 3030, 5050, 6060, 7171, 27017... used by the services. +- **Volume Persistence**: Docker volumes are used for data persistence. Ensure + they are properly managed and backed up to prevent data loss. #### Caveats -- **Pre-EIP-155 Transactions**: Kakarot does not natively support pre-EIP-155 transactions, except for those whitelisted. Be cautious about transaction compatibility. -- **Environment Configuration**: Double-check environment variables and their values, particularly those related to security, such as private keys and database credentials. -- **Service Dependencies**: The order of service initialization is crucial. Dependencies between services must be respected to avoid runtime errors. +- **Pre-EIP-155 Transactions**: Kakarot does not natively support pre-EIP-155 + transactions, except for those whitelisted. Be cautious about transaction + compatibility. +- **Environment Configuration**: Double-check environment variables and their + values, particularly those related to security, such as private keys and + database credentials. +- **Service Dependencies**: The order of service initialization is crucial. + Dependencies between services must be respected to avoid runtime errors. ### API @@ -439,70 +516,77 @@ You can take a look at `rpc-call-examples` directory. Please note the following: In order to execute the Rust tests, follow the below instructions: - Run `make setup` in order to setup the project. 
-- Run `make test` which will create a Genesis test file for Kakarot - and launch tests. -- If you which to only run a specific test, be sure to first at least - run `make katana-genesis` once, then run `make test-target TARGET=test_you_want_to_run`. +- Run `make test` which will create a Genesis test file for Kakarot and launch + tests. +- If you wish to only run a specific test, be sure to first at least run + `make katana-genesis` once, then run + `make test-target TARGET=test_you_want_to_run`. ### Apibara indexer tests -In order to run the Typescript unit tests, you will need to have [Deno](https://docs.deno.com/runtime/manual/) -installed. Then you can run `KAKAROT_ADDRESS=ADDRESS_YOU_WANT_TO_USE_FOR_KAKAROT -deno test --allow-env`. +In order to run the Typescript unit tests, you will need to have +[Deno](https://docs.deno.com/runtime/manual/) installed. Then you can run +`KAKAROT_ADDRESS=ADDRESS_YOU_WANT_TO_USE_FOR_KAKAROT deno test --allow-env`. ### Hive The [Hive](https://github.com/ethereum/hive/tree/master) end-to-end test suite -is set up in the Github Continuous Integration (CI) flow of the repository. -This ensures a safe guard when modifying the current RPC implementation and/or -the [execution layer](https://github.com/kkrt-labs/kakarot). - -Due to the current existing differences between the Kakarot EVM implementation which -aims to be a type 2 ZK-EVM (see the blog post from [Vitalik](https://vitalik.eth.limo/general/2022/08/04/zkevm.html) -for more details), some of the Hive tests need to be skipped or slightly modified -in order to pass. - -For the [hive rpc tests](https://github.com/kkrt-labs/hive/tree/master/simulators/ethereum/rpc), -all the websockets related tests are skipped as websockets aren't currently supported -by the Kakarot RPC. - -For the [hive rpc compatibility tests](https://github.com/kkrt-labs/hive/tree/master/simulators/ethereum/rpc-compat), +is set up in the Github Continuous Integration (CI) flow of the repository. This +ensures a safe guard when modifying the current RPC implementation and/or the +[execution layer](https://github.com/kkrt-labs/kakarot). + +Due to the current existing differences between the Kakarot EVM implementation +which aims to be a type 2 ZK-EVM (see the blog post from +[Vitalik](https://vitalik.eth.limo/general/2022/08/04/zkevm.html) for more +details), some of the Hive tests need to be skipped or slightly modified in +order to pass. + +For the +[hive rpc tests](https://github.com/kkrt-labs/hive/tree/master/simulators/ethereum/rpc), +all the websockets related tests are skipped as websockets aren't currently +supported by the Kakarot RPC. + +For the +[hive rpc compatibility tests](https://github.com/kkrt-labs/hive/tree/master/simulators/ethereum/rpc-compat), the following tests are skipped: -- debug_getRawBlock/get-block-n: the Kakarot implementation currently - doesn't compute the block hash following EVM standards. +- debug_getRawBlock/get-block-n: the Kakarot implementation currently doesn't + compute the block hash following EVM standards. - debug_getRawBlock/get-genesis: see `debug_getRawBlock/get-block-n`. - debug_getRawHeader/get-block-n: debug API is currently not supported by the Kakarot RPC. - debug_getRawHeader/get-genesis: debug API is currently not supported by the Kakarot RPC. -- debug_getRawHeader/get-invalid-number: debug API is currently not supported - by the Kakarot RPC. +- debug_getRawHeader/get-invalid-number: debug API is currently not supported by + the Kakarot RPC.
- debug_getRawTransaction/get-invalid-hash: the Kakarot implementation of the debug_getRawTransaction endpoint uses `reth_primitives::B256` type when - deserializing the hash. This test is expected to fail as the provided hash - in the query doesn't start with `0x`. As this test doesn't bring much, we - decide to skip it. + deserializing the hash. This test is expected to fail as the provided hash in + the query doesn't start with `0x`. As this test doesn't bring much, we decide + to skip it. - eth_createAccessList/create-al-multiple-reads: the createAccessList endpoint is currently not supported by the Kakarot RPC. - eth_createAccessList/create-al-simple-contract: the createAccessList endpoint is currently not supported by the Kakarot RPC. - eth_createAccessList/create-al-simple-transfer: the createAccessList endpoint is currently not supported by the Kakarot RPC. -- eth_feeHistory/fee-history: the Kakarot implementation doesn't currently - set the block gas limit dynamically, which causes some disparity in the - returned data. Additionally, the rewards of the blocks aren't available. +- eth_feeHistory/fee-history: the Kakarot implementation doesn't currently set + the block gas limit dynamically, which causes some disparity in the returned + data. Additionally, the rewards of the blocks aren't available. - eth_getBalance/get-balance-blockhash: see `debug_getRawBlock/get-block-n`. - eth_getBlockByHash/get-block-by-hash: see `debug_getRawBlock/get-block-n`. -- eth_getBlockReceipts/get-block-receipts-by-hash: see `debug_getRawBlock/get-block-n`. -- eth_getBlockTransactionCountByHash/get-block-n: see `debug_getRawBlock/get-block-n`. -- eth_getBlockTransactionCountByHash/get-genesis: see `debug_getRawBlock/get-block-n`. +- eth_getBlockReceipts/get-block-receipts-by-hash: see + `debug_getRawBlock/get-block-n`. +- eth_getBlockTransactionCountByHash/get-block-n: see + `debug_getRawBlock/get-block-n`. +- eth_getBlockTransactionCountByHash/get-genesis: see + `debug_getRawBlock/get-block-n`. - eth_getProof/get-account-proof-blockhash: the getProof endpoint is currently not supported by the Kakarot RPC. -- eth_getProof/get-account-proof-with-storage: the getProof endpoint is currently - not supported by the Kakarot RPC. -- eth_getProof/get-account-proof: the getProof endpoint is currently - not supported by the Kakarot RPC. +- eth_getProof/get-account-proof-with-storage: the getProof endpoint is + currently not supported by the Kakarot RPC. +- eth_getProof/get-account-proof: the getProof endpoint is currently not + supported by the Kakarot RPC. - eth_getStorage/get-storage-invalid-key-too-large: the Kakarot implementation of the eth_getStorage endpoint uses `reth_primitives::U256` type when deserializing the number. This test is expected to fail as the provided block @@ -513,44 +597,47 @@ the following tests are skipped: of the ETH API. This test passes an invalid block hash `0xasdf` and expects the server to return with an error code `-32000` which corresponds to an invalid input error. The code derived from the `rpc` macro returns an error - code of `-32602` which corresponds to an invalid parameters error, whenever - it encounters issues when deserializing the input. We decide to ignore this - test as the only issue is the error code returned. -- eth_getTransactionByBlockHashAndIndex/get-block-n: see `debug_getRawBlock/get-block-n`. 
- -In addition to the tests we skip, some of the objects fields need to be ignored in -the passing tests: - -- For blocks: the hash, parent hash, timestamp, base fee per gas, difficulty, gas - limit, miner, size, state root, total difficulty and withdrawals are all skipped. - Due to the difference between a type 1 and a type 2 ZK-EVM, these fields are - currently not computed according to the EVM specifications and need to be skipped. + code of `-32602` which corresponds to an invalid parameters error, whenever it + encounters issues when deserializing the input. We decide to ignore this test + as the only issue is the error code returned. +- eth_getTransactionByBlockHashAndIndex/get-block-n: see + `debug_getRawBlock/get-block-n`. + +In addition to the tests we skip, some of the object fields need to be ignored +in the passing tests: + +- For blocks: the hash, parent hash, timestamp, base fee per gas, difficulty, + gas limit, miner, size, state root, total difficulty and withdrawals are all + skipped. Due to the difference between a type 1 and a type 2 ZK-EVM, these + fields are currently not computed according to the EVM specifications and need + to be skipped. - For receipts, transactions and logs: the block hash is skipped. -If you which to run our hive test suite locally, the following steps should be taken: +If you wish to run our hive test suite locally, the following steps should be +taken: - Set up the repo: `make setup`. -- Build a local docker image of the RPC. Check the hive [Dockerfile](docker/hive/Dockerfile) - for the values for `xxx` and `yyy`: +- Build a local docker image of the RPC. Check the hive + [Dockerfile](docker/hive/Dockerfile) for the values for `xxx` and `yyy`: ```shell docker build --build-arg APIBARA_STARKNET_BIN_DIR=xxx --build-arg APIBARA_SINK_BIN_DIR=yyy -t hive . -f docker/hive/Dockerfile ``` -- Checkout the Kakarot fork of hive: `git clone https://github.com/kkrt-labs/hive` +- Checkout the Kakarot fork of hive: + `git clone https://github.com/kkrt-labs/hive` - Build the hive binary: `go build hive.go` - Run the full rpc test suite against Kakarot: `./hive --sim "ethereum/rpc" --client kakarot` -- Additional filtering can be provided using `--sim.limit` if you which to run - a certain limited set of tests. +- Additional filtering can be provided using `--sim.limit` if you wish to run a + certain limited set of tests. ## Project assistance If you want to say **thank you** or/and support active development of Kakarot RPC: -- Add a [GitHub Star](https://github.com/kkrt-labs/kakarot-rpc) to the - project. +- Add a [GitHub Star](https://github.com/kkrt-labs/kakarot-rpc) to the project. - Tweet about the Kakarot RPC: . ## Contributing @@ -565,11 +652,22 @@ for being involved! ## Glossary -- StarknetOS chain: also called CairoVM chain, or Starknet appchain, it is a full-node (or sequencer) that is powered by the Cairo VM (Cairo smart contracts can be deployed to it). It a chain that behaves in most ways similarly to Starknet L2. -- Kakarot Core EVM: The set of Cairo Programs that implement the Ethereum Virtual Machine instruction set. -- Katana: A StarknetOS sequencer developed by the Dojo team. Serves as the underlying StarknetOS client for Kakarot zkEVM locally. It is built with speed and minimalism in mind. -- Madara: A StarknetOS sequencer and full-node developed by the Madara (e.g. Pragma Oracle, Deoxys, etc.) and Starkware exploration teams. Based on the Substrate framework, it is built with decentralization and robustness in mind.
-- Kakarot zkEVM: the entire system that forms the Kakarot zkRollup: the core EVM Cairo Programs and the StarknetOS chain they are deployed to, the RPC layer (this repository), and the Kakarot Indexer (the backend service that ingests Starknet data types and formats them in EVM format for RPC read requests). +- StarknetOS chain: also called CairoVM chain, or Starknet appchain, it is a + full-node (or sequencer) that is powered by the Cairo VM (Cairo smart + contracts can be deployed to it). It is a chain that behaves in most ways + similarly to Starknet L2. +- Kakarot Core EVM: The set of Cairo Programs that implement the Ethereum + Virtual Machine instruction set. +- Katana: A StarknetOS sequencer developed by the Dojo team. Serves as the + underlying StarknetOS client for Kakarot zkEVM locally. It is built with speed + and minimalism in mind. +- Madara: A StarknetOS sequencer and full-node developed by the Madara (e.g. + Pragma Oracle, Deoxys, etc.) and Starkware exploration teams. Based on the + Substrate framework, it is built with decentralization and robustness in mind. +- Kakarot zkEVM: the entire system that forms the Kakarot zkRollup: the core EVM + Cairo Programs and the StarknetOS chain they are deployed to, the RPC layer + (this repository), and the Kakarot Indexer (the backend service that ingests + Starknet data types and formats them in EVM format for RPC read requests). ## Authors & contributors @@ -595,25 +693,25 @@ See [LICENSE](LICENSE) for more information. We warmly thank all the people who made this project possible. -- [Reth](https://github.com/paradigmxyz/reth) (Rust Ethereum), - Thank you for providing open source libraries for us to reuse. +- [Reth](https://github.com/paradigmxyz/reth) (Rust Ethereum), Thank you for + providing open source libraries for us to reuse. - [jsonrpsee](https://github.com/paritytech/jsonrpsee) -- Starkware and its exploration team, - thank you for helping and providing a great test environment with Madara. +- Starkware and its exploration team, thank you for helping and providing a + great test environment with Madara. - [Lambdaclass](https://github.com/lambdaclass) -- [Dojo](https://github.com/dojoengine/dojo), - thank you for providing great test utils. -- [starknet-rs](https://github.com/xJonathanLEI/starknet-rs), - thank you for a great SDK. +- [Dojo](https://github.com/dojoengine/dojo), thank you for providing great test + utils. +- [starknet-rs](https://github.com/xJonathanLEI/starknet-rs), thank you for a + great SDK. - All our contributors. This journey wouldn't be possible without you. ## Benchmarks -For now, Kakarot RPC provides a minimal benchmarking methodology. -You'll need [Bun](https://bun.sh/) installed locally. +For now, Kakarot RPC provides a minimal benchmarking methodology. You'll need +[Bun](https://bun.sh/) installed locally. -- Run a Starknet node locally (Katana or Madara), - e.g. `katana --block-time 6000 --disable-fee` if you have the dojo binary locally, +- Run a Starknet node locally (Katana or Madara), e.g. + `katana --block-time 6000 --disable-fee` if you have the dojo binary locally, or `make madara-rpc-up` for Madara.
- Deploy the Kakarot smart contract (`make deploy-kakarot`) - Run the Kakarot RPC binary (`make run-dev`) diff --git a/benchmarks/README.md b/benchmarks/README.md index 39005dd76..eef8294c4 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -16,14 +16,14 @@ bun run benchmark-madara bun run benchmark-katana ``` -Note: -The benchmarks rely on a INTER_TRANSACTION_MS_DELAY environment variable. -It is aimed at spacing transactions between one another. -This achieves a two-fold goal: +Note: The benchmarks rely on a INTER_TRANSACTION_MS_DELAY environment variable. +It is aimed at spacing transactions between one another. This achieves a +two-fold goal: -- Refrain from filling the mempool too fast, - i.e. reach maximum capacity of the mempool/backlog before the end of the benchmark. -- allow clients to order the transactions with regards to their nonce, - as for now, only one wallet fires transactions, and nonces must be sequential. +- Refrain from filling the mempool too fast, i.e. reach maximum capacity of the + mempool/backlog before the end of the benchmark. +- allow clients to order the transactions with regards to their nonce, as for + now, only one wallet fires transactions, and nonces must be sequential. -This implies that one must calibrate a good value for INTER_TRANSACTION_MS_DELAY. +This implies that one must calibrate a good value for +INTER_TRANSACTION_MS_DELAY. diff --git a/docker-compose.prod.yaml b/docker-compose.prod.yaml index b814d48e0..d9003a88e 100644 --- a/docker-compose.prod.yaml +++ b/docker-compose.prod.yaml @@ -1,4 +1,3 @@ -# trunk-ignore-all(yamllint/empty-values) services: starknet: image: nethermind/juno:v0.11.1 @@ -7,19 +6,14 @@ services: volumes: - juno_files_prod:/var/lib/juno command: > - --pending-poll-interval "1s" - --http - --http-host 0.0.0.0 - --http-port 6060 - --db-path /var/lib/juno - --eth-node ${ETH_NODE_WS} - --cn-name kakarot-sepolia - --cn-feeder-url https://gateway.kakarot.sw-dev.io/feeder_gateway/ - --cn-gateway-url https://gateway.kakarot.sw-dev.io/gateway/ - --cn-l1-chain-id 0xaa36a7 - --cn-l2-chain-id kkrt - --cn-core-contract-address 0x74Ca1aC5BD4c3c97006d2B7b9375Dd3B6C17ACcD - --cn-unverifiable-range 0,1000000 + --pending-poll-interval "1s" --http --http-host 0.0.0.0 --http-port 6060 + --db-path /var/lib/juno --eth-node ${ETH_NODE_WS} --cn-name + kakarot-sepolia --cn-feeder-url + https://gateway.kakarot.sw-dev.io/feeder_gateway/ --cn-gateway-url + https://gateway.kakarot.sw-dev.io/gateway/ --cn-l1-chain-id 0xaa36a7 + --cn-l2-chain-id kkrt --cn-core-contract-address + 0x74Ca1aC5BD4c3c97006d2B7b9375Dd3B6C17ACcD --cn-unverifiable-range + 0,1000000 networks: - internal_prod starknet-explorer: @@ -87,21 +81,22 @@ services: DATABASE_POOL_SIZE: 10 DATABASE_TIMEOUT: 5000 ECTO_USE_SSL: "false" - BLOCKSCOUT_HOST: "0.0.0.0" + BLOCKSCOUT_HOST: 0.0.0.0 PORT: 4000 - APP_NAME: "Blockscout" - ETHEREUM_JSONRPC_VARIANT: "geth" - ETHEREUM_JSONRPC_HTTP_URL: "http://kakarot-rpc:3030" - ETHEREUM_JSONRPC_TRACE_URL: "http://kakarot-rpc:3030" + APP_NAME: Blockscout + ETHEREUM_JSONRPC_VARIANT: geth + ETHEREUM_JSONRPC_HTTP_URL: http://kakarot-rpc:3030 + ETHEREUM_JSONRPC_TRACE_URL: http://kakarot-rpc:3030 ports: - 4001:4000 networks: - internal_prod command: [ - "/bin/sh", - "-c", - "bin/blockscout eval \"Elixir.Explorer.ReleaseTasks.create_and_migrate()\" && bin/blockscout start" + /bin/sh, + -c, + bin/blockscout eval "Elixir.Explorer.ReleaseTasks.create_and_migrate()" + && bin/blockscout start, ] depends_on: - postgres diff --git 
a/docker-compose.staging.yaml b/docker-compose.staging.yaml index c2263b9c1..ff46271c5 100644 --- a/docker-compose.staging.yaml +++ b/docker-compose.staging.yaml @@ -1,4 +1,3 @@ -# trunk-ignore-all(yamllint/empty-values) services: starknet: image: nethermind/juno:v0.11.1 @@ -7,19 +6,13 @@ services: volumes: - ${HOME}/code/kkrt-labs/snapshots/juno_sepolia:/var/lib/juno command: > - --pending-poll-interval "1s" - --http - --http-host 0.0.0.0 - --http-port 6060 - --db-path /var/lib/juno - --eth-node ${ETH_NODE_WS} - --cn-name KKRT_BETA + --pending-poll-interval "1s" --http --http-host 0.0.0.0 --http-port 6060 + --db-path /var/lib/juno --eth-node ${ETH_NODE_WS} --cn-name KKRT_BETA --cn-feeder-url https://gateway-beta.kakarot.sw-dev.io/feeder_gateway/ --cn-gateway-url https://gateway-beta.kakarot.sw-dev.io/gateway/ - --cn-l1-chain-id 11155111 - --cn-l2-chain-id kkrt - --cn-core-contract-address 0xc7c9ea7fD0921Cb6EDd9a3184F88cF1b821aA82B - --cn-unverifiable-range 0,1000000 + --cn-l1-chain-id 11155111 --cn-l2-chain-id kkrt --cn-core-contract-address + 0xc7c9ea7fD0921Cb6EDd9a3184F88cF1b821aA82B --cn-unverifiable-range + 0,1000000 networks: - internal_staging starknet-explorer: @@ -87,21 +80,22 @@ services: DATABASE_POOL_SIZE: 10 DATABASE_TIMEOUT: 5000 ECTO_USE_SSL: "false" - BLOCKSCOUT_HOST: "0.0.0.0" + BLOCKSCOUT_HOST: 0.0.0.0 PORT: 4000 - APP_NAME: "Blockscout" - ETHEREUM_JSONRPC_VARIANT: "geth" - ETHEREUM_JSONRPC_HTTP_URL: "http://kakarot-rpc:3030" - ETHEREUM_JSONRPC_TRACE_URL: "http://kakarot-rpc:3030" + APP_NAME: Blockscout + ETHEREUM_JSONRPC_VARIANT: geth + ETHEREUM_JSONRPC_HTTP_URL: http://kakarot-rpc:3030 + ETHEREUM_JSONRPC_TRACE_URL: http://kakarot-rpc:3030 ports: - 4001:4000 networks: - internal_staging command: [ - "/bin/sh", - "-c", - "bin/blockscout eval \"Elixir.Explorer.ReleaseTasks.create_and_migrate()\" && bin/blockscout start" + /bin/sh, + -c, + bin/blockscout eval "Elixir.Explorer.ReleaseTasks.create_and_migrate()" + && bin/blockscout start, ] depends_on: - postgres diff --git a/docker-compose.yaml b/docker-compose.yaml index e8ca52db5..3f3389141 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -3,7 +3,7 @@ services: platform: linux/amd64 image: ghcr.io/foundry-rs/foundry:latest command: - [ "anvil --host 0.0.0.0 --port 8545 --load-state /app/.anvil/state.json" ] + [anvil --host 0.0.0.0 --port 8545 --load-state /app/.anvil/state.json] # Initial state has the StarknetMessaging and L1KakarotMessaging contracts deployed volumes: - .anvil:/app/.anvil @@ -107,21 +107,22 @@ services: DATABASE_POOL_SIZE: 10 DATABASE_TIMEOUT: 5000 ECTO_USE_SSL: "false" - BLOCKSCOUT_HOST: "0.0.0.0" + BLOCKSCOUT_HOST: 0.0.0.0 PORT: 4000 - APP_NAME: "Blockscout" - ETHEREUM_JSONRPC_VARIANT: "geth" - ETHEREUM_JSONRPC_HTTP_URL: "http://kakarot-rpc:3030" - ETHEREUM_JSONRPC_TRACE_URL: "http://kakarot-rpc:3030" + APP_NAME: Blockscout + ETHEREUM_JSONRPC_VARIANT: geth + ETHEREUM_JSONRPC_HTTP_URL: http://kakarot-rpc:3030 + ETHEREUM_JSONRPC_TRACE_URL: http://kakarot-rpc:3030 ports: - 4000:4000 networks: - internal command: [ - "/bin/sh", - "-c", - "bin/blockscout eval \"Elixir.Explorer.ReleaseTasks.create_and_migrate()\" && bin/blockscout start" + /bin/sh, + -c, + bin/blockscout eval "Elixir.Explorer.ReleaseTasks.create_and_migrate()" + && bin/blockscout start, ] depends_on: - postgres diff --git a/docs/images/Kakarot RPC (lower level).png b/docs/images/Kakarot RPC (lower level).png index 27c65dd75..ddc9b43e8 100644 Binary files a/docs/images/Kakarot RPC (lower level).png and b/docs/images/Kakarot RPC
(lower level).png differ diff --git a/docs/images/Kakarot zkEVM.png b/docs/images/Kakarot zkEVM.png index c08b73783..c8f46a30b 100644 Binary files a/docs/images/Kakarot zkEVM.png and b/docs/images/Kakarot zkEVM.png differ diff --git a/docs/images/logo.png b/docs/images/logo.png index 9bbed493a..da76e2d6b 100644 Binary files a/docs/images/logo.png and b/docs/images/logo.png differ diff --git a/docs/methods/eth_call.md b/docs/methods/eth_call.md index f56123356..c7c528039 100644 --- a/docs/methods/eth_call.md +++ b/docs/methods/eth_call.md @@ -9,8 +9,10 @@ ## Description -Submits an EVM call by wrapping the EVM compatible transaction object into a Starknet call. +Submits an EVM call by wrapping the EVM compatible transaction object into a +Starknet call. Kakarot Specificity: -- Call the Kakarot Cairo smart contract's entrypoint: `eth_call` with the EVM transaction fields as argument +- Call the Kakarot Cairo smart contract's entrypoint: `eth_call` with the EVM + transaction fields as argument diff --git a/docs/methods/eth_coinbase.md b/docs/methods/eth_coinbase.md index e28ee60d4..3f797f21b 100644 --- a/docs/methods/eth_coinbase.md +++ b/docs/methods/eth_coinbase.md @@ -12,5 +12,5 @@ Returns the Ethereum account controlled by the Kakarot zkEVM sequencer. Kakarot specifity: since Kakarot set of Cairo programs run on the StarknetOS -(i.e. on an underlying CairoVM client), -the coinbase is the EVM representation of a Starknet account that collects the fees. +(i.e. on an underlying CairoVM client), the coinbase is the EVM representation +of a Starknet account that collects the fees. diff --git a/docs/methods/eth_estimateGas.md b/docs/methods/eth_estimateGas.md index 53b388cc0..3b348bb11 100644 --- a/docs/methods/eth_estimateGas.md +++ b/docs/methods/eth_estimateGas.md @@ -9,8 +9,11 @@ ## Description -Generates and returns an estimate of how much gas is necessary to allow the transaction to complete. +Generates and returns an estimate of how much gas is necessary to allow the +transaction to complete. Kakarot Specificity: -- Call the Kakarot Cairo smart contract's entrypoint: `eth_call` with the EVM transaction fields as argument and get the returned `gas_used` variable. This value is the estimated gas needed to complete the transaction. +- Call the Kakarot Cairo smart contract's entrypoint: `eth_call` with the EVM + transaction fields as argument and get the returned `gas_used` variable. This + value is the estimated gas needed to complete the transaction. diff --git a/docs/methods/eth_feeHistory.md b/docs/methods/eth_feeHistory.md index 2a135d959..1bf00c115 100644 --- a/docs/methods/eth_feeHistory.md +++ b/docs/methods/eth_feeHistory.md @@ -9,13 +9,18 @@ ## Description -Returns transaction base fee per gas and effective priority fee per gas for the requested/supported block range. +Returns transaction base fee per gas and effective priority fee per gas for the +requested/supported block range. Kakarot Specificity: -- Since Kakarot has no fee market, this will return the base fee over a range of blocks (since priority fee is always null, we get `gasPrice == baseFee` all the time). -- The reward percentile logic does not apply, and the gasUsed ratio is hardcoded to 1. +- Since Kakarot has no fee market, this will return the base fee over a range of + blocks (since priority fee is always null, we get `gasPrice == baseFee` all + the time). +- The reward percentile logic does not apply, and the gasUsed ratio is hardcoded + to 1. 
Note: -- Using this endpoint is discouraged and is made somewhat compatible to avoid breaking existing backend logic. +- Using this endpoint is discouraged and is made somewhat compatible to avoid + breaking existing backend logic. diff --git a/docs/methods/eth_gasPrice.md b/docs/methods/eth_gasPrice.md index 4088f08e2..e7082ff20 100644 --- a/docs/methods/eth_gasPrice.md +++ b/docs/methods/eth_gasPrice.md @@ -18,4 +18,5 @@ For this reason: - gasPrice == baseFee - priority fee is generally a variable that isn't used. - - setting a EIP-1559 transaction with `maxPriorityFeePerGas > 0` has no effect. + - setting a EIP-1559 transaction with `maxPriorityFeePerGas > 0` has no + effect. diff --git a/docs/methods/eth_getCode.md b/docs/methods/eth_getCode.md index bc290f233..3f43a67b4 100644 --- a/docs/methods/eth_getCode.md +++ b/docs/methods/eth_getCode.md @@ -11,8 +11,11 @@ Returns the value from a storage position at a given address. -Kakarot specificity: note that Kakarot zkEVM is implemented as a set of Cairo Programs running on an underlying StarknetOS chain (so-called CairoVM chain). +Kakarot specificity: note that Kakarot zkEVM is implemented as a set of Cairo +Programs running on an underlying StarknetOS chain (so-called CairoVM chain). -Every deployed EVM smart contract is a Starknet smart contract under the hood. The EVM bytecode of a contract is stored as a Starknet storage variable. +Every deployed EVM smart contract is a Starknet smart contract under the hood. +The EVM bytecode of a contract is stored as a Starknet storage variable. -Running the eth_getCode amounts to querying a storage slot in the underlying StarknetOS chain. +Running the eth_getCode amounts to querying a storage slot in the underlying +StarknetOS chain. diff --git a/docs/methods/eth_getStorageAt.md b/docs/methods/eth_getStorageAt.md index 9afacc064..665fd64ba 100644 --- a/docs/methods/eth_getStorageAt.md +++ b/docs/methods/eth_getStorageAt.md @@ -11,8 +11,13 @@ Returns the value from a storage position at a given address. -Kakarot specificity: note that Kakarot zkEVM is implemented as a set of Cairo Programs running on an underlying StarknetOS chain (so-called CairoVM chain). +Kakarot specificity: note that Kakarot zkEVM is implemented as a set of Cairo +Programs running on an underlying StarknetOS chain (so-called CairoVM chain). -Every deployed EVM smart contract is a Starknet smart contract under the hood. The EVM storage layout is reproduced inside the Starknet storage. +Every deployed EVM smart contract is a Starknet smart contract under the hood. +The EVM storage layout is reproduced inside the Starknet storage. -Querying a storage slot in Kakarot amounts to querying the underlying Starknet smart contract storage using the Starknet JSON RPC specification. The Kakarot-RPC handles this logic under-the-hood at no additional abstraction cost or trust assumption. +Querying a storage slot in Kakarot amounts to querying the underlying Starknet +smart contract storage using the Starknet JSON RPC specification. The +Kakarot-RPC handles this logic under-the-hood at no additional abstraction cost +or trust assumption. diff --git a/docs/methods/eth_sendRawTransaction.md b/docs/methods/eth_sendRawTransaction.md index a8104533b..85072ff0f 100644 --- a/docs/methods/eth_sendRawTransaction.md +++ b/docs/methods/eth_sendRawTransaction.md @@ -9,11 +9,18 @@ ## Description -Submits a raw transaction by wrapping the EVM compatible transaction into a Starknet formatted transaction. 
Note that this operation does not come at any additional trust assumption. -The EVM signature and initial transaction payload will be verified inside a Cairo program (EOA Cairo implementation). +Submits a raw transaction by wrapping the EVM compatible transaction into a +Starknet formatted transaction. Note that this operation does not come at any +additional trust assumption. The EVM signature and initial transaction payload +will be verified inside a Cairo program (EOA Cairo implementation). Kakarot Specificity: -- Decode RLP encoded transaction, and pass signature in the Starknet metadata `transaction.signature` field -- Re-encode (RLP) transaction without the signature. The encoded transaction is ready to be keccak-hashed inside the Cairo program (this is pre-formatting without security degradation). -- For a given sender EVM address, compute the corresponding (bijective mapping) Starknet account. Send the Starknet transaction with `sender_address` field set as this Starknet account. +- Decode RLP encoded transaction, and pass signature in the Starknet metadata + `transaction.signature` field +- Re-encode (RLP) transaction without the signature. The encoded transaction is + ready to be keccak-hashed inside the Cairo program (this is pre-formatting + without security degradation). +- For a given sender EVM address, compute the corresponding (bijective mapping) + Starknet account. Send the Starknet transaction with `sender_address` field + set as this Starknet account. diff --git a/docs/rpc_api_status.md b/docs/rpc_api_status.md index add29ebf3..3b23b55da 100644 --- a/docs/rpc_api_status.md +++ b/docs/rpc_api_status.md @@ -17,6 +17,8 @@ The template for the method file can be found edit it corresponding to the method you're implementing. All methods should be documented in `./methods/{method}.md` + + | Name | Description | State | | ----------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----- | | eth_chainId | Returns the chain ID of the current network. | ✅ | @@ -60,3 +62,5 @@ documented in `./methods/{method}.md` | [eth_maxPriorityFeePerGas](./methods/eth_maxPriorityFeePerGas.md) | Returns the current maxPriorityFeePerGas per gas in wei. This value is equal to 0. | 🟡 | | [eth_feeHistory](./methods/eth_feeHistory.md) | Returns transaction base fee per gas and effective priority fee per gas for the requested/supported block range. | 🟡 | | eth_getProof | Returns the merkle proof for a given account and optionally some storage keys. 
| ✅ | + + diff --git a/indexer/docs/Monolith_Kakarot_zkEVM_not_live.png b/indexer/docs/Monolith_Kakarot_zkEVM_not_live.png index fbb5f62f4..b850c4085 100644 Binary files a/indexer/docs/Monolith_Kakarot_zkEVM_not_live.png and b/indexer/docs/Monolith_Kakarot_zkEVM_not_live.png differ diff --git a/indexer/docs/kakarot_indexer.png b/indexer/docs/kakarot_indexer.png index d882d2cee..fa93b9174 100644 Binary files a/indexer/docs/kakarot_indexer.png and b/indexer/docs/kakarot_indexer.png differ diff --git a/scripts/fetchKakarotTx.ts b/scripts/fetchKakarotTx.ts index 619088dea..48ec89649 100644 --- a/scripts/fetchKakarotTx.ts +++ b/scripts/fetchKakarotTx.ts @@ -57,8 +57,10 @@ async function collectTransactions(targetCount: number) { // Combine transaction data with receipts let transactionReceipts = getTransaction.map((tx) => { - let receipt = getTransactionReceipts.find((r) => - "transaction_hash" in tx && r.transaction_hash === tx.transaction_hash + let receipt = getTransactionReceipts.find( + (r) => + "transaction_hash" in tx && + r.transaction_hash === tx.transaction_hash, ); return { ...tx, ...receipt }; }); @@ -71,9 +73,7 @@ async function collectTransactions(targetCount: number) { transformedTransactions, eventsWithTransaction, toTypedEthTxTransaction, - } = transformTransactionsAndEvents( - transactionReceipts, - ); + } = transformTransactionsAndEvents(transactionReceipts); transactions = transformedTransactions; events = eventsWithTransaction; @@ -117,9 +117,7 @@ function transformBlockHeader(block: any): BlockHeader { } // Function to transform transactions and events -function transformTransactionsAndEvents( - transactions: any[], -): { +function transformTransactionsAndEvents(transactions: any[]): { transformedTransactions: TransactionWithReceipt[]; eventsWithTransaction: EventWithTransaction[]; toTypedEthTxTransaction: Transaction[]; @@ -154,13 +152,12 @@ function transformTransactionsAndEvents( actualFee: tx.actual_fee, contractAddress: tx.contractAddress, l2ToL1Messages: tx.messages_sent, - events: tx.events - .map((evt: any, evtIndex: number) => ({ - fromAddress: evt.from_address, - keys: evt.keys, - data: evt.data, - index: evtIndex, - })), + events: tx.events.map((evt: any, evtIndex: number) => ({ + fromAddress: evt.from_address, + keys: evt.keys, + data: evt.data, + index: evtIndex, + })), }; // Add the transformed transaction and receipt to the list diff --git a/scripts/install_hooks.sh b/scripts/install_hooks.sh deleted file mode 100755 index 7dbb86105..000000000 --- a/scripts/install_hooks.sh +++ /dev/null @@ -1,4 +0,0 @@ -cp .hooks/pre-push .git/hooks/pre-push -chmod +x .git/hooks/pre-push - -echo "Git hooks installed successfully."