diff --git a/.github/workflows/_shared-docker-clone.yaml b/.github/workflows/_shared-docker-clone.yaml new file mode 100644 index 00000000..89981ac2 --- /dev/null +++ b/.github/workflows/_shared-docker-clone.yaml @@ -0,0 +1,67 @@ + +name: Reusable docker clone workflow +on: + workflow_call: + inputs: + source_repository: + description: 'Source Docker Hub Repository' + default: '' + required: false + type: string + target_repository: + description: 'Target Docker Hub Repository' + default: '' + required: false + type: string + tags: + description: 'Docker Image Tags (JSON)' + default: '' + required: false + type: string + secrets: + DOCKERHUB_USERNAME: + description: 'Docker Hub Username' + required: false + DOCKERHUB_TOKEN: + description: 'Docker Hub Token' + required: false + +# shared build jobs +jobs: + clone_docker_images: + name: Clone docker images to ${{ inputs.target_repository }} + if: ${{ inputs.tags }} + runs-on: ubuntu-latest + strategy: + matrix: + tag: ${{ fromJSON(inputs.tags) }} + steps: + + # prepare docker + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Get tag names + id: tags + run: | + tagstr="${{ matrix.tag }}" + tagparts=(${tagstr//:/ }) + echo "source_tag=${tagparts[0]}" >> $GITHUB_OUTPUT + if [ -z "${tagparts[1]}" ]; then + echo "target_tag=${tagparts[0]}" >> $GITHUB_OUTPUT + else + echo "target_tag=${tagparts[1]}" >> $GITHUB_OUTPUT + fi + + # build multiarch image + - name: "Copy docker image: ${{ steps.tags.outputs.target_tag }}" + run: | + docker buildx imagetools create -t ${{ inputs.target_repository }}:${{ steps.tags.outputs.target_tag }} \ + ${{ inputs.source_repository }}:${{ steps.tags.outputs.source_tag }}-amd64 \ + ${{ inputs.source_repository }}:${{ steps.tags.outputs.source_tag }}-arm64 + diff --git a/.github/workflows/build-dev.yml 
b/.github/workflows/build-dev.yml index 01b7565d..a8689c44 100644 --- a/.github/workflows/build-dev.yml +++ b/.github/workflows/build-dev.yml @@ -27,9 +27,8 @@ jobs: - name: "Load PR info" id: loadinfo run: | - run_builds="true" - has_docker_image_label="${{ contains(github.event.pull_request.labels.*.name, 'build-docker-image') }}" + run_builds="$has_docker_image_label" echo "docker image label: $has_docker_image_label" branch_name="${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" @@ -48,6 +47,7 @@ jobs: if [[ ! -z "$manual_tag" ]]; then has_docker_image_label="true" branch_name="$manual_tag" + run_builds="true" fi if [[ "$branch_name" == "master" ]] || [[ "$branch_name" =~ ^v[0-9] ]]; then @@ -60,13 +60,9 @@ jobs: echo "build_docker=$has_docker_image_label" >> $GITHUB_OUTPUT echo "docker_tag=$branch_name" >> $GITHUB_OUTPUT - check_source: - name: "Run code checks" - uses: ./.github/workflows/_shared-check.yaml - build_binaries: name: "Build Dora" - needs: [prinfo, check_source] + needs: [prinfo] if: ${{ needs.prinfo.outputs.run_builds == 'true' }} uses: ./.github/workflows/_shared-build.yaml with: diff --git a/.github/workflows/build-master.yml b/.github/workflows/build-master.yml index e1227840..20d49e4f 100644 --- a/.github/workflows/build-master.yml +++ b/.github/workflows/build-master.yml @@ -11,6 +11,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + contents: write + jobs: check_source: @@ -32,6 +35,18 @@ jobs: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + clone_docker_images: + name: "Copy docker images" + needs: [build_binaries] + uses: ./.github/workflows/_shared-docker-clone.yaml + with: + source_repository: "ethpandaops/dora" + target_repository: "pk910/dora-the-explorer" + tags: "['master:unstable']" + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_PK_USERNAME }} + DOCKERHUB_TOKEN: ${{ 
secrets.DOCKERHUB_PK_TOKEN }} + create_snapshot_release: name: Create snapshot release needs: [build_binaries] @@ -101,16 +116,16 @@ jobs: ## Latest automatically built executables. (Unstable development snapshot) Built from master branch (commit: ${{ github.sha }}) - Please read the [wiki](https://github.com/pk910/dora/wiki) for setup / configuration instructions. + Please read the [wiki](https://github.com/ethpandaops/dora/wiki) for setup / configuration instructions. ### Release Artifacts | Release File | Description | | ------------- | ------------- | - | [dora_snapshot_windows_amd64.zip](https://github.com/pk910/dora/releases/download/snapshot/dora_snapshot_windows_amd64.zip) | dora executables for windows/amd64 | - | [dora_snapshot_linux_amd64.tar.gz](https://github.com/pk910/dora/releases/download/snapshot/dora_snapshot_linux_amd64.tar.gz) | dora executables for linux/amd64 | - | [dora_snapshot_linux_arm64.tar.gz](https://github.com/pk910/dora/releases/download/snapshot/dora_snapshot_linux_arm64.tar.gz) | dora executables for linux/arm64 | - | [dora_snapshot_darwin_amd64.tar.gz](https://github.com/pk910/dora/releases/download/snapshot/dora_snapshot_darwin_amd64.tar.gz) | dora executable for macos/amd64 | - | [dora_snapshot_darwin_arm64.tar.gz](https://github.com/pk910/dora/releases/download/snapshot/dora_snapshot_darwin_arm64.tar.gz) | dora executable for macos/arm64 | + | [dora_snapshot_windows_amd64.zip](https://github.com/ethpandaops/dora/releases/download/snapshot/dora_snapshot_windows_amd64.zip) | dora executables for windows/amd64 | + | [dora_snapshot_linux_amd64.tar.gz](https://github.com/ethpandaops/dora/releases/download/snapshot/dora_snapshot_linux_amd64.tar.gz) | dora executables for linux/amd64 | + | [dora_snapshot_linux_arm64.tar.gz](https://github.com/ethpandaops/dora/releases/download/snapshot/dora_snapshot_linux_arm64.tar.gz) | dora executables for linux/arm64 | + | 
[dora_snapshot_darwin_amd64.tar.gz](https://github.com/ethpandaops/dora/releases/download/snapshot/dora_snapshot_darwin_amd64.tar.gz) | dora executable for macos/amd64 | + | [dora_snapshot_darwin_arm64.tar.gz](https://github.com/ethpandaops/dora/releases/download/snapshot/dora_snapshot_darwin_arm64.tar.gz) | dora executable for macos/arm64 | env: GITHUB_TOKEN: ${{ github.token }} diff --git a/.github/workflows/build-release.yml b/.github/workflows/build-release.yml index 01d847e3..bfd81a4a 100644 --- a/.github/workflows/build-release.yml +++ b/.github/workflows/build-release.yml @@ -8,6 +8,9 @@ on: description: "Version Number ('0.x.y')" required: true +permissions: + contents: write + jobs: build_binaries: name: "Build Dora" @@ -22,12 +25,43 @@ jobs: secrets: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + + clone_docker_images: + name: "Copy docker images" + needs: [build_binaries] + uses: ./.github/workflows/_shared-docker-clone.yaml + with: + source_repository: "ethpandaops/dora" + target_repository: "pk910/dora-the-explorer" + tags: "['v${{ inputs.version }}','v${{ inputs.version }}:latest']" + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_PK_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_PK_TOKEN }} create_release: name: Create Release needs: [build_binaries] runs-on: ubuntu-latest steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 100 + ref: ${{ github.sha }} + - name: "Generate release changelog" + id: changelog + run: | + git fetch --tags + prev_tag=$(git tag --sort=-version:refname | grep -e "^v[0-9.]*$" | head -n 1) + echo "previous release: $prev_tag" + if [ "$prev_tag" ]; then + changelog=$(git log --oneline --no-decorate $prev_tag..HEAD) + else + changelog=$(git log --oneline --no-decorate) + fi + echo "changelog<> $GITHUB_OUTPUT + echo " - ${changelog//$'\n'/$'\n' - }" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + # download build artifacts - name: "Download build 
artifacts" uses: actions/download-artifact@v3 @@ -42,18 +76,18 @@ jobs: release_name: "v${{ inputs.version }}" tag_name: "v${{ inputs.version }}" body: | - ### Major Changes - ... + ### Changes + ${{ steps.changelog.outputs.changelog }} ### Release Artifacts - Please read through the [wiki](https://github.com/pk910/dora/wiki) for setup & configuration instructions. + Please read through the [wiki](https://github.com/ethpandaops/dora/wiki) for setup & configuration instructions. | Release File | Description | | ------------- | ------------- | - | [dora_${{ inputs.version }}_windows_amd64.zip](https://github.com/pk910/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_windows_amd64.zip) | dora executables for windows/amd64 | - | [dora_${{ inputs.version }}_linux_amd64.tar.gz](https://github.com/pk910/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_linux_amd64.tar.gz) | dora executables for linux/amd64 | - | [dora_${{ inputs.version }}_linux_arm64.tar.gz](https://github.com/pk910/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_linux_arm64.tar.gz) | dora executables for linux/arm64 | - | [dora_${{ inputs.version }}_darwin_amd64.tar.gz](https://github.com/pk910/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_darwin_amd64.tar.gz) | dora executable for macos/amd64 | - | [dora_${{ inputs.version }}_darwin_arm64.tar.gz](https://github.com/pk910/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_darwin_arm64.tar.gz) | dora executable for macos/arm64 | + | [dora_${{ inputs.version }}_windows_amd64.zip](https://github.com/ethpandaops/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_windows_amd64.zip) | dora executables for windows/amd64 | + | [dora_${{ inputs.version }}_linux_amd64.tar.gz](https://github.com/ethpandaops/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_linux_amd64.tar.gz) | dora executables for 
linux/amd64 | + | [dora_${{ inputs.version }}_linux_arm64.tar.gz](https://github.com/ethpandaops/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_linux_arm64.tar.gz) | dora executables for linux/arm64 | + | [dora_${{ inputs.version }}_darwin_amd64.tar.gz](https://github.com/ethpandaops/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_darwin_amd64.tar.gz) | dora executable for macos/amd64 | + | [dora_${{ inputs.version }}_darwin_arm64.tar.gz](https://github.com/ethpandaops/dora/releases/download/v${{ inputs.version }}/dora_${{ inputs.version }}_darwin_arm64.tar.gz) | dora executable for macos/arm64 | env: GITHUB_TOKEN: ${{ github.token }} diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml new file mode 100644 index 00000000..043d11fb --- /dev/null +++ b/.github/workflows/test-pr.yml @@ -0,0 +1,17 @@ + +name: Check PR + +on: + pull_request: + branches: [ "master" ] + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + check_source: + name: "Run code checks" + uses: ./.github/workflows/_shared-check.yaml + diff --git a/Dockerfile b/Dockerfile index 73aebc81..4cb7aabe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ RUN make build RELEASE=$release GOOS=$TARGETOS GOARCH=$TARGETARCH # final stage FROM debian:stable-slim WORKDIR /app -RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates && apt-get clean && rm -rf /var/lib/apt/lists/* RUN update-ca-certificates COPY --from=build-env /src/bin /app EXPOSE 8080 diff --git a/Dockerfile-stub b/Dockerfile-stub index 05d39e78..e4f4587d 100644 --- a/Dockerfile-stub +++ b/Dockerfile-stub @@ -2,7 +2,7 @@ # final stage FROM debian:stable-slim WORKDIR /app -RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates +RUN 
apt-get update && apt-get install -y --no-install-recommends ca-certificates && apt-get clean && rm -rf /var/lib/apt/lists/* RUN update-ca-certificates COPY bin/* /app EXPOSE 8080 diff --git a/Makefile b/Makefile index c21d4628..57b1a190 100644 --- a/Makefile +++ b/Makefile @@ -2,9 +2,9 @@ BUILDTIME := $(shell date -u '+%Y-%m-%dT%H:%M:%SZ') VERSION := $(shell git rev-parse --short HEAD) -GOLDFLAGS += -X 'github.com/pk910/dora/utils.BuildVersion="$(VERSION)"' -GOLDFLAGS += -X 'github.com/pk910/dora/utils.Buildtime="$(BUILDTIME)"' -GOLDFLAGS += -X 'github.com/pk910/dora/utils.BuildRelease="$(RELEASE)"' +GOLDFLAGS += -X 'github.com/ethpandaops/dora/utils.BuildVersion="$(VERSION)"' +GOLDFLAGS += -X 'github.com/ethpandaops/dora/utils.Buildtime="$(BUILDTIME)"' +GOLDFLAGS += -X 'github.com/ethpandaops/dora/utils.BuildRelease="$(RELEASE)"' .PHONY: all test clean diff --git a/README.md b/README.md index 5840f671..c11ea7ee 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # Dora the Beaconchain Explorer -[![Badge](https://github.com/pk910/dora/actions/workflows/build-master.yml/badge.svg)](https://github.com/pk910/dora/actions?query=workflow%3A%22Build+master%22) -[![Go Report Card](https://goreportcard.com/badge/github.com/pk910/dora)](https://goreportcard.com/report/github.com/pk910/dora) -[![GitHub release (latest by date)](https://img.shields.io/github/v/release/pk910/dora?label=Latest%20Release)](https://github.com/pk910/dora/releases/latest) +[![Badge](https://github.com/ethpandaops/dora/actions/workflows/build-master.yml/badge.svg)](https://github.com/ethpandaops/dora/actions?query=workflow%3A%22Build+master%22) +[![Go Report Card](https://goreportcard.com/badge/github.com/ethpandaops/dora)](https://goreportcard.com/report/github.com/ethpandaops/dora) +[![GitHub release (latest by date)](https://img.shields.io/github/v/release/ethpandaops/dora?label=Latest%20Release)](https://github.com/ethpandaops/dora/releases/latest) ## What is this? 
This is a lightweight beaconchain explorer. @@ -23,7 +23,7 @@ This "lightweight" explorer loads most of the information directly from an under * https://beaconlight.ephemery.dev/ # Setup & Configuration -Read through the [wiki](https://github.com/pk910/dora/wiki) for setup & configuration instructions. +Read through the [wiki](https://github.com/ethpandaops/dora/wiki) for setup & configuration instructions. ## Dependencies diff --git a/cache/redis_cache.go b/cache/redis_cache.go index 19c6746e..310b2ae0 100644 --- a/cache/redis_cache.go +++ b/cache/redis_cache.go @@ -9,7 +9,7 @@ import ( "github.com/go-redis/redis/v8" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/utils" ) type RedisCache struct { diff --git a/cache/tiered_cache.go b/cache/tiered_cache.go index d7e64154..a5a44208 100644 --- a/cache/tiered_cache.go +++ b/cache/tiered_cache.go @@ -9,7 +9,7 @@ import ( "github.com/coocood/freecache" "github.com/sirupsen/logrus" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/utils" ) // Tiered cache is a cache implementation combining a local & remote cache diff --git a/cmd/dora-explorer/main.go b/cmd/dora-explorer/main.go index a3e1760f..e8eb848e 100644 --- a/cmd/dora-explorer/main.go +++ b/cmd/dora-explorer/main.go @@ -10,12 +10,12 @@ import ( logger "github.com/sirupsen/logrus" "github.com/urfave/negroni" - "github.com/pk910/dora/db" - "github.com/pk910/dora/handlers" - "github.com/pk910/dora/services" - "github.com/pk910/dora/static" - "github.com/pk910/dora/types" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/handlers" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/static" + "github.com/ethpandaops/dora/types" + "github.com/ethpandaops/dora/utils" ) func main() { @@ -93,6 +93,7 @@ func startFrontend() { router.HandleFunc("/search", handlers.Search).Methods("GET") router.HandleFunc("/search/{type}", handlers.SearchAhead).Methods("GET") 
router.HandleFunc("/validators", handlers.Validators).Methods("GET") + router.HandleFunc("/validators/activity", handlers.ValidatorsActivity).Methods("GET") router.HandleFunc("/validator/{idxOrPubKey}", handlers.Validator).Methods("GET") router.HandleFunc("/validator/{index}/slots", handlers.ValidatorSlots).Methods("GET") diff --git a/config/holesky.chain.yml b/config/holesky.chain.yml index 1de37425..4f47b153 100644 --- a/config/holesky.chain.yml +++ b/config/holesky.chain.yml @@ -33,6 +33,10 @@ TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH: 18446744073709551615 CAPELLA_FORK_VERSION: 0x04017000 CAPELLA_FORK_EPOCH: 256 +# Deneb +DENEB_FORK_VERSION: 0x05017000 +DENEB_FORK_EPOCH: 29696 + # Time parameters # --------------------------------------------------------------- # 12 seconds diff --git a/config/holesky.names.yml b/config/holesky.names.yml index 1fbfacdb..b913f5aa 100644 --- a/config/holesky.names.yml +++ b/config/holesky.names.yml @@ -42,3 +42,10 @@ 1410000-1459999: "stakely" 1460031-1460353: "reth" 1460355-1461031: "reth" +1552771-1562771: "ef_devops" + +"withdrawal:0x00000000001d2f94eB88124eEC6E1af72225c52F": "CryptoDevHub" +"withdrawal:0x428614Fb30e3007e5d628D09e8BDB0CE9720FAdB": "Huawei" +"withdrawal:0xF0179dEC45a37423EAD4FaD5fCb136197872EAd9": "Lido" +"withdrawal:0x55fbd2d6643de2e436409b193812bd1ed1c70765": "Obol Network" +"withdrawal:0x0efB35a6D6b14e7F5eEEbAD10A2145A68C99772D": "Frax Finance" diff --git a/config/mainnet.chain.yml b/config/mainnet.chain.yml index 78228609..d11f59b4 100644 --- a/config/mainnet.chain.yml +++ b/config/mainnet.chain.yml @@ -49,7 +49,7 @@ CAPELLA_FORK_VERSION: 0x03000000 CAPELLA_FORK_EPOCH: 194048 # April 12, 2023, 10:27:35pm UTC # Deneb DENEB_FORK_VERSION: 0x04000000 -DENEB_FORK_EPOCH: 18446744073709551615 +DENEB_FORK_EPOCH: 269568 # March 13, 2024, 01:55:35pm UTC # EIP6110 EIP6110_FORK_VERSION: 0x05000000 # temporary stub EIP6110_FORK_EPOCH: 18446744073709551615 diff --git a/config/prater.chain.yml b/config/prater.chain.yml index 
f562805b..e0f4a04a 100644 --- a/config/prater.chain.yml +++ b/config/prater.chain.yml @@ -41,7 +41,7 @@ CAPELLA_FORK_VERSION: 0x03001020 CAPELLA_FORK_EPOCH: 162304 # Sharding DENEB_FORK_VERSION: 0x04001020 -DENEB_FORK_EPOCH: 18446744073709551615 +DENEB_FORK_EPOCH: 231680 # Time parameters # --------------------------------------------------------------- diff --git a/config/preset-gnosis.chain.yml b/config/preset-gnosis.chain.yml index 3e12cb04..3396a0c0 100644 --- a/config/preset-gnosis.chain.yml +++ b/config/preset-gnosis.chain.yml @@ -1,4 +1,4 @@ -# From https://github.com/gnosischain/configs/tree/main/presets/gnosis +# From https://github.com/gnosischain/specs/tree/master/consensus/preset/gnosis # Mainnet preset - Phase0 # Misc @@ -136,3 +136,16 @@ MAX_WITHDRAWALS_PER_PAYLOAD: 8 # --------------------------------------------------------------- # 2**13 (= 8192) validators MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP: 8192 + +# Mainnet preset - Deneb + +# Misc +# --------------------------------------------------------------- +# `uint64(4096)` +FIELD_ELEMENTS_PER_BLOB: 4096 +# `uint64(2**12)` (= 4096) +MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096 +# `uint64(6)` +MAX_BLOBS_PER_BLOCK: 6 +# `floorlog2(get_generalized_index(BeaconBlockBody, 'blob_kzg_commitments')) + 1 + ceillog2(MAX_BLOB_COMMITMENTS_PER_BLOCK)` = 4 + 1 + 12 = 17 +KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17 diff --git a/config/sepolia.chain.yml b/config/sepolia.chain.yml index ceee1752..65d8ba67 100644 --- a/config/sepolia.chain.yml +++ b/config/sepolia.chain.yml @@ -33,8 +33,8 @@ CAPELLA_FORK_VERSION: 0x90000072 CAPELLA_FORK_EPOCH: 56832 # Deneb -DENEB_FORK_VERSION: 0x04001020 -DENEB_FORK_EPOCH: 18446744073709551615 +DENEB_FORK_VERSION: 0x90000073 +DENEB_FORK_EPOCH: 132608 # Time parameters # --------------------------------------------------------------- @@ -62,7 +62,8 @@ EJECTION_BALANCE: 16000000000 MIN_PER_EPOCH_CHURN_LIMIT: 4 # 2**16 (= 65,536) CHURN_LIMIT_QUOTIENT: 65536 - +# [New in Deneb:EIP7514] 2**3 (= 8) 
+MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: 8 # Fork choice # --------------------------------------------------------------- @@ -75,228 +76,41 @@ DEPOSIT_CHAIN_ID: 11155111 DEPOSIT_NETWORK_ID: 11155111 DEPOSIT_CONTRACT_ADDRESS: 0x7f02C3E3c98b133055B8B348B2Ac625669Ed295D -# Mainnet preset - Altair - -# Updated penalty values -# --------------------------------------------------------------- -# 3 * 2**24 (= 50,331,648) -INACTIVITY_PENALTY_QUOTIENT_ALTAIR: 50331648 -# 2**6 (= 64) -MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR: 64 -# 2 -PROPORTIONAL_SLASHING_MULTIPLIER_ALTAIR: 2 - - -# Sync committee -# --------------------------------------------------------------- -# 2**9 (= 512) -SYNC_COMMITTEE_SIZE: 512 -# 2**8 (= 256) -EPOCHS_PER_SYNC_COMMITTEE_PERIOD: 256 - - -# Sync protocol -# --------------------------------------------------------------- -# 1 -MIN_SYNC_COMMITTEE_PARTICIPANTS: 1 -# SLOTS_PER_EPOCH * EPOCHS_PER_SYNC_COMMITTEE_PERIOD (= 32 * 256) -UPDATE_TIMEOUT: 8192 -# Mainnet preset - Bellatrix - -# Updated penalty values -# --------------------------------------------------------------- -# 2**24 (= 16,777,216) -INACTIVITY_PENALTY_QUOTIENT_BELLATRIX: 16777216 -# 2**5 (= 32) -MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX: 32 -# 3 -PROPORTIONAL_SLASHING_MULTIPLIER_BELLATRIX: 3 - -# Execution -# --------------------------------------------------------------- -# 2**30 (= 1,073,741,824) -MAX_BYTES_PER_TRANSACTION: 1073741824 -# 2**20 (= 1,048,576) -MAX_TRANSACTIONS_PER_PAYLOAD: 1048576 -# 2**8 (= 256) -BYTES_PER_LOGS_BLOOM: 256 -# 2**5 (= 32) -MAX_EXTRA_DATA_BYTES: 32 -# Minimal preset - Capella -# Mainnet preset - Custody Game - -# Time parameters -# --------------------------------------------------------------- -# 2**1 (= 2) epochs, 12.8 minutes -RANDAO_PENALTY_EPOCHS: 2 -# 2**15 (= 32,768) epochs, ~146 days -EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS: 32768 -# 2**14 (= 16,384) epochs ~73 days -EPOCHS_PER_CUSTODY_PERIOD: 16384 -# 2**11 (= 2,048) epochs, ~9 days 
-CUSTODY_PERIOD_TO_RANDAO_PADDING: 2048 -# 2**15 (= 32,768) epochs, ~146 days -MAX_CHUNK_CHALLENGE_DELAY: 32768 - - -# Max operations -# --------------------------------------------------------------- -# 2**8 (= 256) -MAX_CUSTODY_KEY_REVEALS: 256 -# 2**0 (= 1) -MAX_EARLY_DERIVED_SECRET_REVEALS: 1 -# 2**2 (= 2) -MAX_CUSTODY_CHUNK_CHALLENGES: 4 -# 2** 4 (= 16) -MAX_CUSTODY_CHUNK_CHALLENGE_RESP: 16 -# 2**0 (= 1) -MAX_CUSTODY_SLASHINGS: 1 - - -# Reward and penalty quotients -# --------------------------------------------------------------- -EARLY_DERIVED_SECRET_REVEAL_SLOT_REWARD_MULTIPLE: 2 -# 2**8 (= 256) -MINOR_REWARD_QUOTIENT: 256 -# Mainnet preset - Phase0 - -# Misc -# --------------------------------------------------------------- -# 2**6 (= 64) -MAX_COMMITTEES_PER_SLOT: 64 -# 2**7 (= 128) -TARGET_COMMITTEE_SIZE: 128 -# 2**11 (= 2,048) -MAX_VALIDATORS_PER_COMMITTEE: 2048 -# See issue 563 -SHUFFLE_ROUND_COUNT: 90 -# 4 -HYSTERESIS_QUOTIENT: 4 -# 1 (minus 0.25) -HYSTERESIS_DOWNWARD_MULTIPLIER: 1 -# 5 (plus 1.25) -HYSTERESIS_UPWARD_MULTIPLIER: 5 - - -# Fork Choice -# --------------------------------------------------------------- -# 2**3 (= 8) -SAFE_SLOTS_TO_UPDATE_JUSTIFIED: 8 - - -# Gwei values -# --------------------------------------------------------------- -# 2**0 * 10**9 (= 1,000,000,000) Gwei -MIN_DEPOSIT_AMOUNT: 1000000000 -# 2**5 * 10**9 (= 32,000,000,000) Gwei -MAX_EFFECTIVE_BALANCE: 32000000000 -# 2**0 * 10**9 (= 1,000,000,000) Gwei -EFFECTIVE_BALANCE_INCREMENT: 1000000000 - +# Networking +# --------------------------------------------------------------- +# `10 * 2**20` (= 10485760, 10 MiB) +GOSSIP_MAX_SIZE: 10485760 +# `2**10` (= 1024) +MAX_REQUEST_BLOCKS: 1024 +# `2**8` (= 256) +EPOCHS_PER_SUBNET_SUBSCRIPTION: 256 +# `MIN_VALIDATOR_WITHDRAWABILITY_DELAY + CHURN_LIMIT_QUOTIENT // 2` (= 33024, ~5 months) +MIN_EPOCHS_FOR_BLOCK_REQUESTS: 33024 +# `10 * 2**20` (=10485760, 10 MiB) +MAX_CHUNK_SIZE: 10485760 +# 5s +TTFB_TIMEOUT: 5 +# 10s +RESP_TIMEOUT: 10 
+ATTESTATION_PROPAGATION_SLOT_RANGE: 32 +# 500ms +MAXIMUM_GOSSIP_CLOCK_DISPARITY: 500 +MESSAGE_DOMAIN_INVALID_SNAPPY: 0x00000000 +MESSAGE_DOMAIN_VALID_SNAPPY: 0x01000000 +# 2 subnets per node +SUBNETS_PER_NODE: 2 +# 2**8 (= 64) +ATTESTATION_SUBNET_COUNT: 64 +ATTESTATION_SUBNET_EXTRA_BITS: 0 +# ceillog2(ATTESTATION_SUBNET_COUNT) + ATTESTATION_SUBNET_EXTRA_BITS +ATTESTATION_SUBNET_PREFIX_BITS: 6 -# Time parameters -# --------------------------------------------------------------- -# 2**0 (= 1) slots 12 seconds -MIN_ATTESTATION_INCLUSION_DELAY: 1 -# 2**5 (= 32) slots 6.4 minutes -SLOTS_PER_EPOCH: 32 -# 2**0 (= 1) epochs 6.4 minutes -MIN_SEED_LOOKAHEAD: 1 -# 2**2 (= 4) epochs 25.6 minutes -MAX_SEED_LOOKAHEAD: 4 -# 2**6 (= 64) epochs ~6.8 hours -EPOCHS_PER_ETH1_VOTING_PERIOD: 64 -# 2**13 (= 8,192) slots ~27 hours -SLOTS_PER_HISTORICAL_ROOT: 8192 -# 2**2 (= 4) epochs 25.6 minutes -MIN_EPOCHS_TO_INACTIVITY_PENALTY: 4 - - -# State list lengths -# --------------------------------------------------------------- -# 2**16 (= 65,536) epochs ~0.8 years -EPOCHS_PER_HISTORICAL_VECTOR: 65536 -# 2**13 (= 8,192) epochs ~36 days -EPOCHS_PER_SLASHINGS_VECTOR: 8192 -# 2**24 (= 16,777,216) historical roots, ~26,131 years -HISTORICAL_ROOTS_LIMIT: 16777216 -# 2**40 (= 1,099,511,627,776) validator spots -VALIDATOR_REGISTRY_LIMIT: 1099511627776 - - -# Reward and penalty quotients -# --------------------------------------------------------------- -# 2**6 (= 64) -BASE_REWARD_FACTOR: 64 -# 2**9 (= 512) -WHISTLEBLOWER_REWARD_QUOTIENT: 512 -# 2**3 (= 8) -PROPOSER_REWARD_QUOTIENT: 8 -# 2**26 (= 67,108,864) -INACTIVITY_PENALTY_QUOTIENT: 67108864 -# 2**7 (= 128) (lower safety margin at Phase 0 genesis) -MIN_SLASHING_PENALTY_QUOTIENT: 128 -# 1 (lower safety margin at Phase 0 genesis) -PROPORTIONAL_SLASHING_MULTIPLIER: 1 - - -# Max operations per block -# --------------------------------------------------------------- -# 2**4 (= 16) -MAX_PROPOSER_SLASHINGS: 16 -# 2**1 (= 2) -MAX_ATTESTER_SLASHINGS: 2 
-# 2**7 (= 128) -MAX_ATTESTATIONS: 128 -# 2**4 (= 16) -MAX_DEPOSITS: 16 -# 2**4 (= 16) -MAX_VOLUNTARY_EXITS: 16 -# Mainnet preset - Sharding - -# Misc -# --------------------------------------------------------------- -# 2**10 (= 1,024) -MAX_SHARDS: 1024 -# 2**6 (= 64) -INITIAL_ACTIVE_SHARDS: 64 -# 2**3 (= 8) -SAMPLE_PRICE_ADJUSTMENT_COEFFICIENT: 8 -# 2**4 (= 16) -MAX_SHARD_PROPOSER_SLASHINGS: 16 -# -MAX_SHARD_HEADERS_PER_SHARD: 4 -# 2**8 (= 256) -SHARD_STATE_MEMORY_SLOTS: 256 -# 2**40 (= 1,099,511,627,776) -BLOB_BUILDER_REGISTRY_LIMIT: 1099511627776 - -# Shard blob samples -# --------------------------------------------------------------- -# 2**11 (= 2,048) -MAX_SAMPLES_PER_BLOCK: 2048 -# 2**10 (= 1,1024) -TARGET_SAMPLES_PER_BLOCK: 1024 - -# Gwei values -# --------------------------------------------------------------- -# 2**33 (= 8,589,934,592) Gwei -MAX_SAMPLE_PRICE: 8589934592 -# 2**3 (= 8) Gwei -MIN_SAMPLE_PRICE: 8 - -# Max operations per block -# --------------------------------------------------------------- -# 2**4 (= 16) -MAX_BLS_TO_EXECUTION_CHANGES: 16 - - -# Execution -# --------------------------------------------------------------- -# [customized] 2**2 (= 4) -MAX_WITHDRAWALS_PER_PAYLOAD: 16 - -# Withdrawals processing -# --------------------------------------------------------------- -# [customized] 2**4 (= 16) validators -MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP: 16384 \ No newline at end of file +# Deneb +# `2**7` (=128) +MAX_REQUEST_BLOCKS_DENEB: 128 +# MAX_REQUEST_BLOCKS_DENEB * MAX_BLOBS_PER_BLOCK +MAX_REQUEST_BLOB_SIDECARS: 768 +# `2**12` (= 4096 epochs, ~18 days) +MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096 +# `6` +BLOB_SIDECAR_SUBNET_COUNT: 6 \ No newline at end of file diff --git a/db/db.go b/db/db.go index 68360ff5..3433a479 100644 --- a/db/db.go +++ b/db/db.go @@ -13,9 +13,9 @@ import ( "github.com/pressly/goose/v3" "github.com/sirupsen/logrus" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/types" - "github.com/pk910/dora/utils" + 
"github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/types" + "github.com/ethpandaops/dora/utils" "github.com/jackc/pgx/v4/pgxpool" _ "github.com/jackc/pgx/v4/stdlib" @@ -278,7 +278,7 @@ func DeleteValidatorNames(validatorNames []uint64, tx *sqlx.Tx) error { var sql strings.Builder fmt.Fprint(&sql, `DELETE FROM validator_names WHERE "index" IN (`) argIdx := 0 - args := make([]any, len(validatorNames)*2) + args := make([]any, len(validatorNames)) for i, validatorName := range validatorNames { if i > 0 { fmt.Fprintf(&sql, ", ") diff --git a/go.mod b/go.mod index eac6b208..70472831 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/pk910/dora +module github.com/ethpandaops/dora go 1.21 diff --git a/handlers/clients.go b/handlers/clients.go index 48c8d6f0..d0aeb0bb 100644 --- a/handlers/clients.go +++ b/handlers/clients.go @@ -4,10 +4,10 @@ import ( "net/http" "time" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/handlers/epoch.go b/handlers/epoch.go index c6ecd228..4b78ec79 100644 --- a/handlers/epoch.go +++ b/handlers/epoch.go @@ -9,10 +9,10 @@ import ( "github.com/gorilla/mux" "github.com/sirupsen/logrus" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" ) // Epoch will return the main "epoch" page using a go template diff --git a/handlers/epochs.go b/handlers/epochs.go index 07b6dfe7..ef3b0b25 100644 --- a/handlers/epochs.go +++ b/handlers/epochs.go @@ -7,10 +7,10 @@ import ( 
"strconv" "time" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/handlers/error.go b/handlers/error.go index 2d5b07a4..ce3a5f3f 100644 --- a/handlers/error.go +++ b/handlers/error.go @@ -8,10 +8,10 @@ import ( "strings" "time" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/handlers/forks.go b/handlers/forks.go index 36904b0e..6e1b5c7b 100644 --- a/handlers/forks.go +++ b/handlers/forks.go @@ -5,11 +5,11 @@ import ( "sort" "time" - "github.com/pk910/dora/db" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/handlers/index.go b/handlers/index.go index 64622a92..6c109e1b 100644 --- a/handlers/index.go +++ b/handlers/index.go @@ -12,12 +12,12 @@ import ( "time" v1 "github.com/attestantio/go-eth2-client/api/v1" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/services" + 
"github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/handlers/pageData.go b/handlers/pageData.go index 5206ccde..f05d1b70 100644 --- a/handlers/pageData.go +++ b/handlers/pageData.go @@ -10,8 +10,8 @@ import ( logger "github.com/sirupsen/logrus" - "github.com/pk910/dora/types" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/types" + "github.com/ethpandaops/dora/utils" ) var layoutTemplateFiles = []string{ @@ -49,6 +49,7 @@ func InitPageData(w http.ResponseWriter, r *http.Request, active, path, title st Year: time.Now().UTC().Year(), ExplorerTitle: utils.Config.Frontend.SiteName, ExplorerSubtitle: utils.Config.Frontend.SiteSubtitle, + ExplorerLogo: utils.Config.Frontend.SiteLogo, ChainSlotsPerEpoch: utils.Config.Chain.Config.SlotsPerEpoch, ChainSecondsPerSlot: utils.Config.Chain.Config.SecondsPerSlot, ChainGenesisTimestamp: utils.Config.Chain.GenesisTimestamp, @@ -60,6 +61,10 @@ func InitPageData(w http.ResponseWriter, r *http.Request, active, path, title st MainMenuItems: createMenuItems(active, isMainnet), } + if utils.Config.Frontend.SiteDescription != "" { + data.Meta.Description = utils.Config.Frontend.SiteDescription + } + acceptedLangs := strings.Split(r.Header.Get("Accept-Language"), ",") if len(acceptedLangs) > 0 { if strings.Contains(acceptedLangs[0], "ru") || strings.Contains(acceptedLangs[0], "RU") { @@ -109,6 +114,11 @@ func createMenuItems(active string, isMain bool) []types.MainMenuItem { Path: "/validators", Icon: "fa-table", }, + { + Label: "Validator Activity", + Path: "/validators/activity", + Icon: "fa-tachometer", + }, }, }, { diff --git a/handlers/search.go b/handlers/search.go index a8d09925..c60bd7d6 100644 --- a/handlers/search.go +++ b/handlers/search.go @@ -13,12 +13,12 @@ import ( "github.com/gorilla/mux" "github.com/sirupsen/logrus" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - 
"github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" ) var searchLikeRE = regexp.MustCompile(`^[0-9a-fA-F]{0,96}$`) diff --git a/handlers/slot.go b/handlers/slot.go index e4bbaea6..8914a2a9 100644 --- a/handlers/slot.go +++ b/handlers/slot.go @@ -19,14 +19,14 @@ import ( "github.com/juliangruber/go-intersect" "github.com/sirupsen/logrus" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/rpc" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/rpc" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" ) // Index will return the main "index" page using a go template diff --git a/handlers/slots.go b/handlers/slots.go index 5af5fdd3..12a52f07 100644 --- a/handlers/slots.go +++ b/handlers/slots.go @@ -8,10 +8,10 @@ import ( "strconv" "time" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/handlers/slots_filtered.go b/handlers/slots_filtered.go index 0fc0dc15..7cd19510 100644 --- a/handlers/slots_filtered.go +++ b/handlers/slots_filtered.go @@ 
-7,11 +7,11 @@ import ( "strconv" "time" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/handlers/validator.go b/handlers/validator.go index 4241a48b..15c0e2a5 100644 --- a/handlers/validator.go +++ b/handlers/validator.go @@ -14,11 +14,11 @@ import ( "github.com/gorilla/mux" "github.com/sirupsen/logrus" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" ) // Validator will return the main "validator" page using a go template diff --git a/handlers/validator_slots.go b/handlers/validator_slots.go index c3be95f2..9bb12db0 100644 --- a/handlers/validator_slots.go +++ b/handlers/validator_slots.go @@ -9,11 +9,11 @@ import ( "github.com/gorilla/mux" "github.com/sirupsen/logrus" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" ) // Slots will return the main "slots" page using a go template diff --git a/handlers/validators.go b/handlers/validators.go index e768f202..28f0601e 100644 --- a/handlers/validators.go +++ b/handlers/validators.go @@ -3,6 
+3,7 @@ package handlers import ( "bytes" "encoding/hex" + "encoding/json" "fmt" "net/http" "net/url" @@ -14,10 +15,10 @@ import ( v1 "github.com/attestantio/go-eth2-client/api/v1" "golang.org/x/exp/maps" - "github.com/pk910/dora/services" - "github.com/pk910/dora/templates" - "github.com/pk910/dora/types/models" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) @@ -40,6 +41,12 @@ func Validators(w http.ResponseWriter, r *http.Request) { if urlArgs.Has("c") { pageSize, _ = strconv.ParseUint(urlArgs.Get("c"), 10, 64) } + if urlArgs.Has("json") && pageSize > 10000 { + pageSize = 10000 + } else if !urlArgs.Has("json") && pageSize > 1000 { + pageSize = 1000 + } + var filterPubKey string var filterIndex string var filterName string @@ -72,6 +79,16 @@ func Validators(w http.ResponseWriter, r *http.Request) { handlePageError(w, r, pageError) return } + + if urlArgs.Has("json") { + w.Header().Set("Content-Type", "application/json") + err := json.NewEncoder(w).Encode(data.Data) + if err != nil { + logrus.WithError(err).Error("error encoding index data") + http.Error(w, "Internal server error", http.StatusServiceUnavailable) + } + } + w.Header().Set("Content-Type", "text/html") if handleTemplateError(w, r, "validators.go", "Validators", "", pageTemplate.ExecuteTemplate(w, "layout", data)) != nil { return // an error has occurred and was processed @@ -238,10 +255,6 @@ func buildValidatorsPageData(firstValIdx uint64, pageSize uint64, sortOrder stri firstValIdx = totalValidatorCount } - if pageSize > 100 { - pageSize = 100 - } - pagesBefore := firstValIdx / pageSize if (firstValIdx % pageSize) > 0 { pagesBefore++ diff --git a/handlers/validators_activity.go b/handlers/validators_activity.go new file mode 100644 index 00000000..7a6bc44e --- /dev/null +++ b/handlers/validators_activity.go @@ -0,0 +1,272 
@@ +package handlers + +import ( + "fmt" + "net/http" + "net/url" + "sort" + "strconv" + "strings" + "time" + + "github.com/ethpandaops/dora/services" + "github.com/ethpandaops/dora/templates" + "github.com/ethpandaops/dora/types/models" + "github.com/sirupsen/logrus" + "golang.org/x/exp/maps" +) + +// ValidatorsActivity will return the filtered "slots" page using a go template +func ValidatorsActivity(w http.ResponseWriter, r *http.Request) { + var pageTemplateFiles = append(layoutTemplateFiles, + "validators_activity/validators_activity.html", + "_svg/professor.html", + ) + + var pageTemplate = templates.GetTemplate(pageTemplateFiles...) + data := InitPageData(w, r, "validators", "/validators/activity", "Validators Activity", pageTemplateFiles) + + urlArgs := r.URL.Query() + var pageSize uint64 = 50 + if urlArgs.Has("c") { + pageSize, _ = strconv.ParseUint(urlArgs.Get("c"), 10, 64) + } + var pageIdx uint64 = 0 + if urlArgs.Has("s") { + pageIdx, _ = strconv.ParseUint(urlArgs.Get("s"), 10, 64) + } + + var sortOrder string + if urlArgs.Has("o") { + sortOrder = urlArgs.Get("o") + } + if sortOrder == "" { + sortOrder = "group" + } + + var groupBy uint64 + if urlArgs.Has("group") { + groupBy, _ = strconv.ParseUint(urlArgs.Get("group"), 10, 64) + } + if groupBy == 0 { + if services.GlobalBeaconService.GetValidatorNamesCount() > 0 { + groupBy = 3 + } else { + groupBy = 1 + } + } + + var pageError error + pageError = services.GlobalCallRateLimiter.CheckCallLimit(r, 2) + if pageError == nil { + data.Data, pageError = getValidatorsActivityPageData(pageIdx, pageSize, sortOrder, groupBy) + } + if pageError != nil { + handlePageError(w, r, pageError) + return + } + w.Header().Set("Content-Type", "text/html") + if handleTemplateError(w, r, "slots_filtered.go", "SlotsFiltered", "", pageTemplate.ExecuteTemplate(w, "layout", data)) != nil { + return // an error has occurred and was processed + } +} + +func getValidatorsActivityPageData(pageIdx uint64, pageSize uint64, sortOrder 
string, groupBy uint64) (*models.ValidatorsActivityPageData, error) { + pageData := &models.ValidatorsActivityPageData{} + pageCacheKey := fmt.Sprintf("validators_activiy:%v:%v:%v:%v", pageIdx, pageSize, sortOrder, groupBy) + pageRes, pageErr := services.GlobalFrontendCache.ProcessCachedPage(pageCacheKey, true, pageData, func(processingPage *services.FrontendCacheProcessingPage) interface{} { + processingPage.CacheTimeout = 10 * time.Second + return buildValidatorsActivityPageData(pageIdx, pageSize, sortOrder, groupBy) + }) + if pageErr == nil && pageRes != nil { + resData, resOk := pageRes.(*models.ValidatorsActivityPageData) + if !resOk { + return nil, ErrInvalidPageModel + } + pageData = resData + } + return pageData, pageErr +} + +func buildValidatorsActivityPageData(pageIdx uint64, pageSize uint64, sortOrder string, groupBy uint64) *models.ValidatorsActivityPageData { + filterArgs := url.Values{} + filterArgs.Add("group", fmt.Sprintf("%v", groupBy)) + + pageData := &models.ValidatorsActivityPageData{ + ViewOptionGroupBy: groupBy, + Sorting: sortOrder, + } + logrus.Debugf("validators_activity page called: %v:%v [%v]", pageIdx, pageSize, groupBy) + if pageIdx == 0 { + pageData.IsDefaultPage = true + } + + if pageSize > 100 { + pageSize = 100 + } + pageData.PageSize = pageSize + pageData.CurrentPageIndex = pageIdx + 1 + if pageIdx >= 1 { + pageData.PrevPageIndex = pageIdx + } + pageData.LastPageIndex = 0 + + // group validators + validatorGroupMap := map[string]*models.ValidatorsActiviyPageDataGroup{} + validatorSet := services.GlobalBeaconService.GetCachedValidatorSet() + activityMap, _ := services.GlobalBeaconService.GetValidatorActivity() + + for vIdx, validator := range validatorSet { + var groupKey string + var groupName string + + switch groupBy { + case 1: + groupIdx := uint64(vIdx) / 100000 + groupKey = fmt.Sprintf("%06d", groupIdx) + groupName = fmt.Sprintf("%v - %v", groupIdx*100000, (groupIdx+1)*100000) + case 2: + groupIdx := uint64(vIdx) / 10000 + 
groupKey = fmt.Sprintf("%06d", groupIdx) + groupName = fmt.Sprintf("%v - %v", groupIdx*10000, (groupIdx+1)*10000) + case 3: + groupName = services.GlobalBeaconService.GetValidatorName(uint64(vIdx)) + groupKey = strings.ToLower(groupName) + } + + validatorGroup := validatorGroupMap[groupKey] + if validatorGroup == nil { + validatorGroup = &models.ValidatorsActiviyPageDataGroup{ + Group: groupName, + GroupLower: groupKey, + Validators: 0, + Activated: 0, + Online: 0, + Offline: 0, + Exited: 0, + Slashed: 0, + } + validatorGroupMap[groupKey] = validatorGroup + } + + validatorGroup.Validators++ + + statusStr := validator.Status.String() + if strings.HasPrefix(statusStr, "active_") { + validatorGroup.Activated++ + + if activityMap[uint64(vIdx)] > 0 { + validatorGroup.Online++ + } else { + validatorGroup.Offline++ + } + } + if strings.HasPrefix(statusStr, "exited_") || strings.HasPrefix(statusStr, "withdrawal_") { + validatorGroup.Exited++ + } + if strings.HasSuffix(statusStr, "_slashed") { + validatorGroup.Slashed++ + } + } + + // sort / filter groups + validatorGroups := maps.Values(validatorGroupMap) + switch sortOrder { + case "group": + sort.Slice(validatorGroups, func(a, b int) bool { + return strings.Compare(validatorGroups[a].GroupLower, validatorGroups[b].GroupLower) < 0 + }) + pageData.IsDefaultSorting = true + case "group-d": + sort.Slice(validatorGroups, func(a, b int) bool { + return strings.Compare(validatorGroups[a].GroupLower, validatorGroups[b].GroupLower) > 0 + }) + case "count": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Validators < validatorGroups[b].Validators + }) + case "count-d": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Validators > validatorGroups[b].Validators + }) + case "active": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Activated < validatorGroups[b].Activated + }) + case "active-d": + sort.Slice(validatorGroups, func(a, b int) 
bool { + return validatorGroups[a].Activated > validatorGroups[b].Activated + }) + case "online": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Online < validatorGroups[b].Online + }) + case "online-d": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Online > validatorGroups[b].Online + }) + case "offline": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Offline < validatorGroups[b].Offline + }) + case "offline-d": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Offline > validatorGroups[b].Offline + }) + case "exited": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Exited < validatorGroups[b].Exited + }) + case "exited-d": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Exited > validatorGroups[b].Exited + }) + case "slashed": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Slashed < validatorGroups[b].Slashed + }) + case "slashed-d": + sort.Slice(validatorGroups, func(a, b int) bool { + return validatorGroups[a].Slashed > validatorGroups[b].Slashed + }) + } + + groupCount := uint64(len(validatorGroups)) + + startIdx := pageIdx * pageSize + endIdx := startIdx + pageSize + if startIdx >= groupCount { + validatorGroups = []*models.ValidatorsActiviyPageDataGroup{} + } else if endIdx > groupCount { + validatorGroups = validatorGroups[startIdx:] + } else { + validatorGroups = validatorGroups[startIdx:endIdx] + } + pageData.Groups = validatorGroups + pageData.GroupCount = uint64(len(validatorGroups)) + + pageData.TotalPages = groupCount / pageSize + if groupCount%pageSize != 0 { + pageData.TotalPages++ + } + pageData.LastPageIndex = pageData.TotalPages - 1 + pageData.FirstGroup = startIdx + pageData.LastGroup = endIdx + + if endIdx <= groupCount { + pageData.NextPageIndex = pageIdx + 1 + } + + sortingArg := "" + if sortOrder != "group" { + 
sortingArg = fmt.Sprintf("&o=%v", sortOrder) + } + + pageData.ViewPageLink = fmt.Sprintf("/validators/activity?%v&c=%v", filterArgs.Encode(), pageData.PageSize) + pageData.FirstPageLink = fmt.Sprintf("/validators/activity?%v%v&c=%v", filterArgs.Encode(), sortingArg, pageData.PageSize) + pageData.PrevPageLink = fmt.Sprintf("/validators/activity?%v%v&c=%v&s=%v", filterArgs.Encode(), sortingArg, pageData.PageSize, pageData.PrevPageIndex) + pageData.NextPageLink = fmt.Sprintf("/validators/activity?%v%v&c=%v&s=%v", filterArgs.Encode(), sortingArg, pageData.PageSize, pageData.NextPageIndex) + pageData.LastPageLink = fmt.Sprintf("/validators/activity?%v%v&c=%v&s=%v", filterArgs.Encode(), sortingArg, pageData.PageSize, pageData.LastPageIndex) + + return pageData +} diff --git a/indexer/blobstore.go b/indexer/blobstore.go index 7adebf54..dca9e48a 100644 --- a/indexer/blobstore.go +++ b/indexer/blobstore.go @@ -12,10 +12,10 @@ import ( "github.com/jmoiron/sqlx" "github.com/sirupsen/logrus" - "github.com/pk910/dora/aws" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/aws" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/utils" ) var logger_blobs = logrus.StandardLogger().WithField("module", "blobstore") diff --git a/indexer/block_ssz.go b/indexer/block_ssz.go index fc4bb6cb..89cdf601 100644 --- a/indexer/block_ssz.go +++ b/indexer/block_ssz.go @@ -10,7 +10,7 @@ import ( "github.com/attestantio/go-eth2-client/spec/deneb" "github.com/attestantio/go-eth2-client/spec/phase0" "github.com/attestantio/go-eth2-client/spec/verkle" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/utils" ) var jsonVersionOffset uint64 = 0x70000000 diff --git a/indexer/cache.go b/indexer/cache.go index 2af79d66..c8e5857d 100644 --- a/indexer/cache.go +++ b/indexer/cache.go @@ -7,8 +7,8 @@ import ( v1 "github.com/attestantio/go-eth2-client/api/v1" 
"github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/pk910/dora/db" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/utils" ) type indexerCache struct { diff --git a/indexer/cache_block.go b/indexer/cache_block.go index 808d83b2..6d47e6e0 100644 --- a/indexer/cache_block.go +++ b/indexer/cache_block.go @@ -5,8 +5,8 @@ import ( "github.com/attestantio/go-eth2-client/spec" "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" ) type CacheBlock struct { diff --git a/indexer/cache_logic.go b/indexer/cache_logic.go index b43ca36c..124a1a53 100644 --- a/indexer/cache_logic.go +++ b/indexer/cache_logic.go @@ -4,9 +4,9 @@ import ( "fmt" "time" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/utils" ) func (cache *indexerCache) runCacheLoop() { diff --git a/indexer/client.go b/indexer/client.go index 740c50e5..9f1875dd 100644 --- a/indexer/client.go +++ b/indexer/client.go @@ -9,8 +9,8 @@ import ( v1 "github.com/attestantio/go-eth2-client/api/v1" "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/pk910/dora/rpc" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/rpc" + "github.com/ethpandaops/dora/utils" ) type IndexerClient struct { diff --git a/indexer/epoch_stats.go b/indexer/epoch_stats.go index 5777a835..3753e011 100644 --- a/indexer/epoch_stats.go +++ b/indexer/epoch_stats.go @@ -9,10 +9,10 @@ import ( v1 "github.com/attestantio/go-eth2-client/api/v1" "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/rpc" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" 
+ "github.com/ethpandaops/dora/rpc" + "github.com/ethpandaops/dora/utils" ) type EpochStats struct { diff --git a/indexer/indexer.go b/indexer/indexer.go index bca338d5..1d734bac 100644 --- a/indexer/indexer.go +++ b/indexer/indexer.go @@ -10,10 +10,10 @@ import ( "github.com/attestantio/go-eth2-client/spec/phase0" "github.com/sirupsen/logrus" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/rpc" - "github.com/pk910/dora/types" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/rpc" + "github.com/ethpandaops/dora/types" + "github.com/ethpandaops/dora/utils" ) var logger = logrus.StandardLogger().WithField("module", "indexer") diff --git a/indexer/synchronizer.go b/indexer/synchronizer.go index a79247bf..5db2113d 100644 --- a/indexer/synchronizer.go +++ b/indexer/synchronizer.go @@ -5,9 +5,9 @@ import ( "sync" "time" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/indexer/votes.go b/indexer/votes.go index e11e9dca..bbeccd04 100644 --- a/indexer/votes.go +++ b/indexer/votes.go @@ -5,7 +5,7 @@ import ( "fmt" "time" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/utils" ) type EpochVotes struct { diff --git a/indexer/write_db.go b/indexer/write_db.go index a8b6874b..a879e705 100644 --- a/indexer/write_db.go +++ b/indexer/write_db.go @@ -3,10 +3,10 @@ package indexer import ( "fmt" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/utils" "github.com/jmoiron/sqlx" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/utils" ) func persistSlotAssignments(epochStats *EpochStats, tx *sqlx.Tx) error { diff --git a/rpc/beaconapi.go b/rpc/beaconapi.go index 687a3c51..d3120ae4 100644 --- a/rpc/beaconapi.go +++ 
b/rpc/beaconapi.go @@ -23,9 +23,9 @@ import ( "github.com/sirupsen/logrus" "golang.org/x/crypto/ssh" - "github.com/pk910/dora/rpc/sshtunnel" - "github.com/pk910/dora/types" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/rpc/sshtunnel" + "github.com/ethpandaops/dora/types" + "github.com/ethpandaops/dora/utils" ) var logger = logrus.StandardLogger().WithField("module", "rpc") diff --git a/rpc/beaconstream.go b/rpc/beaconstream.go index 4f5b9108..6af10e00 100644 --- a/rpc/beaconstream.go +++ b/rpc/beaconstream.go @@ -11,8 +11,8 @@ import ( v1 "github.com/attestantio/go-eth2-client/api/v1" "github.com/donovanhide/eventsource" - "github.com/pk910/dora/rpc/eventstream" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/rpc/eventstream" + "github.com/ethpandaops/dora/utils" ) const ( diff --git a/rpc/epochduties.go b/rpc/epochduties.go index 5be2ad26..95f1168e 100644 --- a/rpc/epochduties.go +++ b/rpc/epochduties.go @@ -5,7 +5,7 @@ import ( "math" "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/utils" ) type EpochAssignments struct { diff --git a/rpc/eventstream/eventstream.go b/rpc/eventstream/eventstream.go index 1ebbb864..1fd2342d 100644 --- a/rpc/eventstream/eventstream.go +++ b/rpc/eventstream/eventstream.go @@ -18,7 +18,7 @@ import ( type Stream struct { c *http.Client req *http.Request - lastEventId string + lastEventID string retry time.Duration // Events emits the events received by the stream Events chan StreamEvent @@ -59,27 +59,28 @@ func (e SubscriptionError) Error() string { // Subscribe to the Events emitted from the specified url. // If lastEventId is non-empty it will be sent to the server in case it can replay missed events. 
-func Subscribe(url, lastEventId string) (*Stream, error) { - req, err := http.NewRequest("GET", url, nil) +func Subscribe(url, lastEventID string) (*Stream, error) { + req, err := http.NewRequest("GET", url, http.NoBody) if err != nil { return nil, err } - return SubscribeWithRequest(lastEventId, req) + + return SubscribeWithRequest(lastEventID, req) } // SubscribeWithRequest will take an http.Request to setup the stream, allowing custom headers // to be specified, authentication to be configured, etc. -func SubscribeWithRequest(lastEventId string, request *http.Request) (*Stream, error) { - return SubscribeWith(lastEventId, http.DefaultClient, request) +func SubscribeWithRequest(lastEventID string, request *http.Request) (*Stream, error) { + return SubscribeWith(lastEventID, http.DefaultClient, request) } // SubscribeWith takes a http client and request providing customization over both headers and // control over the http client settings (timeouts, tls, etc) -func SubscribeWith(lastEventId string, client *http.Client, request *http.Request) (*Stream, error) { +func SubscribeWith(lastEventID string, client *http.Client, request *http.Request) (*Stream, error) { stream := &Stream{ c: client, req: request, - lastEventId: lastEventId, + lastEventID: lastEventID, retry: time.Millisecond * 3000, Events: make(chan StreamEvent), Errors: make(chan error, 10), @@ -91,7 +92,9 @@ func SubscribeWith(lastEventId string, client *http.Client, request *http.Reques if err != nil { return nil, err } + go stream.stream(r) + return stream, nil } @@ -100,6 +103,7 @@ func (stream *Stream) Close() { go func() { stream.closeMutex.Lock() defer stream.closeMutex.Unlock() + if stream.isClosed { return } @@ -116,24 +120,30 @@ func checkRedirect(req *http.Request, via []*http.Request) error { if len(via) >= 10 { return errors.New("stopped after 10 redirects") } + for k, vv := range via[0].Header { for _, v := range vv { req.Header.Add(k, v) } } + return nil } func (stream *Stream) connect() 
(r io.ReadCloser, err error) { var resp *http.Response + stream.req.Header.Set("Cache-Control", "no-cache") stream.req.Header.Set("Accept", "text/event-stream") - if len(stream.lastEventId) > 0 { - stream.req.Header.Set("Last-Event-ID", stream.lastEventId) + + if stream.lastEventID != "" { + stream.req.Header.Set("Last-Event-ID", stream.lastEventID) } + if resp, err = stream.c.Do(stream.req); err != nil { return } + if resp.StatusCode != 200 { message, _ := io.ReadAll(resp.Body) err = SubscriptionError{ @@ -141,7 +151,9 @@ func (stream *Stream) connect() (r io.ReadCloser, err error) { Message: string(message), } } + r = resp.Body + return } @@ -162,39 +174,53 @@ func (stream *Stream) receiveEvents(r io.ReadCloser) { for { ev, err := dec.Decode() + stream.closeMutex.Lock() if stream.isClosed { stream.closeMutex.Unlock() return } + if err != nil { stream.Errors <- err stream.closeMutex.Unlock() + return } - stream.closeMutex.Unlock() - pub := ev.(StreamEvent) + pub, ok := ev.(StreamEvent) + if !ok { + stream.closeMutex.Unlock() + continue + } + if pub.Retry() > 0 { stream.retry = time.Duration(pub.Retry()) * time.Millisecond } - if len(pub.Id()) > 0 { - stream.lastEventId = pub.Id() + + if pub.Id() != "" { + stream.lastEventID = pub.Id() } + stream.Events <- pub + stream.closeMutex.Unlock() } } func (stream *Stream) retryRestartStream() { backoff := stream.retry + for { if stream.Logger != nil { stream.Logger.Printf("Reconnecting in %0.4f secs\n", backoff.Seconds()) } + time.Sleep(backoff) + if stream.isClosed { return } + // NOTE: because of the defer we're opening the new connection // before closing the old one. Shouldn't be a problem in practice, // but something to be aware of. 
@@ -203,10 +229,13 @@ func (stream *Stream) retryRestartStream() { go stream.stream(r) return } + if stream.isClosed { return } + stream.Errors <- err + backoff = 10 * time.Second } } diff --git a/services/beaconservice.go b/services/beaconservice.go index fafed9b6..f7efc4a6 100644 --- a/services/beaconservice.go +++ b/services/beaconservice.go @@ -13,11 +13,11 @@ import ( "github.com/attestantio/go-eth2-client/spec/phase0" "github.com/ethereum/go-ethereum/common/lru" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/indexer" - "github.com/pk910/dora/rpc" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/indexer" + "github.com/ethpandaops/dora/rpc" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) @@ -80,6 +80,10 @@ func (bs *BeaconService) GetValidatorName(index uint64) string { return bs.validatorNames.GetValidatorName(index) } +func (bs *BeaconService) GetValidatorNamesCount() uint64 { + return bs.validatorNames.GetValidatorNamesCount() +} + func (bs *BeaconService) GetCachedValidatorSet() map[phase0.ValidatorIndex]*v1.Validator { return bs.indexer.GetCachedValidatorSet() } diff --git a/services/fnsignatures.go b/services/fnsignatures.go index c5bfe21e..41adbfb9 100644 --- a/services/fnsignatures.go +++ b/services/fnsignatures.go @@ -11,10 +11,10 @@ import ( nethttp "net/http" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/types" - "github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/types" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/services/frontendcache.go b/services/frontendcache.go index f3a12783..386f88af 100644 --- a/services/frontendcache.go +++ b/services/frontendcache.go @@ -10,8 +10,8 @@ import ( "sync" "time" - "github.com/pk910/dora/cache" - 
"github.com/pk910/dora/utils" + "github.com/ethpandaops/dora/cache" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" ) diff --git a/services/validatornames.go b/services/validatornames.go index b62ba76a..186d620e 100644 --- a/services/validatornames.go +++ b/services/validatornames.go @@ -12,21 +12,25 @@ import ( "sync" "time" - "github.com/pk910/dora/config" - "github.com/pk910/dora/db" - "github.com/pk910/dora/dbtypes" - "github.com/pk910/dora/utils" + "github.com/attestantio/go-eth2-client/spec/phase0" + "github.com/ethereum/go-ethereum/common" + "github.com/ethpandaops/dora/config" + "github.com/ethpandaops/dora/db" + "github.com/ethpandaops/dora/dbtypes" + "github.com/ethpandaops/dora/utils" "github.com/sirupsen/logrus" + "golang.org/x/exp/maps" "gopkg.in/yaml.v3" ) var logger_vn = logrus.StandardLogger().WithField("module", "validator_names") type ValidatorNames struct { - loadingMutex sync.Mutex - loading bool - namesMutex sync.RWMutex - names map[uint64]string + loadingMutex sync.Mutex + loading bool + namesMutex sync.RWMutex + namesByIndex map[uint64]string + namesByWithdrawal map[common.Address]string } func (vn *ValidatorNames) GetValidatorName(index uint64) string { @@ -34,10 +38,45 @@ func (vn *ValidatorNames) GetValidatorName(index uint64) string { return "" } defer vn.namesMutex.RUnlock() - if vn.names == nil { + if vn.namesByIndex == nil { return "" } - return vn.names[index] + + name := vn.namesByIndex[index] + if name != "" { + return name + } + + validatorSet := GlobalBeaconService.GetCachedValidatorSet() + if validatorSet == nil { + return "" + } + + validator := validatorSet[phase0.ValidatorIndex(index)] + if validator == nil { + return "" + } + + if validator.Validator.WithdrawalCredentials[0] == 0x01 { + withdrawal := common.Address(validator.Validator.WithdrawalCredentials[12:]) + name = vn.namesByWithdrawal[withdrawal] + if name != "" { + return name + } + } + + return "" +} + +func (vn *ValidatorNames) 
GetValidatorNamesCount() uint64 { + if !vn.namesMutex.TryRLock() { + return 0 + } + defer vn.namesMutex.RUnlock() + if vn.namesByIndex == nil { + return 0 + } + return uint64(len(maps.Keys(vn.namesByIndex)) + len(maps.Keys(vn.namesByWithdrawal))) } func (vn *ValidatorNames) LoadValidatorNames() { @@ -50,7 +89,8 @@ func (vn *ValidatorNames) LoadValidatorNames() { go func() { vn.namesMutex.Lock() - vn.names = make(map[uint64]string) + vn.namesByIndex = make(map[uint64]string) + vn.namesByWithdrawal = make(map[common.Address]string) vn.namesMutex.Unlock() // load names @@ -125,21 +165,32 @@ func (vn *ValidatorNames) parseNamesMap(names map[string]string) int { defer vn.namesMutex.Unlock() nameCount := 0 for idxStr, name := range names { - rangeParts := strings.Split(idxStr, "-") - minIdx, err := strconv.ParseUint(rangeParts[0], 10, 64) - if err != nil { - continue - } - maxIdx := minIdx + 1 + rangeParts := strings.Split(idxStr, ":") if len(rangeParts) > 1 { - maxIdx, err = strconv.ParseUint(rangeParts[1], 10, 64) + switch rangeParts[0] { + case "withdrawal": + withdrawal := common.HexToAddress(rangeParts[1]) + vn.namesByWithdrawal[withdrawal] = name + nameCount++ + } + + } else { + rangeParts = strings.Split(idxStr, "-") + minIdx, err := strconv.ParseUint(rangeParts[0], 10, 64) if err != nil { continue } - } - for idx := minIdx; idx <= maxIdx; idx++ { - vn.names[idx] = name - nameCount++ + maxIdx := minIdx + 1 + if len(rangeParts) > 1 { + maxIdx, err = strconv.ParseUint(rangeParts[1], 10, 64) + if err != nil { + continue + } + } + for idx := minIdx; idx <= maxIdx; idx++ { + vn.namesByIndex[idx] = name + nameCount++ + } } } return nameCount @@ -173,27 +224,7 @@ func (vn *ValidatorNames) loadFromRangesApi(apiUrl string) error { return fmt.Errorf("error parsing validator ranges response: %v", err) } - vn.namesMutex.Lock() - defer vn.namesMutex.Unlock() - nameCount := 0 - for rangeStr, name := range rangesResponse.Ranges { - rangeParts := strings.Split(rangeStr, "-") - 
minIdx, err := strconv.ParseUint(rangeParts[0], 10, 64) - if err != nil { - continue - } - maxIdx := minIdx + 1 - if len(rangeParts) > 1 { - maxIdx, err = strconv.ParseUint(rangeParts[1], 10, 64) - if err != nil { - continue - } - } - for idx := minIdx; idx <= maxIdx; idx++ { - vn.names[idx] = name - nameCount++ - } - } + nameCount := vn.parseNamesMap(rangesResponse.Ranges) logger_vn.Infof("loaded %v validator names from inventory api (%v)", nameCount, utils.GetRedactedUrl(apiUrl)) return nil } @@ -201,7 +232,7 @@ func (vn *ValidatorNames) loadFromRangesApi(apiUrl string) error { func (vn *ValidatorNames) updateDb() error { vn.namesMutex.RLock() nameRows := make([]*dbtypes.ValidatorName, 0) - for index, name := range vn.names { + for index, name := range vn.namesByIndex { nameRows = append(nameRows, &dbtypes.ValidatorName{ Index: index, Name: name, diff --git a/templates/_layout/500.html b/templates/_layout/500.html index 4cca3e9c..491a868c 100644 --- a/templates/_layout/500.html +++ b/templates/_layout/500.html @@ -16,7 +16,7 @@

Page Error

Sorry, there was an unexpected error when processing the page you requested.

This explorer is under heavy development.
- Please report this error on github to help fixing these issues. + Please report this error on GitHub to help fix these issues.

diff --git a/templates/_layout/footer.html b/templates/_layout/footer.html index 1289b7f6..369df9e9 100644 --- a/templates/_layout/footer.html +++ b/templates/_layout/footer.html @@ -2,7 +2,7 @@ diff --git a/templates/_layout/header.html b/templates/_layout/header.html index a349b806..f67d2a15 100644 --- a/templates/_layout/header.html +++ b/templates/_layout/header.html @@ -22,9 +22,13 @@