diff --git a/cmd/explorer/main.go b/cmd/explorer/main.go
index 49752863ef..82bba73660 100644
--- a/cmd/explorer/main.go
+++ b/cmd/explorer/main.go
@@ -379,6 +379,8 @@ func main() {
router.HandleFunc("/burn/data", handlers.BurnPageData).Methods("GET")
router.HandleFunc("/gasnow", handlers.GasNow).Methods("GET")
router.HandleFunc("/gasnow/data", handlers.GasNowData).Methods("GET")
+ router.HandleFunc("/correlations", handlers.Correlations).Methods("GET")
+ router.HandleFunc("/correlations/data", handlers.CorrelationsData).Methods("POST")
router.HandleFunc("/vis", handlers.Vis).Methods("GET")
router.HandleFunc("/charts", handlers.Charts).Methods("GET")
diff --git a/cmd/statistics/main.go b/cmd/statistics/main.go
index ce30d32eda..857c9c8a94 100644
--- a/cmd/statistics/main.go
+++ b/cmd/statistics/main.go
@@ -2,6 +2,7 @@ package main
import (
"eth2-exporter/db"
+ "eth2-exporter/price"
"eth2-exporter/services"
"eth2-exporter/types"
"eth2-exporter/utils"
@@ -25,7 +26,7 @@ type options struct {
statisticsChartToggle bool
}
-var opt options = options{}
+var opt *options
func main() {
configPath := flag.String("config", "", "Path to the config file")
@@ -37,11 +38,12 @@ func main() {
flag.Parse()
- opt = options{
+ opt = &options{
configPath: *configPath,
statisticsDayToExport: *statisticsDayToExport,
statisticsDaysToExport: *statisticsDaysToExport,
- statisticsValidatorToggle: *statisticsChartToggle,
+ statisticsChartToggle: *statisticsChartToggle,
+ statisticsValidatorToggle: *statisticsValidatorToggle,
poolsDisabledFlag: *poolsDisabledFlag,
}
@@ -88,6 +90,8 @@ func main() {
db.InitBigtable(cfg.Bigtable.Project, cfg.Bigtable.Instance, fmt.Sprintf("%d", utils.Config.Chain.Config.DepositChainID))
+ price.Init(utils.Config.Chain.Config.DepositChainID)
+
if *statisticsDaysToExport != "" {
s := strings.Split(*statisticsDaysToExport, "-")
if len(s) < 2 {
@@ -125,7 +129,7 @@ func main() {
logrus.Fatalf("error resetting status for chart series status for day %v: %v", d, err)
}
- err = db.WriteChartSeriesForDay(uint64(d))
+ err = db.WriteChartSeriesForDay(int64(d))
if err != nil {
logrus.Errorf("error exporting chart series from day %v: %v", d, err)
}
@@ -153,7 +157,7 @@ func main() {
logrus.Fatalf("error resetting status for chart series status for day %v: %v", *statisticsDayToExport, err)
}
- err = db.WriteChartSeriesForDay(uint64(*statisticsDayToExport))
+ err = db.WriteChartSeriesForDay(int64(*statisticsDayToExport))
if err != nil {
logrus.Errorf("error exporting chart series from day %v: %v", *statisticsDayToExport, err)
}
@@ -229,7 +233,7 @@ func statisticsLoop() {
logrus.Infof("Chart statistics: latest epoch is %v, previous day is %v, last exported day is %v", latestEpoch, previousDay, lastExportedDayChart)
if lastExportedDayChart <= previousDay || lastExportedDayChart == 0 {
for day := lastExportedDayChart; day <= previousDay; day++ {
- err = db.WriteChartSeriesForDay(day)
+ err = db.WriteChartSeriesForDay(int64(day))
if err != nil {
logrus.Errorf("error exporting chart series from day %v: %v", day, err)
}
diff --git a/db/bigtable.go b/db/bigtable.go
index 2042c6d78c..7cc15cc78f 100644
--- a/db/bigtable.go
+++ b/db/bigtable.go
@@ -656,9 +656,9 @@ func (bigtable *Bigtable) GetValidatorBalanceHistory(validators []uint64, startE
rangeEnd := fmt.Sprintf("%s:e:b:%s", bigtable.chainId, reversedPaddedEpoch(startEpoch-uint64(limit)))
res := make(map[uint64][]*types.ValidatorBalance, len(validators))
- if len(validators) == 0 {
- return res, nil
- }
+ // if len(validators) == 0 {
+ // return res, nil
+ // }
columnFilters := make([]gcp_bigtable.Filter, 0, len(validators))
for _, validator := range validators {
@@ -1241,7 +1241,7 @@ func (bigtable *Bigtable) SaveValidatorIncomeDetails(epoch uint64, rewards map[u
return nil
}
-func (bigtable *Bigtable) GetEpochIncomeHistory(startEpoch uint64, limit int64) (*itypes.ValidatorEpochIncome, error) {
+func (bigtable *Bigtable) GetEpochIncomeHistoryDescending(startEpoch uint64, limit int64) (*itypes.ValidatorEpochIncome, error) {
ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*30))
defer cancel()
@@ -1273,19 +1273,81 @@ func (bigtable *Bigtable) GetEpochIncomeHistory(startEpoch uint64, limit int64)
return &res, nil
}
-func (bigtable *Bigtable) GetValidatorIncomeDetailsHistory(validators []uint64, startEpoch uint64, limit int64) (map[uint64]map[uint64]*itypes.ValidatorEpochIncome, error) {
+func (bigtable *Bigtable) GetEpochIncomeHistory(epoch uint64) (*itypes.ValidatorEpochIncome, error) {
- ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*40))
+ ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*30))
defer cancel()
+ key := fmt.Sprintf("%s:e:b:%s", bigtable.chainId, reversedPaddedEpoch(epoch))
+
+ family := gcp_bigtable.FamilyFilter(STATS_COLUMN_FAMILY)
+ columnFilter := gcp_bigtable.ColumnFilter(SUM_COLUMN)
+ filter := gcp_bigtable.RowFilter(gcp_bigtable.ChainFilters(family, columnFilter))
+
+ row, err := bigtable.tableBeaconchain.ReadRow(ctx, key, filter)
+ if err != nil {
+ return nil, fmt.Errorf("error reading income statistics from bigtable for epoch: %v err: %w", epoch, err)
+ }
+
+ if row != nil {
+ res := itypes.ValidatorEpochIncome{}
+ err := proto.Unmarshal(row[STATS_COLUMN_FAMILY][0].Value, &res)
+ if err != nil {
+ return nil, fmt.Errorf("error decoding income data for row %v: %w", row.Key(), err)
+ }
+ return &res, nil
+ }
+
+ // if there is no result we have to calculate the sum
+ income, err := bigtable.GetValidatorIncomeDetailsHistory([]uint64{}, epoch, 1)
+ if err != nil {
+ logger.WithError(err).Error("error getting validator income history")
+ }
+
+ total := &itypes.ValidatorEpochIncome{}
+
+ for _, epochs := range income {
+ for _, details := range epochs {
+ total.AttestationHeadReward += details.AttestationHeadReward
+ total.AttestationSourceReward += details.AttestationSourceReward
+ total.AttestationSourcePenalty += details.AttestationSourcePenalty
+ total.AttestationTargetReward += details.AttestationTargetReward
+ total.AttestationTargetPenalty += details.AttestationTargetPenalty
+ total.FinalityDelayPenalty += details.FinalityDelayPenalty
+ total.ProposerSlashingInclusionReward += details.ProposerSlashingInclusionReward
+ total.ProposerAttestationInclusionReward += details.ProposerAttestationInclusionReward
+ total.ProposerSyncInclusionReward += details.ProposerSyncInclusionReward
+ total.SyncCommitteeReward += details.SyncCommitteeReward
+ total.SyncCommitteePenalty += details.SyncCommitteePenalty
+ total.SlashingReward += details.SlashingReward
+ total.SlashingPenalty += details.SlashingPenalty
+ total.TxFeeRewardWei = utils.AddBigInts(total.TxFeeRewardWei, details.TxFeeRewardWei)
+ }
+ }
+
+ return total, nil
+}
+
+// GetValidatorIncomeDetailsHistory returns the validator income details, which have a garbage collection policy of one day.
+func (bigtable *Bigtable) GetValidatorIncomeDetailsHistory(validators []uint64, startEpoch uint64, limit int64) (map[uint64]map[uint64]*itypes.ValidatorEpochIncome, error) {
+ ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*180))
+ defer cancel()
+
+ endEpoch := startEpoch - uint64(limit)
+
+ endTime := utils.EpochToTime(endEpoch)
+
+ // if the end time + 25 hours is not after the current time the end epoch is older than 25 hours.
+ if !endTime.Add(time.Hour * 25).After(time.Now()) {
+ return nil, fmt.Errorf("error epoch range is outside of the garbage collection policy (1 day)")
+ }
+
rangeStart := fmt.Sprintf("%s:e:b:%s", bigtable.chainId, reversedPaddedEpoch(startEpoch))
- rangeEnd := fmt.Sprintf("%s:e:b:%s", bigtable.chainId, reversedPaddedEpoch(startEpoch-uint64(limit)))
+ rangeEnd := fmt.Sprintf("%s:e:b:%s", bigtable.chainId, reversedPaddedEpoch(endEpoch))
+ // logger.Infof("range: %v to %v", rangeStart, rangeEnd)
res := make(map[uint64]map[uint64]*itypes.ValidatorEpochIncome, len(validators))
valLen := len(validators)
- // if valLen == 0 {
- // return res, nil
- // }
// read entire row if you require more than 1000 validators
var columnFilters []gcp_bigtable.Filter
@@ -1310,7 +1372,6 @@ func (bigtable *Bigtable) GetValidatorIncomeDetailsHistory(validators []uint64,
if len(columnFilters) == 0 { // special case to retrieve data for all validators
filter = gcp_bigtable.FamilyFilter(INCOME_DETAILS_COLUMN_FAMILY)
}
-
err := bigtable.tableBeaconchain.ReadRows(ctx, gcp_bigtable.NewRange(rangeStart, rangeEnd), func(r gcp_bigtable.Row) bool {
for _, ri := range r[INCOME_DETAILS_COLUMN_FAMILY] {
validator, err := strconv.ParseUint(strings.TrimPrefix(ri.Column, INCOME_DETAILS_COLUMN_FAMILY+":"), 10, 64)
@@ -1318,7 +1379,6 @@ func (bigtable *Bigtable) GetValidatorIncomeDetailsHistory(validators []uint64,
logger.Errorf("error parsing validator from column key %v: %v", ri.Column, err)
return false
}
-
keySplit := strings.Split(r.Key(), ":")
epoch, err := strconv.ParseUint(keySplit[3], 10, 64)
diff --git a/db/bigtable_eth1.go b/db/bigtable_eth1.go
index 270e19510f..4c39590406 100644
--- a/db/bigtable_eth1.go
+++ b/db/bigtable_eth1.go
@@ -318,35 +318,6 @@ func (bigtable *Bigtable) GetLastBlockInDataTable() (int, error) {
return lastBlock, nil
}
-func (bigtable *Bigtable) GetFullBlockFromDataTable(number uint64) (*types.Eth1Block, error) {
-
- ctx, cancel := context.WithTimeout(context.Background(), time.Second*10)
- defer cancel()
-
- paddedNumber := reversedPaddedBlockNumber(number)
-
- row, err := bigtable.tableData.ReadRow(ctx, fmt.Sprintf("%s:%s", bigtable.chainId, paddedNumber))
-
- if err != nil {
- return nil, err
- }
-
- if len(row[DEFAULT_FAMILY]) == 0 { // block not found
- logger.Errorf("block %v not found in data table", number)
- return nil, ErrBlockNotFound
- }
- blocks := make([]*types.Eth1Block, 0, 1)
- rowHandler := getFullBlockHandler(&blocks)
-
- rowHandler(row)
-
- if err != nil {
- return nil, err
- }
-
- return blocks[0], nil
-}
-
func (bigtable *Bigtable) GetMostRecentBlockFromDataTable() (*types.Eth1BlockIndexed, error) {
ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*30))
defer cancel()
@@ -446,28 +417,94 @@ func getFullBlockHandler(blocks *[]*types.Eth1Block) func(gcp_bigtable.Row) bool
// GetFullBlockDescending gets blocks starting at block start
func (bigtable *Bigtable) GetFullBlockDescending(start, limit uint64) ([]*types.Eth1Block, error) {
- startPadded := reversedPaddedBlockNumber(start)
- ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*30))
+ ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*60))
defer cancel()
- prefix := fmt.Sprintf("%s:%s", bigtable.chainId, startPadded)
+ if start < 1 || limit < 1 || limit > start {
+ return nil, fmt.Errorf("invalid block range provided (start: %v, limit: %v)", start, limit)
+ }
- rowRange := gcp_bigtable.InfiniteRange(prefix) //gcp_bigtable.PrefixRange("1:1000000000")
+ startPadded := reversedPaddedBlockNumber(start)
+ endPadded := reversedPaddedBlockNumber(start - limit)
+
+ startKey := fmt.Sprintf("%s:%s", bigtable.chainId, startPadded)
+ endKey := fmt.Sprintf("%s:%s", bigtable.chainId, endPadded)
+
+ rowRange := gcp_bigtable.NewRange(startKey, endKey) //gcp_bigtable.PrefixRange("1:1000000000")
- blocks := make([]*types.Eth1Block, 0, 100)
+ // if limit >= start { // handle retrieval of the first blocks
+ // rowRange = gcp_bigtable.InfiniteRange(startKey)
+ // }
+
+ rowFilter := gcp_bigtable.RowFilter(gcp_bigtable.ColumnFilter("data"))
- rowHandler := getFullBlockHandler(&blocks)
+ blocks := make([]*types.Eth1Block, 0, limit)
+
+ rowHandler := func(row gcp_bigtable.Row) bool {
+ block := types.Eth1Block{}
+ err := proto.Unmarshal(row[DEFAULT_FAMILY_BLOCKS][0].Value, &block)
+ if err != nil {
+ logger.Errorf("error could not unmarshal proto object, err: %v", err)
+ return false
+ }
+ blocks = append(blocks, &block)
+ return true
+ }
startTime := time.Now()
- err := bigtable.tableData.ReadRows(ctx, rowRange, rowHandler, gcp_bigtable.LimitRows(int64(limit)))
+ err := bigtable.tableBlocks.ReadRows(ctx, rowRange, rowHandler, rowFilter, gcp_bigtable.LimitRows(int64(limit)))
if err != nil {
return nil, err
}
- logger.Infof("finished getting blocks from table data: %v", time.Since(startTime))
+ logger.Infof("finished getting blocks from table blocks: %v", time.Since(startTime))
return blocks, nil
}
+// GetFullBlocksDescending streams blocks from block high down to block low into the provided channel.
+func (bigtable *Bigtable) GetFullBlocksDescending(stream chan<- *types.Eth1Block, high, low uint64) error {
+ ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second*180))
+ defer cancel()
+
+ if high < 1 || low < 1 || high < low {
+ return fmt.Errorf("invalid block range provided (high: %v, low: %v)", high, low)
+ }
+
+ highKey := fmt.Sprintf("%s:%s", bigtable.chainId, reversedPaddedBlockNumber(high))
+ lowKey := fmt.Sprintf("%s:%s", bigtable.chainId, reversedPaddedBlockNumber(low))
+
+ // the low key will have a higher reverse padded number
+ rowRange := gcp_bigtable.NewRange(highKey, lowKey) //gcp_bigtable.PrefixRange("1:1000000000")
+
+ // if limit >= start { // handle retrieval of the first blocks
+ // rowRange = gcp_bigtable.InfiniteRange(startKey)
+ // }
+
+ // logger.Infof("querying from (excl) %v to (incl) %v", low, high)
+
+ rowFilter := gcp_bigtable.RowFilter(gcp_bigtable.ColumnFilter("data"))
+
+ rowHandler := func(row gcp_bigtable.Row) bool {
+ block := types.Eth1Block{}
+ err := proto.Unmarshal(row[DEFAULT_FAMILY_BLOCKS][0].Value, &block)
+ if err != nil {
+ logger.Errorf("error could not unmarshal proto object, err: %v", err)
+ return false
+ }
+ stream <- &block
+ return true
+ }
+
+ // startTime := time.Now()
+ err := bigtable.tableBlocks.ReadRows(ctx, rowRange, rowHandler, rowFilter)
+ if err != nil {
+ return err
+ }
+
+ // logger.Infof("finished getting blocks from table blocks: %v", time.Since(startTime))
+ return nil
+}
+
func (bigtable *Bigtable) GetBlocksIndexedMultiple(blockNumbers []uint64, limit uint64) ([]*types.Eth1BlockIndexed, error) {
rowList := gcp_bigtable.RowList{}
for _, block := range blockNumbers {
diff --git a/db/db.go b/db/db.go
index adc087d3c0..f6e207928a 100644
--- a/db/db.go
+++ b/db/db.go
@@ -2316,3 +2316,16 @@ func updateValidatorPerformance(tx *sqlx.Tx) error {
return tx.Commit()
}
+
+func GetBlockNumber(slot uint64) (block uint64, err error) {
+ err = ReaderDb.Get(&block, `SELECT exec_block_number FROM blocks where slot = $1`, slot)
+ return
+}
+
+func SaveChartSeriesPoint(date time.Time, indicator string, value any) error {
+ _, err := WriterDb.Exec(`INSERT INTO chart_series (time, indicator, value) VALUES($1, $2, $3) ON CONFLICT (time, indicator) DO UPDATE SET value = EXCLUDED.value`, date, indicator, value)
+ if err != nil {
+ return fmt.Errorf("error saving chart_series point for indicator %v at %v: %w", indicator, date, err)
+ }
+ return err
+}
diff --git a/db/statistics.go b/db/statistics.go
index cecb9b3438..f7e02cea10 100644
--- a/db/statistics.go
+++ b/db/statistics.go
@@ -2,6 +2,7 @@ package db
import (
"eth2-exporter/metrics"
+ "eth2-exporter/price"
"eth2-exporter/types"
"eth2-exporter/utils"
"fmt"
@@ -373,17 +374,29 @@ func GetValidatorIncomeHistory(validator_indices []uint64, lowerBoundDay uint64,
return result, err
}
-func WriteChartSeriesForDay(day uint64) error {
+func WriteChartSeriesForDay(day int64) error {
+ startTs := time.Now()
+
+ if day < 0 {
+ // before the beaconchain
+ return fmt.Errorf("this function does not yet handle pre-beaconchain blocks")
+ }
+
epochsPerDay := (24 * 60 * 60) / utils.Config.Chain.Config.SlotsPerEpoch / utils.Config.Chain.Config.SecondsPerSlot
- beaconchainDay := day * epochsPerDay
+ beaconchainDay := day * int64(epochsPerDay)
- startDate := utils.EpochToTime(beaconchainDay)
+ startDate := utils.EpochToTime(uint64(beaconchainDay))
dateTrunc := time.Date(startDate.Year(), startDate.Month(), startDate.Day(), 0, 0, 0, 0, time.UTC)
+ // inclusive slot
firstSlot := utils.TimeToSlot(uint64(dateTrunc.Unix()))
- lastSlot := int64(firstSlot) + int64(epochsPerDay*utils.Config.Chain.Config.SlotsPerEpoch)
- logger.Infof("exporting chart_series for day %v (slot %v to %v)", day, firstSlot, lastSlot)
+ epochOffset := firstSlot % utils.Config.Chain.Config.SlotsPerEpoch
+ firstSlot = firstSlot - epochOffset
+ firstEpoch := firstSlot / utils.Config.Chain.Config.SlotsPerEpoch
+ // exclusive slot
+ lastSlot := int64(firstSlot) + int64(epochsPerDay*utils.Config.Chain.Config.SlotsPerEpoch)
+ lastEpoch := lastSlot / int64(utils.Config.Chain.Config.SlotsPerEpoch)
latestDbEpoch, err := GetLatestEpoch()
if err != nil {
@@ -394,41 +407,292 @@ func WriteChartSeriesForDay(day uint64) error {
return fmt.Errorf("delaying statistics export as epoch %v has not yet been indexed. LatestDB: %v", (uint64(lastSlot) / utils.Config.Chain.Config.SlotsPerEpoch), latestDbEpoch)
}
- type Fees struct {
- BaseFee uint64 `db:"exec_base_fee_per_gas"`
- ExecGasUsed uint64 `db:"exec_gas_used"`
+ firstBlock, err := GetBlockNumber(uint64(firstSlot))
+ if err != nil {
+ return fmt.Errorf("error getting block number for slot: %v err: %w", firstSlot, err)
+ }
+
+ if firstBlock <= 15537394 {
+ return fmt.Errorf("this function does not yet handle pre merge statistics")
}
- burnedFees := make([]Fees, 0)
- logger.Infof("exporting from (inc): %v to (not inc): %v", firstSlot, lastSlot)
- err = ReaderDb.Select(&burnedFees, `SELECT exec_base_fee_per_gas, exec_gas_used FROM blocks WHERE slot >= $1 AND slot < $2 AND exec_base_fee_per_gas > 0 AND exec_gas_used > 0`, firstSlot, lastSlot)
+ lastBlock, err := GetBlockNumber(uint64(lastSlot))
if err != nil {
- return fmt.Errorf("error getting BURNED_FEES: %w", err)
+ return fmt.Errorf("error getting block number for slot: %v err: %w", lastSlot, err)
}
+ logger.Infof("exporting chart_series for day %v ts: %v (slot %v to %v, block %v to %v)", day, dateTrunc, firstSlot, lastSlot, firstBlock, lastBlock)
+
+ blocksChan := make(chan *types.Eth1Block, 360)
+ batchSize := int64(360)
+ go func(stream chan *types.Eth1Block) {
+ logger.Infof("querying blocks from %v to %v", firstBlock, lastBlock)
+ for b := int64(lastBlock) - 1; b > int64(firstBlock); b -= batchSize {
+ high := b
+ low := b - batchSize
+ if int64(firstBlock) > low {
+ low = int64(firstBlock - 1)
+ }
+
+ err := BigtableClient.GetFullBlocksDescending(stream, uint64(high), uint64(low))
+ if err != nil {
+ logger.Errorf("error getting blocks descending high: %v low: %v err: %v", high, low, err)
+ }
+
+ }
+ close(stream)
+ }(blocksChan)
+
+ // logger.Infof("got %v blocks", len(blocks))
+
+ blockCount := int64(0)
+ txCount := int64(0)
+
+ totalBaseFee := decimal.NewFromInt(0)
+ totalGasPrice := decimal.NewFromInt(0)
+ totalTxSavings := decimal.NewFromInt(0)
+ totalTxFees := decimal.NewFromInt(0)
+ totalBurned := decimal.NewFromInt(0)
+ totalGasUsed := decimal.NewFromInt(0)
+
+ legacyTxCount := int64(0)
+ accessListTxCount := int64(0)
+ eip1559TxCount := int64(0)
+ failedTxCount := int64(0)
+ successTxCount := int64(0)
+
+ totalFailedGasUsed := decimal.NewFromInt(0)
+ totalFailedTxFee := decimal.NewFromInt(0)
+
+ totalBaseBlockReward := decimal.NewFromInt(0)
+
+ totalGasLimit := decimal.NewFromInt(0)
+ totalTips := decimal.NewFromInt(0)
+
+ // totalSize := decimal.NewFromInt(0)
+
+ // blockCount := len(blocks)
+
+ // missedBlockCount := (firstSlot - uint64(lastSlot)) - uint64(blockCount)
- sum := decimal.NewFromInt(0)
+ var prevBlock *types.Eth1Block
- for _, fee := range burnedFees {
- base := new(big.Int).SetUint64(fee.BaseFee)
- used := new(big.Int).SetUint64(fee.ExecGasUsed)
+ avgBlockTime := decimal.NewFromInt(0)
- burned := new(big.Int).Mul(base, used)
- sum = sum.Add(decimal.NewFromBigInt(burned, 0))
+ for blk := range blocksChan {
+ // logger.Infof("analyzing block: %v with: %v transactions", blk.Number, len(blk.Transactions))
+ blockCount += 1
+ baseFee := decimal.NewFromBigInt(new(big.Int).SetBytes(blk.BaseFee), 0)
+ totalBaseFee = totalBaseFee.Add(baseFee)
+ totalGasLimit = totalGasLimit.Add(decimal.NewFromInt(int64(blk.GasLimit)))
+
+ if prevBlock != nil {
+ avgBlockTime = avgBlockTime.Add(decimal.NewFromInt(prevBlock.Time.AsTime().UnixMicro() - blk.Time.AsTime().UnixMicro())).Div(decimal.NewFromInt(2))
+ }
+
+ totalBaseBlockReward = totalBaseBlockReward.Add(decimal.NewFromBigInt(utils.Eth1BlockReward(blk.Number, blk.Difficulty), 0))
+
+ for _, tx := range blk.Transactions {
+ // for _, itx := range tx.Itx {
+ // }
+ // blk.Time
+ txCount += 1
+ maxFee := decimal.NewFromBigInt(new(big.Int).SetBytes(tx.MaxFeePerGas), 0)
+ prioFee := decimal.NewFromBigInt(new(big.Int).SetBytes(tx.MaxPriorityFeePerGas), 0)
+ gasUsed := decimal.NewFromBigInt(new(big.Int).SetUint64(tx.GasUsed), 0)
+ gasPrice := decimal.NewFromBigInt(new(big.Int).SetBytes(tx.GasPrice), 0)
+
+ var tipFee decimal.Decimal
+ var txFees decimal.Decimal
+ switch tx.Type {
+ case 0:
+ legacyTxCount += 1
+ totalGasPrice = totalGasPrice.Add(gasPrice)
+ txFees = gasUsed.Mul(gasPrice)
+ tipFee = gasPrice.Sub(baseFee)
+
+ case 1:
+ accessListTxCount += 1
+ totalGasPrice = totalGasPrice.Add(gasPrice)
+ txFees = gasUsed.Mul(gasPrice)
+ tipFee = gasPrice.Sub(baseFee)
+
+ case 2:
+ // priority fee is capped because the base fee is filled first
+ tipFee = decimal.Min(prioFee, maxFee.Sub(baseFee))
+ eip1559TxCount += 1
+ // totalMinerTips = totalMinerTips.Add(tipFee.Mul(gasUsed))
+ txFees = baseFee.Mul(gasUsed).Add(tipFee.Mul(gasUsed))
+ totalTxSavings = totalTxSavings.Add(maxFee.Mul(gasUsed).Sub(baseFee.Mul(gasUsed).Add(tipFee.Mul(gasUsed))))
+
+ default:
+ logger.Fatalf("error unknown tx type %v hash: %x", tx.Status, tx.Hash)
+ }
+ totalTxFees = totalTxFees.Add(txFees)
+
+ switch tx.Status {
+ case 0:
+ failedTxCount += 1
+ totalFailedGasUsed = totalFailedGasUsed.Add(gasUsed)
+ totalFailedTxFee = totalFailedTxFee.Add(txFees)
+ case 1:
+ successTxCount += 1
+ default:
+ logger.Fatalf("error unknown status code %v hash: %x", tx.Status, tx.Hash)
+ }
+ totalGasUsed = totalGasUsed.Add(gasUsed)
+ totalBurned = totalBurned.Add(baseFee.Mul(gasUsed))
+ if blk.Number < 12244000 {
+ totalTips = totalTips.Add(gasUsed.Mul(gasPrice))
+ } else {
+ totalTips = totalTips.Add(gasUsed.Mul(tipFee))
+ }
+ }
+ prevBlock = blk
}
- logger.Println("Exporting BURNED_FEES")
- _, err = WriterDb.Exec("INSERT INTO chart_series (time, indicator, value) VALUES ($1, 'BURNED_FEES', $2) ON CONFLICT (time, indicator) DO UPDATE SET value = EXCLUDED.value", dateTrunc, sum.String())
+ logger.Infof("exporting consensus rewards from %v to %v", firstEpoch, lastEpoch)
+ historyFirst, err := BigtableClient.GetValidatorBalanceHistory(nil, firstEpoch+1, 1)
+ if err != nil {
+ return err
+ }
+
+ sumStartEpoch := decimal.NewFromInt(0)
+ for _, balances := range historyFirst {
+ for _, balance := range balances {
+ sumStartEpoch = sumStartEpoch.Add(decimal.NewFromInt(int64(balance.Balance)))
+ }
+ }
+
+ historyLast, err := BigtableClient.GetValidatorBalanceHistory(nil, uint64(lastEpoch+1), 1)
+ if err != nil {
+ return err
+ }
+
+ sumEndEpoch := decimal.NewFromInt(0)
+ for _, balances := range historyLast {
+ for _, balance := range balances {
+ sumEndEpoch = sumEndEpoch.Add(decimal.NewFromInt(int64(balance.Balance)))
+ }
+ }
+ // consensus rewards are in Gwei
+ totalConsensusRewards := sumEndEpoch.Sub(sumStartEpoch)
+ logger.Infof("consensus rewards: %v", totalConsensusRewards.String())
+
+ logger.Infof("Exporting BURNED_FEES %v", totalBurned.String())
+ _, err = WriterDb.Exec("INSERT INTO chart_series (time, indicator, value) VALUES ($1, 'BURNED_FEES', $2) ON CONFLICT (time, indicator) DO UPDATE SET value = EXCLUDED.value", dateTrunc, totalBurned.String())
if err != nil {
return fmt.Errorf("error calculating BURNED_FEES chart_series: %w", err)
}
+ logger.Infof("Exporting NON_FAILED_TX_GAS_USAGE %v", totalGasUsed.Sub(totalFailedGasUsed).String())
+ err = SaveChartSeriesPoint(dateTrunc, "NON_FAILED_TX_GAS_USAGE", totalGasUsed.Sub(totalFailedGasUsed).String())
+ if err != nil {
+ return fmt.Errorf("error calculating NON_FAILED_TX_GAS_USAGE chart_series: %w", err)
+ }
+ logger.Infof("Exporting BLOCK_COUNT %v", blockCount)
+ err = SaveChartSeriesPoint(dateTrunc, "BLOCK_COUNT", blockCount)
+ if err != nil {
+ return fmt.Errorf("error calculating BLOCK_COUNT chart_series: %w", err)
+ }
+
+ // convert microseconds to seconds
+ logger.Infof("Exporting BLOCK_TIME_AVG %v", avgBlockTime.Div(decimal.NewFromInt(1e6)).Abs().String())
+ err = SaveChartSeriesPoint(dateTrunc, "BLOCK_TIME_AVG", avgBlockTime.Div(decimal.NewFromInt(1e6)).String())
+ if err != nil {
+ return fmt.Errorf("error calculating BLOCK_TIME_AVG chart_series: %w", err)
+ }
+ // convert consensus rewards to gwei
+ emission := (totalBaseBlockReward.Add(totalConsensusRewards.Mul(decimal.NewFromInt(1000000000))).Add(totalTips)).Sub(totalBurned)
+ logger.Infof("Exporting TOTAL_EMISSION %v day emission", emission)
+
+ var lastEmission float64
+ err = ReaderDb.Get(&lastEmission, "SELECT value FROM chart_series WHERE indicator = 'TOTAL_EMISSION' AND time < $1 ORDER BY time DESC LIMIT 1", dateTrunc)
+ if err != nil {
+ return fmt.Errorf("error getting previous value for TOTAL_EMISSION chart_series: %w", err)
+ }
+
+ newEmission := decimal.NewFromFloat(lastEmission).Add(emission)
+ err = SaveChartSeriesPoint(dateTrunc, "TOTAL_EMISSION", newEmission)
+ if err != nil {
+ return fmt.Errorf("error calculating TOTAL_EMISSION chart_series: %w", err)
+ }
+
+ if totalGasPrice.GreaterThan(decimal.NewFromInt(0)) && decimal.NewFromInt(legacyTxCount).Add(decimal.NewFromInt(accessListTxCount)).GreaterThan(decimal.NewFromInt(0)) {
+ logger.Infof("Exporting AVG_GASPRICE")
+ _, err = WriterDb.Exec("INSERT INTO chart_series (time, indicator, value) VALUES($1, 'AVG_GASPRICE', $2) ON CONFLICT (time, indicator) DO UPDATE SET value = EXCLUDED.value", dateTrunc, totalGasPrice.Div((decimal.NewFromInt(legacyTxCount).Add(decimal.NewFromInt(accessListTxCount)))).String())
+ if err != nil {
+ return fmt.Errorf("error calculating AVG_GASPRICE chart_series err: %w", err)
+ }
+ }
+
+ if txCount > 0 {
+ logger.Infof("Exporting AVG_GASUSED %v", totalGasUsed.Div(decimal.NewFromInt(blockCount)).String())
+ err = SaveChartSeriesPoint(dateTrunc, "AVG_GASUSED", totalGasUsed.Div(decimal.NewFromInt(blockCount)).String())
+ if err != nil {
+ return fmt.Errorf("error calculating AVG_GASUSED chart_series: %w", err)
+ }
+ }
+
+ logger.Infof("Exporting TOTAL_GASUSED %v", totalGasUsed.String())
+ err = SaveChartSeriesPoint(dateTrunc, "TOTAL_GASUSED", totalGasUsed.String())
+ if err != nil {
+ return fmt.Errorf("error calculating TOTAL_GASUSED chart_series: %w", err)
+ }
+
+ if blockCount > 0 {
+ logger.Infof("Exporting AVG_GASLIMIT %v", totalGasLimit.Div(decimal.NewFromInt(blockCount)))
+ err = SaveChartSeriesPoint(dateTrunc, "AVG_GASLIMIT", totalGasLimit.Div(decimal.NewFromInt(blockCount)))
+ if err != nil {
+ return fmt.Errorf("error calculating AVG_GASLIMIT chart_series: %w", err)
+ }
+ }
+
+ if !totalGasLimit.IsZero() {
+ logger.Infof("Exporting AVG_BLOCK_UTIL %v", totalGasUsed.Div(totalGasLimit).Mul(decimal.NewFromInt(100)))
+ err = SaveChartSeriesPoint(dateTrunc, "AVG_BLOCK_UTIL", totalGasUsed.Div(totalGasLimit).Mul(decimal.NewFromInt(100)))
+ if err != nil {
+ return fmt.Errorf("error calculating AVG_BLOCK_UTIL chart_series: %w", err)
+ }
+ }
+
+ logger.Infof("Exporting MARKET_CAP: %v", newEmission.Div(decimal.NewFromInt(1e18)).Add(decimal.NewFromFloat(72009990.50)).Mul(decimal.NewFromFloat(price.GetEthPrice("USD"))).String())
+ err = SaveChartSeriesPoint(dateTrunc, "MARKET_CAP", newEmission.Div(decimal.NewFromInt(1e18)).Add(decimal.NewFromFloat(72009990.50)).Mul(decimal.NewFromFloat(price.GetEthPrice("USD"))).String())
+ if err != nil {
+ return fmt.Errorf("error calculating MARKET_CAP chart_series: %w", err)
+ }
+
+ logger.Infof("Exporting TX_COUNT %v", txCount)
+ err = SaveChartSeriesPoint(dateTrunc, "TX_COUNT", txCount)
+ if err != nil {
+ return fmt.Errorf("error calculating TX_COUNT chart_series: %w", err)
+ }
+
+ // AVG_SIZE cannot be exported yet: the block size is not stored anywhere (missing field)
+ // logger.Infof("Exporting AVG_SIZE %v", totalSize.div)
+ // err = SaveChartSeriesPoint(dateTrunc, "AVG_SIZE", totalSize.div)
+ // if err != nil {
+ // return fmt.Errorf("error calculating AVG_SIZE chart_series: %w", err)
+ // }
+
+ // logger.Infof("Exporting POWER_CONSUMPTION %v", avgBlockTime.String())
+ // err = SaveChartSeriesPoint(dateTrunc, "POWER_CONSUMPTION", avgBlockTime.String())
+ // if err != nil {
+ // return fmt.Errorf("error calculating POWER_CONSUMPTION chart_series: %w", err)
+ // }
+
+ // logger.Infof("Exporting NEW_ACCOUNTS %v", avgBlockTime.String())
+ // err = SaveChartSeriesPoint(dateTrunc, "NEW_ACCOUNTS", avgBlockTime.String())
+ // if err != nil {
+ // return fmt.Errorf("error calculating NEW_ACCOUNTS chart_series: %w", err)
+ // }
+
logger.Infof("marking day export as completed in the status table")
_, err = WriterDb.Exec("insert into chart_series_status (day, status) values ($1, true)", day)
if err != nil {
return err
}
- logger.Println("chart_series export completed")
+ logger.Infof("chart_series export completed: took %v", time.Since(startTs))
return nil
}
diff --git a/handlers/api.go b/handlers/api.go
index f3f8c66be9..80c9ce2e19 100644
--- a/handlers/api.go
+++ b/handlers/api.go
@@ -1160,6 +1160,10 @@ func ApiValidatorBalanceHistory(w http.ResponseWriter, r *http.Request) {
return
}
+ if len(queryIndices) == 0 {
+ sendErrorResponse(w, r.URL.String(), "no or invalid validator indices provided")
+ }
+
history, err := db.BigtableClient.GetValidatorBalanceHistory(queryIndices, services.LatestEpoch(), 101)
if err != nil {
sendErrorResponse(w, r.URL.String(), "could not retrieve db results")
@@ -2481,6 +2485,10 @@ func APIDashboardDataBalance(w http.ResponseWriter, r *http.Request) {
queryOffsetEpoch = latestEpoch - oneWeekEpochs
}
+ if len(queryValidators) == 0 {
+ sendErrorResponse(w, r.URL.String(), "no or invalid validator indices provided")
+ }
+
balances, err := db.BigtableClient.GetValidatorBalanceHistory(queryValidators, latestEpoch, int64(latestEpoch-queryOffsetEpoch))
if err != nil {
logger.WithError(err).WithField("route", r.URL.String()).Errorf("error retrieving validator balance history")
diff --git a/handlers/charts.go b/handlers/charts.go
index 416f309121..0289c0a43c 100644
--- a/handlers/charts.go
+++ b/handlers/charts.go
@@ -6,10 +6,13 @@ import (
"eth2-exporter/types"
"fmt"
"net/http"
+ "reflect"
"github.com/gorilla/mux"
)
+const CHART_PREVIEW_POINTS = 100
+
// Charts uses a go template for presenting the page to show charts
func Charts(w http.ResponseWriter, r *http.Request) {
@@ -21,6 +24,7 @@ func Charts(w http.ResponseWriter, r *http.Request) {
data := InitPageData(w, r, "stats", "/charts", "Charts")
chartsPageData := services.LatestChartsPageData()
+
if chartsPageData == nil {
err := chartsUnavailableTemplate.ExecuteTemplate(w, "layout", data)
if err != nil {
@@ -31,6 +35,25 @@ func Charts(w http.ResponseWriter, r *http.Request) {
return
}
+ // only display the most recent N entries as a preview
+ for i, ch := range *chartsPageData {
+ if ch != nil && ch.Data != nil {
+ if ch.Order >= 20 && ch.Order <= 30 {
+ for j, series := range ch.Data.Series {
+ switch series.Data.(type) {
+ case []interface{}:
+ l := len(series.Data.([]interface{}))
+ if l > CHART_PREVIEW_POINTS*2 {
+ (*chartsPageData)[i].Data.Series[j].Data = series.Data.([]interface{})[l-CHART_PREVIEW_POINTS:]
+ }
+ default:
+ logger.Infof("unknown type: %v for chart: %v", reflect.TypeOf(series.Data), ch.Data.Title)
+ }
+ }
+ }
+ }
+ }
+
data.Data = chartsPageData
err := chartsTemplate.ExecuteTemplate(w, "layout", data)
@@ -103,7 +126,6 @@ func GenericChart(w http.ResponseWriter, r *http.Request) {
// SlotViz renders a single page with a d3 slot (block) visualisation
func SlotViz(w http.ResponseWriter, r *http.Request) {
-
var slotVizTemplate = templates.GetTemplate("layout.html", "slotViz.html", "slotVizPage.html")
w.Header().Set("Content-Type", "text/html")
diff --git a/handlers/correlations.go b/handlers/correlations.go
new file mode 100644
index 0000000000..8b269c6dda
--- /dev/null
+++ b/handlers/correlations.go
@@ -0,0 +1,93 @@
+package handlers
+
+import (
+ "encoding/json"
+ "eth2-exporter/db"
+ "eth2-exporter/templates"
+ "eth2-exporter/types"
+
+ "net/http"
+ "time"
+)
+
+// Correlations renders the correlations page, listing the chart indicators available for correlation, using a go template
+func Correlations(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "text/html")
+
+ data := InitPageData(w, r, "correlations", "/correlations", "Correlations")
+
+ var indicators []string
+ err := db.ReaderDb.Select(&indicators, "SELECT DISTINCT(indicator) AS indicator FROM chart_series WHERE time > NOW() - INTERVAL '1 week' ORDER BY indicator;")
+
+ if err != nil {
+ logger.Errorf("error retrieving correlation indicators: %v", err)
+ http.Error(w, "Internal server error", http.StatusInternalServerError)
+ return
+ }
+
+ data.Data = indicators
+
+ var correlationsTemplate = templates.GetTemplate("layout.html", "correlations.html")
+
+ // data := &types.PageData{
+ // HeaderAd: true,
+ // Meta: &types.Meta{
+ // Image: "https://etherchain.org/img/ballon-512x512.png",
+ // Title: fmt.Sprintf("%v - Correlations - etherchain.org - %v", utils.Config.Frontend.SiteName, time.Now().Year()),
+ // Description: "etherchain.org makes the Ethereum block chain accessible to non-technical end users",
+ // Path: "/correlations",
+ // GATag: utils.Config.Frontend.GATag,
+ // },
+ // Active: "stats",
+ // Data: indicators,
+ // CurrentBlock: services.LatestBlock(),
+ // GPO: services.LatestGasNowData(),
+ // }
+
+ err = correlationsTemplate.ExecuteTemplate(w, "layout", data)
+
+ if err != nil {
+ logger.Errorf("error executing template for %v route: %v", r.URL.String(), err)
+ return
+ }
+}
+
+func CorrelationsData(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "application/json")
+ enc := json.NewEncoder(w)
+
+ x := r.FormValue("x")
+ y := r.FormValue("y")
+ startDate, err := time.Parse("2006-01-02", r.FormValue("startDate"))
+
+ if err != nil {
+ logger.Infof("invalid correlation start date %v provided: %v", r.FormValue("startDate"), err)
+ enc.Encode(&types.CorrelationDataResponse{Status: "error", Message: "Invalid or missing parameters"})
+ return
+ }
+ endDate, err := time.Parse("2006-01-02", r.FormValue("endDate"))
+ if err != nil {
+ logger.Infof("invalid correlation end date %v provided: %v", r.FormValue("endDate"), err)
+ enc.Encode(&types.CorrelationDataResponse{Status: "error", Message: "Invalid or missing parameters"})
+ return
+ }
+
+ var data []*types.CorrelationData
+ err = db.ReaderDb.Select(&data, `
+ SELECT indicator,
+ value,
+ EXTRACT(epoch from date_trunc('day', time)) as time
+ FROM chart_series
+ WHERE (indicator = $1 OR indicator = $2) AND time >= $3 AND time <= $4`,
+ x, y, startDate, endDate)
+ if err != nil {
+ logger.Infof("error querying correlation data: %v", err)
+ enc.Encode(&types.CorrelationDataResponse{Status: "error", Message: "Data error"})
+ return
+ }
+
+ err = enc.Encode(&types.CorrelationDataResponse{Status: "ok", Data: data})
+ if err != nil {
+ logger.Errorf("error serializing json data for %v route: %v", r.URL.String(), err)
+ }
+}
diff --git a/handlers/pageData.go b/handlers/pageData.go
index 756678cdf6..b472b94da7 100644
--- a/handlers/pageData.go
+++ b/handlers/pageData.go
@@ -83,12 +83,14 @@ func InitPageData(w http.ResponseWriter, r *http.Request, active, path, title st
if err != nil {
logger.WithError(err).Error("error getting user session")
}
- jsn := make(map[string]interface{})
- // convert map[interface{}]interface{} -> map[string]interface{}
- for sessionKey, sessionValue := range session.Values {
- jsn[fmt.Sprintf("%v", sessionKey)] = sessionValue
+ if session != nil {
+ jsn := make(map[string]interface{})
+ // convert map[interface{}]interface{} -> map[string]interface{}
+ for sessionKey, sessionValue := range session.Values {
+ jsn[fmt.Sprintf("%v", sessionKey)] = sessionValue
+ }
+ data.DebugSession = jsn
}
- data.DebugSession = jsn
}
data.Rates.EthPrice = price.GetEthPrice(data.Rates.Currency)
data.Rates.ExchangeRate = price.GetEthPrice(data.Rates.Currency)
diff --git a/services/charts_updater.go b/services/charts_updater.go
index 022573128a..7504b1390c 100644
--- a/services/charts_updater.go
+++ b/services/charts_updater.go
@@ -27,10 +27,12 @@ var ChartHandlers = map[string]chartHandler{
"average_balance": {4, averageBalanceChartData},
"network_liveness": {5, networkLivenessChartData},
"participation_rate": {6, participationRateChartData},
+
// "inclusion_distance": {7, inclusionDistanceChartData},
// "incorrect_attestations": {6, incorrectAttestationsChartData},
// "validator_income": {7, averageDailyValidatorIncomeChartData},
// "staking_rewards": {8, stakingRewardsChartData},
+
"stake_effectiveness": {9, stakeEffectivenessChartData},
"balance_distribution": {10, balanceDistributionChartData},
"effective_balance_distribution": {11, effectiveBalanceDistributionChartData},
@@ -39,6 +41,20 @@ var ChartHandlers = map[string]chartHandler{
"graffiti_wordcloud": {14, graffitiCloudChartData},
"pools_distribution": {15, poolsDistributionChartData},
"historic_pool_performance": {16, historicPoolPerformanceData},
+
+ // execution charts start with 20+
+
+ "avg_gas_used_chart_data": {22, AvgGasUsedChartData},
+ "execution_burned_fees": {23, BurnedFeesChartData},
+ "block_gas_used": {25, TotalGasUsedChartData},
+ // "non_failed_tx_gas_usage_chart_data": {21, NonFailedTxGasUsageChartData},
+ "block_count_chart_data": {26, BlockCountChartData},
+ "block_time_avg_chart_data": {27, BlockTimeAvgChartData},
+ // "avg_gas_price": {25, AvgGasPrice},
+ "avg_gas_limit_chart_data": {28, AvgGasLimitChartData},
+ "avg_block_util_chart_data": {29, AvgBlockUtilChartData},
+ "tx_count_chart_data": {31, TxCountChartData},
+ // "avg_block_size_chart_data": {32, AvgBlockSizeChartData},
}
// LatestChartsPageData returns the latest chart page data
@@ -68,11 +84,11 @@ func chartsPageDataUpdater(wg *sync.WaitGroup) {
}
start := time.Now()
- if start.Add(time.Minute * -20).After(utils.EpochToTime(latestEpoch)) {
- logger.Info("skipping chartsPageDataUpdater because the explorer is syncing")
- time.Sleep(time.Second * 60)
- continue
- }
+ // if start.Add(time.Minute * -20).After(utils.EpochToTime(latestEpoch)) {
+ // logger.Info("skipping chartsPageDataUpdater because the explorer is syncing")
+ // time.Sleep(time.Second * 60)
+ // continue
+ // }
data, err := getChartsPageData()
if err != nil {
@@ -107,6 +123,12 @@ func getChartsPageData() ([]*types.ChartsPageDataChart, error) {
Error error
}
+ // add charts if it is mainnet
+ if utils.Config.Chain.Config.DepositChainID == 1 {
+ ChartHandlers["total_supply"] = chartHandler{20, TotalEmissionChartData}
+ ChartHandlers["market_cap_chart_data"] = chartHandler{21, MarketCapChartData}
+ }
+
wg := sync.WaitGroup{}
wg.Add(len(ChartHandlers))
@@ -1671,3 +1693,692 @@ func graffitiCloudChartData() (*types.GenericChartData, error) {
return chartData, nil
}
+
+func BurnedFeesChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ BurnedFees float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, Round(value / 1e18, 2) as value FROM chart_series WHERE time < $1 and indicator = 'BURNED_FEES' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.BurnedFees,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Burned Fees",
+ Subtitle: "Evolution of the total number of Ether burned with EIP 1559",
+ XAxisTitle: "",
+ YAxisTitle: "Burned Fees [ETH]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Burned Fees",
+ Data: seriesData,
+ },
+ },
+ }
+
+ return chartData, nil
+}
+
+func NonFailedTxGasUsageChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ BurnedFees float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, ROUND(value, 0) as value FROM chart_series WHERE time < $1 and indicator = 'NON_FAILED_TX_GAS_USAGE' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.BurnedFees,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ // IsNormalChart: true,
+ Title: "Gas Usage - Successful Tx",
+ Subtitle: "Gas usage of successful transactions that are not reverted.",
+ XAxisTitle: "",
+ YAxisTitle: "Gas Usage [Gas]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Gas Usage",
+ Data: seriesData,
+ },
+ },
+ }
+
+ return chartData, nil
+}
+
+func BlockCountChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ Value float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, ROUND(value, 0) as value FROM chart_series WHERE time < $1 and indicator = 'BLOCK_COUNT' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.Value,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Daily Block Count",
+ Subtitle: "Number of blocks produced (daily)",
+ XAxisTitle: "",
+ YAxisTitle: "Block Count [#]",
+ StackingMode: "false",
+ Type: "area",
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y)
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = orig[0] + 'Epoch ' + epoch + ''
+ }
+ return orig
+ }
+ `,
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Block Count",
+ Data: seriesData,
+ },
+ },
+ }
+
+ return chartData, nil
+}
+
+func BlockTimeAvgChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ Value float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, ROUND(value, 2) as value FROM chart_series WHERE time < $1 and indicator = 'BLOCK_TIME_AVG' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.Value,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Block Time (Avg)",
+ Subtitle: "Average time between blocks over the last 24 hours",
+ XAxisTitle: "",
+ YAxisTitle: "Block Time [seconds]",
+ StackingMode: "false",
+ Type: "area",
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y)
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = orig[0] + 'Epoch ' + epoch + ''
+ }
+ return orig
+ }
+ `,
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Block Time (s)",
+ Data: seriesData,
+ },
+ },
+ }
+
+ return chartData, nil
+}
+
+func TotalEmissionChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ Value float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, ROUND(value / 1e18, 5) as value FROM chart_series WHERE time < $1 and indicator = 'TOTAL_EMISSION' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ 72009990.50 + row.Value,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ // IsNormalChart: true,
+ Title: "Total Ether Supply",
+ Subtitle: "Evolution of the total Ether supply",
+ XAxisTitle: "",
+ YAxisTitle: "Total Supply [ETH]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Total Supply",
+ Data: seriesData,
+ },
+ },
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y * 100000) / 100000
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = 'Epoch ' + epoch + '<br/>' + orig[0]
+ }
+ return orig
+ }
+ `,
+ }
+
+ return chartData, nil
+}
+
+func AvgGasPrice() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ Value float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, ROUND(value / 1e9, 2) as value FROM chart_series WHERE time < $1 and indicator = 'AVG_GASPRICE' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.Value,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Average Gas Price",
+ Subtitle: "The average gas price for non-EIP1559 transactions.",
+ XAxisTitle: "",
+ YAxisTitle: "Gas Price [GWei]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Gas Price (avg)",
+ Data: seriesData,
+ },
+ },
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y * 100000) / 100000
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = 'Epoch ' + epoch + '<br/>' + orig[0]
+ }
+ return orig
+ }
+ `,
+ }
+
+ return chartData, nil
+}
+
+func AvgGasUsedChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ BurnedFees float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, value FROM chart_series WHERE time < $1 and indicator = 'AVG_GASUSED' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.BurnedFees,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Block Gas Usage",
+ Subtitle: "The average amount of gas used by blocks per day.",
+ XAxisTitle: "",
+ YAxisTitle: "Block Gas Usage [gas]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Average Gas Used",
+ Data: seriesData,
+ },
+ },
+ }
+
+ return chartData, nil
+}
+
+func TotalGasUsedChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ Value float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, value FROM chart_series WHERE time < $1 and indicator = 'TOTAL_GASUSED' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.Value,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Total Gas Usage",
+ Subtitle: "The total amount of daily gas used.",
+ XAxisTitle: "",
+ YAxisTitle: "Total Gas Usage [gas]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Total Gas Usage",
+ Data: seriesData,
+ },
+ },
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y)
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = 'Epoch ' + epoch + '<br/>' + orig[0]
+ }
+ return orig
+ }
+ `,
+ }
+
+ return chartData, nil
+}
+
+func AvgGasLimitChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ Value float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, value FROM chart_series WHERE time < $1 and indicator = 'AVG_GASLIMIT' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.Value,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Block Gas Limit",
+ Subtitle: "Evolution of the average block gas limit",
+ XAxisTitle: "",
+ YAxisTitle: "Gas Limit [gas]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Gas Limit",
+ Data: seriesData,
+ },
+ },
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y)
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = 'Epoch ' + epoch + '<br/>' + orig[0]
+ }
+ return orig
+ }
+ `,
+ }
+
+ return chartData, nil
+}
+
+func AvgBlockUtilChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ BurnedFees float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, value FROM chart_series WHERE time < $1 and indicator = 'AVG_BLOCK_UTIL' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.BurnedFees,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Average Block Usage",
+ Subtitle: "Evolution of the average utilization of Ethereum blocks",
+ XAxisTitle: "",
+ YAxisTitle: "Block Usage [%]",
+ StackingMode: "false",
+ Type: "spline",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Block Usage",
+ Data: seriesData,
+ },
+ },
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y * 100) / 100
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = 'Epoch ' + epoch + '<br/>' + orig[0]
+ }
+ return orig
+ }
+ `,
+ }
+
+ return chartData, nil
+}
+
+func MarketCapChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ Value float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, value FROM chart_series WHERE time < $1 and indicator = 'MARKET_CAP' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.Value,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Market Cap",
+ Subtitle: "The Evolution of the Ethereum Market Cap.",
+ XAxisTitle: "",
+ YAxisTitle: "Market Cap [$]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Market Cap",
+ Data: seriesData,
+ },
+ },
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y)
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = orig[0] + 'Epoch ' + epoch + ''
+ }
+ return orig
+ }
+ `,
+ }
+
+ return chartData, nil
+}
+
+func TxCountChartData() (*types.GenericChartData, error) {
+ if LatestEpoch() == 0 {
+ return nil, fmt.Errorf("chart-data not available pre-genesis")
+ }
+
+ rows := []struct {
+ Day time.Time `db:"time"`
+ BurnedFees float64 `db:"value"`
+ }{}
+
+ epoch := LatestEpoch()
+ if epoch > 0 {
+ epoch--
+ }
+ ts := utils.EpochToTime(epoch)
+
+ err := db.ReaderDb.Select(&rows, "SELECT time, value FROM chart_series WHERE time < $1 and indicator = 'TX_COUNT' ORDER BY time", ts)
+ if err != nil {
+ return nil, err
+ }
+
+ seriesData := [][]float64{}
+
+ for _, row := range rows {
+ seriesData = append(seriesData, []float64{
+ float64(row.Day.UnixMilli()),
+ row.BurnedFees,
+ })
+ }
+
+ chartData := &types.GenericChartData{
+ Title: "Transactions",
+ Subtitle: "The total number of transactions per day",
+ XAxisTitle: "",
+ YAxisTitle: "Tx Count [#]",
+ StackingMode: "false",
+ Type: "area",
+ Series: []*types.GenericChartDataSeries{
+ {
+ Name: "Transactions",
+ Data: seriesData,
+ },
+ },
+ TooltipFormatter: `
+ function (tooltip) {
+ this.point.y = Math.round(this.point.y)
+ var orig = tooltip.defaultFormatter.call(this, tooltip)
+ var epoch = timeToEpoch(this.x)
+ if (epoch > 0) {
+ orig[0] = 'Epoch ' + epoch + '<br/>' + orig[0]
+ }
+ return orig
+ }
+ `,
+ }
+
+ return chartData, nil
+}
+
+func AvgBlockSizeChartData() (*types.GenericChartData, error) {
+ return nil, fmt.Errorf("unimplemented")
+}
+
+func PowerConsumptionChartData() (*types.GenericChartData, error) {
+ return nil, fmt.Errorf("unimplemented")
+}
+
+func NewAccountsChartData() (*types.GenericChartData, error) {
+ return nil, fmt.Errorf("unimplemented")
+}
diff --git a/services/services.go b/services/services.go
index fdb24748dc..fccb010ea0 100644
--- a/services/services.go
+++ b/services/services.go
@@ -1359,6 +1359,7 @@ func getBurnPageData() (*types.BurnPageData, error) {
rewards = rewards.Sub(decimal.NewFromBigInt(new(big.Int).SetUint64(total.AttestationSourcePenalty), 0))
rewards = rewards.Sub(decimal.NewFromBigInt(new(big.Int).SetUint64(total.SlashingPenalty), 0))
+ // rewards per min
rewards = rewards.Div(decimal.NewFromInt(64))
// emission per minute
diff --git a/templates/charts.html b/templates/charts.html
index 783226a168..d95eb1f750 100644
--- a/templates/charts.html
+++ b/templates/charts.html
@@ -13,7 +13,7 @@
{{range $i, $e := .}}
chartFns.push(function () {
{{if .Data.IsNormalChart}}
- Highcharts.chart('chart-{{$i}}', {
+ Highcharts.chart('chart-{{.Order}}', {
chart: {
type: {{.Data.Type}},
},
@@ -24,7 +24,7 @@
},
subtitle: {
useHTML: true,
- text: {{.Data.Subtitle}}{{ if .Data.ShowGapHider }} +' '{{end}}
+ text: {{.Data.Subtitle}}{{ if .Data.ShowGapHider }} +' '{{end}}
},
plotOptions: {
column: {stacking: {{.Data.StackingMode}} },
@@ -74,7 +74,7 @@
})
s.data = d
})
- Highcharts.chart('chart-{{$i}}-nogaps', {
+ Highcharts.chart('chart-{{.Order}}-nogaps', {
chart: {type: {{.Data.Type}} },
rangeSelector: {enabled: false},
title: {
@@ -83,7 +83,7 @@
},
subtitle: {
useHTML: true,
- text: {{.Data.Subtitle}} +' '
+ text: {{.Data.Subtitle}} +' '
},
plotOptions: {
column: {
@@ -126,7 +126,7 @@
}
{{end}}
{{else}}
- Highcharts.stockChart('chart-{{$i}}', {
+ Highcharts.stockChart('chart-{{.Order}}', {
chart: {type: {{.Data.Type}} },
rangeSelector: {enabled: false},
title: {
@@ -295,24 +295,17 @@