From fe9000cff989aec801b572170bd8728762bffee8 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Thu, 9 May 2024 17:52:12 +0300 Subject: [PATCH 001/150] feat: Diamond proxy (EIP-2535) support (#10034) * feat: Diamond proxy (eip-2535) * Additional logic change * Refactoring & backward compatibility * Refactor specs * Remove prepare_value function * implementation_address_hash_from_db, implementation_name_from_db to plural form * Remove check implementation_address_hash_strings is list * address_hash_to_smart_contract_with_bytecode_twin function: return options param into the call of single_implementation_smart_contract_from_proxy * Remove fallback "|| [burn_address_hash_string()]" * Update spec of set_proxy_verification_result * Fix web tests * Change the order of enum values to match db enum * Remove duplicated clause in save_implementation_data/4 * Remove duplicated line * Add clause for [] to set_proxy_verification_result --- .../api/v2/smart_contract_controller.ex | 33 +++- .../controllers/smart_contract_controller.ex | 1 + .../templates/address/_link.html.eex | 10 +- .../address/_responsive_hash.html.eex | 10 +- .../templates/transaction_log/_logs.html.eex | 11 +- .../views/api/v2/address_view.ex | 46 +++++- .../block_scout_web/views/api/v2/helper.ex | 15 +- .../account/api/v2/user_controller_test.exs | 4 + .../api/v2/address_controller_test.exs | 6 +- .../views/api/v2/address_view_test.exs | 2 +- apps/explorer/lib/explorer/chain.ex | 44 ++++-- .../lib/explorer/chain/smart_contract.ex | 27 +++- .../explorer/chain/smart_contract/proxy.ex | 85 ++++++---- .../chain/smart_contract/proxy/basic.ex | 9 +- .../chain/smart_contract/proxy/eip_2535.ex | 36 +++++ .../proxy/models/implementation.ex | 149 ++++++++++-------- .../proxy/verification_status.ex | 7 +- .../lib/explorer/etherscan/contracts.ex | 14 +- .../20240425185705_alter_proxy_type.exs | 7 + .../proxy/models/implementation_test.exs | 29 ++-- 20 files changed, 380 insertions(+), 165 deletions(-) create mode 
100644 apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_2535.ex create mode 100644 apps/explorer/priv/repo/migrations/20240425185705_alter_proxy_type.exs diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex index 3695d317be87..a1c7d00a5f9c 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex @@ -6,7 +6,6 @@ defmodule BlockScoutWeb.API.V2.SmartContractController do import BlockScoutWeb.PagingHelper, only: [current_filter: 1, delete_parameters_from_next_page_params: 1, search_query: 1, smart_contracts_sorting: 1] - import Explorer.Chain.SmartContract, only: [burn_address_hash_string: 0] import Explorer.SmartContract.Solidity.Verifier, only: [parse_boolean: 1] alias BlockScoutWeb.{AccessHelper, AddressView} @@ -101,16 +100,25 @@ defmodule BlockScoutWeb.API.V2.SmartContractController do {:not_found, {:ok, address}} <- {:not_found, Chain.find_contract_address(address_hash, @smart_contract_address_options)}, {:not_found, false} <- {:not_found, is_nil(address.smart_contract)} do - implementation_address_hash_string = + implementation_address_hash_strings = address.smart_contract |> Implementation.get_implementation(@api_true) |> Tuple.to_list() - |> List.first() || burn_address_hash_string() + |> List.first() + + functions = + implementation_address_hash_strings + |> Enum.reduce([], fn implementation_address_hash_string, acc -> + functions_from_implementation = + Reader.read_only_functions_proxy(address_hash, implementation_address_hash_string, nil, @api_true) + + acc ++ functions_from_implementation + end) conn |> put_status(200) |> render(:read_functions, %{ - functions: Reader.read_only_functions_proxy(address_hash, implementation_address_hash_string, nil, @api_true) + functions: 
functions }) end end @@ -123,17 +131,26 @@ defmodule BlockScoutWeb.API.V2.SmartContractController do {:not_found, {:ok, address}} <- {:not_found, Chain.find_contract_address(address_hash, @smart_contract_address_options)}, {:not_found, false} <- {:not_found, is_nil(address.smart_contract)} do - implementation_address_hash_string = + implementation_address_hash_strings = address.smart_contract |> Implementation.get_implementation(@api_true) |> Tuple.to_list() - |> List.first() || burn_address_hash_string() + |> List.first() + + functions = + implementation_address_hash_strings + |> Enum.reduce([], fn implementation_address_hash_string, acc -> + functions_from_implementation = + implementation_address_hash_string + |> Writer.write_functions_proxy(@api_true) + + acc ++ functions_from_implementation + end) conn |> put_status(200) |> json( - implementation_address_hash_string - |> Writer.write_functions_proxy(@api_true) + functions |> Reader.get_abi_with_method_id() ) end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex index aee000a0093c..87eaf6bd7824 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex @@ -30,6 +30,7 @@ defmodule BlockScoutWeb.SmartContractController do address.smart_contract |> Implementation.get_implementation() |> Tuple.to_list() + |> List.first() |> List.first() || burn_address_hash_string() else burn_address_hash_string() diff --git a/apps/block_scout_web/lib/block_scout_web/templates/address/_link.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/address/_link.html.eex index e9da293c2ea4..43c95a808547 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/address/_link.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/address/_link.html.eex @@ -1,6 +1,14 
@@ +<% implementation_names = Implementation.names(@address) %> +<% implementation_name = + if Enum.empty?(implementation_names) do + nil + else + implementation_names |> Enum.at(0) + end +%> <%= if @address do %> <%= if assigns[:show_full_hash] do %> - <%= if name = if assigns[:ignore_implementation_name], do: primary_name(@address), else: Implementation.name(@address) || primary_name(@address) do %> + <%= if name = if assigns[:ignore_implementation_name], do: primary_name(@address), else: implementation_name || primary_name(@address) do %> <%= name %> | <% end %> <%= link to: address_path(BlockScoutWeb.Endpoint, :show, @address), "data-test": "address_hash_link", class: assigns[:class] do %> diff --git a/apps/block_scout_web/lib/block_scout_web/templates/address/_responsive_hash.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/address/_responsive_hash.html.eex index c80f0d672247..b602d96e7d37 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/address/_responsive_hash.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/address/_responsive_hash.html.eex @@ -1,5 +1,13 @@ +<% implementation_names = Implementation.names(@address) %> +<% implementation_name = + if Enum.empty?(implementation_names) do + nil + else + implementation_names |> Enum.at(0) + end +%> - <%= if name = if assigns[:ignore_implementation_name], do: primary_name(@address), else: Implementation.name(@address) || primary_name(@address) do %> + <%= if name = if assigns[:ignore_implementation_name], do: primary_name(@address), else: implementation_name || primary_name(@address) do %> <%= if assigns[:no_tooltip] do %> <%= if @use_custom_tooltip == true do %> <%= name %> (<%= short_hash(@address) %>...) 
diff --git a/apps/block_scout_web/lib/block_scout_web/templates/transaction_log/_logs.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/transaction_log/_logs.html.eex index fd9c951ea789..fe36dc6964e7 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/transaction_log/_logs.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/transaction_log/_logs.html.eex @@ -16,11 +16,20 @@ <%= nil %> <% end %> + <% implementation_names = Implementation.names(@log.address) %> + <% implementation_name = + if Enum.empty?(implementation_names) do + nil + else + implementation_names |> Enum.at(0) + end + %> +
<%= gettext "Address" %>

- <% name = Implementation.name(@log.address) || primary_name(@log.address)%> + <% name = implementation_name || primary_name(@log.address)%> <%= link( (if name, do: name <> " | "<> to_string(@log.address), else: @log.address), to: address_path(@conn, :show, @log.address), diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex index b8d4a50e3144..6482b4733a63 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex @@ -95,17 +95,24 @@ defmodule BlockScoutWeb.API.V2.AddressView do is_proxy = AddressView.smart_contract_is_proxy?(address_with_smart_contract, @api_true) - {implementation_address, implementation_name} = + {implementation_addresses, implementation_names} = with true <- is_proxy, - {address, name} <- + {addresses, names} <- Implementation.get_implementation(address_with_smart_contract.smart_contract, @api_true), - false <- is_nil(address), - {:ok, address_hash} <- Chain.string_to_address_hash(address), - checksummed_address <- Address.checksum(address_hash) do - {checksummed_address, name} + false <- addresses && Enum.empty?(addresses) do + addresses + |> Enum.zip(names) + |> Enum.reduce({[], []}, fn {address, name}, {addresses, names} = acc -> + with {:ok, address_hash} <- Chain.string_to_address_hash(address), + checksummed_address <- Address.checksum(address_hash) do + {[checksummed_address | addresses], [name | names]} + else + _ -> acc + end + end) else _ -> - {nil, nil} + {[], []} end balance = address.fetched_coin_balance && address.fetched_coin_balance.value @@ -118,14 +125,21 @@ defmodule BlockScoutWeb.API.V2.AddressView do write_custom_abi? = AddressView.has_address_custom_abi_with_write_functions?(conn, address.hash) read_custom_abi? 
= AddressView.has_address_custom_abi_with_read_functions?(conn, address.hash) + # todo: added for backward compatibility, remove when frontend unbound from these props + {implementation_address, implementation_name} = + single_implementation(implementation_addresses, implementation_names) + Map.merge(base_info, %{ "creator_address_hash" => creator_hash && Address.checksum(creator_hash), "creation_tx_hash" => creation_tx, "token" => token, "coin_balance" => balance, "exchange_rate" => exchange_rate, + # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => implementation_name, + "implementation_names" => implementation_names, "implementation_address" => implementation_address, + "implementation_addresses" => implementation_addresses, "block_number_balance_updated_at" => address.fetched_coin_balance_block_number, "has_custom_methods_read" => read_custom_abi?, "has_custom_methods_write" => write_custom_abi?, @@ -144,6 +158,24 @@ defmodule BlockScoutWeb.API.V2.AddressView do }) end + defp single_implementation(implementation_addresses, implementation_names) do + implementation_name = + if implementation_names && !Enum.empty?(implementation_names) do + implementation_names |> Enum.at(0) + else + nil + end + + implementation_address = + if implementation_addresses && !Enum.empty?(implementation_addresses) do + implementation_addresses |> Enum.at(0) + else + nil + end + + {implementation_address, implementation_name} + end + @spec prepare_token_balance(Chain.Address.TokenBalance.t(), boolean()) :: map() defp prepare_token_balance(token_balance, fetch_token_instance? 
\\ false) do %{ diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index 06adffd7a4d4..7cbf82adb83f 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -54,11 +54,22 @@ defmodule BlockScoutWeb.API.V2.Helper do """ @spec address_with_info(any(), any()) :: nil | %{optional(<<_::32, _::_*8>>) => any()} def address_with_info(%Address{} = address, _address_hash) do + implementation_names = Implementation.names(address) + + implementation_name = + if Enum.empty?(implementation_names) do + nil + else + implementation_names |> Enum.at(0) + end + %{ "hash" => Address.checksum(address), "is_contract" => Address.smart_contract?(address), "name" => address_name(address), - "implementation_name" => Implementation.name(address), + # todo: added for backward compatibility, remove when frontend unbound from these props + "implementation_name" => implementation_name, + "implementation_names" => implementation_names, "is_verified" => verified?(address), "ens_domain_name" => address.ens_domain_name, "metadata" => address.metadata @@ -85,7 +96,9 @@ defmodule BlockScoutWeb.API.V2.Helper do "hash" => Address.checksum(address_hash), "is_contract" => false, "name" => nil, + # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => nil, + "implementation_names" => [], "is_verified" => nil, "ens_domain_name" => nil, "metadata" => nil diff --git a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs index a30a4156414e..4ff5086ea5ad 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs +++ 
b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs @@ -151,7 +151,9 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do "name" => name, "address" => %{ "hash" => Address.checksum(addr), + # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => nil, + "implementation_names" => [], "is_contract" => false, "is_verified" => false, "name" => nil, @@ -205,7 +207,9 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do "name" => name, "address" => %{ "hash" => Address.checksum(addr), + # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => nil, + "implementation_names" => [], "is_contract" => false, "is_verified" => false, "name" => nil, diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs index a7ae52fc91fe..cd79b0e494df 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs @@ -72,8 +72,11 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do "token" => nil, "coin_balance" => nil, "exchange_rate" => nil, + # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => nil, + "implementation_names" => [], "implementation_address" => nil, + "implementation_addresses" => [], "block_number_balance_updated_at" => nil, "has_custom_methods_read" => false, "has_custom_methods_write" => false, @@ -138,7 +141,8 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do "watchlist_names" => [], "creator_address_hash" => ^from, "creation_tx_hash" => ^tx_hash, - "implementation_address" => ^implementation_address_hash_string + "implementation_address" => 
^implementation_address_hash_string, + "implementation_addresses" => [^implementation_address_hash_string] } = json_response(request, 200) end diff --git a/apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs b/apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs index 5d91aa259b7f..58cf82acec60 100644 --- a/apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs +++ b/apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs @@ -19,7 +19,7 @@ defmodule BlockScoutWeb.API.V2.AddressViewTest do proxy_address_hash: proxy_address.hash, proxy_type: "eip1967", address_hashes: [implementation_address.hash], - names: [] + names: [nil] ) assert implementation.proxy_type == :eip1967 diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index f1a4fcfa72e9..71a3f77425bb 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -1176,7 +1176,7 @@ defmodule Explorer.Chain do else LookUpSmartContractSourcesOnDemand.trigger_fetch(address_result, nil) - {implementation_address_hash, _} = + {implementation_address_hashes, _} = Implementation.get_implementation( %{ updated: %SmartContract{ @@ -1189,20 +1189,7 @@ defmodule Explorer.Chain do Keyword.put(options, :unverified_proxy_only?, true) ) - implementation_smart_contract = - implementation_address_hash - |> Proxy.implementation_to_smart_contract(options) - - address_verified_bytecode_twin_contract = - implementation_smart_contract || - SmartContract.get_address_verified_bytecode_twin_contract(hash, options).verified_contract - - address_result - |> SmartContract.add_bytecode_twin_info_to_contract(address_verified_bytecode_twin_contract, hash) - |> (&if(is_nil(implementation_smart_contract), - do: &1, - else: SmartContract.add_implementation_info_to_contract(&1, implementation_address_hash) - )).() + add_implementation_and_bytecode_twin_to_result(address_result, 
implementation_address_hashes, hash, options) end _ -> @@ -1218,6 +1205,33 @@ defmodule Explorer.Chain do end end + defp add_implementation_and_bytecode_twin_to_result(address_result, implementation_address_hashes, hash, options) do + # implementation is added only in the case when mapping proxy to implementation is 1:1 (excluding Diamond proxy) + {implementation_smart_contract, implementation_address_hash} = + if implementation_address_hashes && Enum.count(implementation_address_hashes) == 1 do + implementation_address_hash = implementation_address_hashes |> Enum.at(0) + + implementation_smart_contract = + implementation_address_hash + |> Proxy.implementation_to_smart_contract(options) + + {implementation_smart_contract, implementation_address_hash} + else + {nil, nil} + end + + address_verified_bytecode_twin_contract = + implementation_smart_contract || + SmartContract.get_address_verified_bytecode_twin_contract(hash, options).verified_contract + + address_result + |> SmartContract.add_bytecode_twin_info_to_contract(address_verified_bytecode_twin_contract, hash) + |> (&if(is_nil(implementation_smart_contract), + do: &1, + else: SmartContract.add_implementation_info_to_contract(&1, implementation_address_hash) + )).() + end + @spec find_decompiled_contract_address(Hash.Address.t()) :: {:ok, Address.t()} | {:error, :not_found} def find_decompiled_contract_address(%Hash{byte_count: unquote(Hash.Address.byte_count())} = hash) do query = diff --git a/apps/explorer/lib/explorer/chain/smart_contract.ex b/apps/explorer/lib/explorer/chain/smart_contract.ex index 8e48e0274a97..e13ab9951fce 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract.ex @@ -604,6 +604,18 @@ defmodule Explorer.Chain.SmartContract do def compose_address_for_unverified_smart_contract(address_result, _hash, _options), do: address_result + def single_implementation_smart_contract_from_proxy(proxy_hash, options) do + 
{implementation_address_hashes, _} = Implementation.get_implementation(proxy_hash, options) + + if implementation_address_hashes && Enum.count(implementation_address_hashes) == 1 do + implementation_address_hashes + |> Enum.at(0) + |> Proxy.implementation_to_smart_contract(options) + else + nil + end + end + @doc """ Finds metadata for verification of a contract from verified twins: contracts with the same bytecode which were verified previously, returns a single t:SmartContract.t/0 @@ -934,13 +946,14 @@ defmodule Explorer.Chain.SmartContract do with true <- is_nil(current_smart_contract), {:ok, address} <- Chain.hash_to_address(address_hash), true <- Chain.contract?(address) do - {implementation_address, implementation_address_fetched?} = + {implementation_smart_contract, implementation_address_fetched?} = if fetch_implementation? do - {implementation_address_hash, _} = - Implementation.get_implementation( + implementation_smart_contract = + SmartContract.single_implementation_smart_contract_from_proxy( %{ - updated: %__MODULE__{ - address_hash: address_hash + updated: %SmartContract{ + address_hash: address_hash, + abi: nil }, implementation_updated_at: nil, implementation_address_fetched?: false, @@ -949,13 +962,13 @@ defmodule Explorer.Chain.SmartContract do Keyword.put(options, :unverified_proxy_only?, true) ) - {implementation_address_hash |> Proxy.implementation_to_smart_contract(options), true} + {implementation_smart_contract, true} else {nil, false} end address_verified_bytecode_twin_contract = - implementation_address || + implementation_smart_contract || get_address_verified_bytecode_twin_contract(address_hash, options).verified_contract smart_contract = diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex index 7d00526ccf87..cb52efa53417 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex @@ -6,7 
+6,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do alias EthereumJSONRPC.Contract alias Explorer.Chain.{Hash, SmartContract} alias Explorer.Chain.SmartContract.Proxy - alias Explorer.Chain.SmartContract.Proxy.{Basic, EIP1167, EIP1822, EIP1967, EIP930, MasterCopy} + alias Explorer.Chain.SmartContract.Proxy.{Basic, EIP1167, EIP1822, EIP1967, EIP2535, EIP930, MasterCopy} import Explorer.Chain, only: [ @@ -41,30 +41,26 @@ defmodule Explorer.Chain.SmartContract.Proxy do Fetches into DB proxy contract implementation's address and name from different proxy patterns """ @spec fetch_implementation_address_hash(Hash.Address.t(), list(), options) :: - {String.t() | nil | :empty, String.t() | nil | :empty} + {[String.t()] | :empty | :error, [String.t()] | :empty | :error} def fetch_implementation_address_hash(proxy_address_hash, proxy_abi, options) when not is_nil(proxy_address_hash) do - %{implementation_address_hash_string: implementation_address_hash_string, proxy_type: proxy_type} = + %{implementation_address_hash_strings: implementation_address_hash_strings, proxy_type: proxy_type} = if options[:unverified_proxy_only?] 
do get_implementation_address_hash_string_for_non_verified_proxy(proxy_address_hash) else get_implementation_address_hash_string(proxy_address_hash, proxy_abi) end - if implementation_address_hash_string !== :error do - save_implementation_data( - implementation_address_hash_string, - proxy_address_hash, - proxy_type, - options - ) - else - {nil, nil} - end + save_implementation_data( + implementation_address_hash_strings, + proxy_address_hash, + proxy_type, + options + ) end def fetch_implementation_address_hash(_, _, _) do - {nil, nil} + {:empty, :empty} end @doc """ @@ -82,12 +78,20 @@ defmodule Explorer.Chain.SmartContract.Proxy do true else _ -> - {implementation_address_hash_string, _implementation_name} = get_implementation(smart_contract, options) - - with false <- is_nil(implementation_address_hash_string), - {:ok, implementation_address_hash} <- string_to_address_hash(implementation_address_hash_string), - false <- implementation_address_hash.bytes == burn_address_hash.bytes do - true + {implementation_address_hash_strings, _implementation_names} = get_implementation(smart_contract, options) + + with false <- is_nil(implementation_address_hash_strings), + false <- Enum.empty?(implementation_address_hash_strings) do + implementation_address_hash_strings + |> Enum.reduce_while(false, fn implementation_address_hash_string, acc -> + with {:ok, implementation_address_hash} <- string_to_address_hash(implementation_address_hash_string), + false <- implementation_address_hash.bytes == burn_address_hash.bytes do + {:halt, true} + else + _ -> + {:cont, acc} + end + end) else _ -> false @@ -124,9 +128,12 @@ defmodule Explorer.Chain.SmartContract.Proxy do options ) when not is_nil(proxy_address_hash) and not is_nil(abi) do - {implementation_address_hash_string, _name} = get_implementation(smart_contract, options) + {implementation_address_hash_strings, _names} = get_implementation(smart_contract, options) - 
SmartContract.get_smart_contract_abi(implementation_address_hash_string) + implementation_address_hash_strings + |> Enum.reduce([], fn implementation_address_hash_string, acc -> + SmartContract.get_smart_contract_abi(implementation_address_hash_string) ++ acc + end) end def get_implementation_abi_from_proxy(_, _), do: [] @@ -205,7 +212,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do Returns EIP-1167 implementation address or tries next proxy pattern """ @spec get_implementation_address_hash_string_eip1167(Hash.Address.t(), any(), bool()) :: - %{implementation_address_hash_string: String.t() | :error | nil, proxy_type: atom()} + %{implementation_address_hash_strings: [String.t() | :error | nil], proxy_type: atom()} def get_implementation_address_hash_string_eip1167(proxy_address_hash, proxy_abi, go_to_fallback? \\ true) do get_implementation_address_hash_string_by_module( EIP1167, @@ -223,7 +230,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do Returns EIP-1967 implementation address or tries next proxy pattern """ @spec get_implementation_address_hash_string_eip1967(Hash.Address.t(), any(), bool()) :: %{ - implementation_address_hash_string: String.t() | :error | nil, + implementation_address_hash_strings: [String.t() | :error | nil], proxy_type: atom() } def get_implementation_address_hash_string_eip1967(proxy_address_hash, proxy_abi, go_to_fallback?) do @@ -243,7 +250,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do Returns EIP-1822 implementation address or tries next proxy pattern """ @spec get_implementation_address_hash_string_eip1822(Hash.Address.t(), any(), bool()) :: %{ - implementation_address_hash_string: String.t() | :error | nil, + implementation_address_hash_strings: [String.t() | :error | nil], proxy_type: atom() } def get_implementation_address_hash_string_eip1822(proxy_address_hash, proxy_abi, go_to_fallback?) 
do @@ -260,7 +267,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do if !is_nil(implementation_address_hash_string) && implementation_address_hash_string !== burn_address_hash_string() && implementation_address_hash_string !== :error do - %{implementation_address_hash_string: implementation_address_hash_string, proxy_type: proxy_type} + %{implementation_address_hash_strings: [implementation_address_hash_string], proxy_type: proxy_type} else cond do next_func !== :fallback_proxy_detection -> @@ -278,13 +285,13 @@ defmodule Explorer.Chain.SmartContract.Proxy do end defp implementation_fallback_value(implementation_address_hash_string) do - value = if implementation_address_hash_string == :error, do: :error, else: nil + value = if implementation_address_hash_string == :error, do: :error, else: [] - %{implementation_address_hash_string: value, proxy_type: :unknown} + %{implementation_address_hash_strings: value, proxy_type: :unknown} end @spec fallback_proxy_detection(Hash.Address.t(), any(), :error | nil) :: %{ - implementation_address_hash_string: String.t() | :error | nil, + implementation_address_hash_strings: [String.t() | :error | nil], proxy_type: atom() } def fallback_proxy_detection(proxy_address_hash, proxy_abi, fallback_value \\ nil) do @@ -294,26 +301,36 @@ defmodule Explorer.Chain.SmartContract.Proxy do comptroller_implementation_method_abi = get_naive_implementation_abi(proxy_abi, "comptrollerImplementation") + diamond_implementation_method_abi = get_naive_implementation_abi(proxy_abi, "facetAddresses") + master_copy_method_abi = get_master_copy_pattern(proxy_abi) get_address_method_abi = get_naive_implementation_abi(proxy_abi, "getAddress") cond do + diamond_implementation_method_abi -> + implementation_address_hash_strings = EIP2535.get_implementation_address_hash_strings(proxy_address_hash) + + %{implementation_address_hash_strings: implementation_address_hash_strings, proxy_type: :eip2535} + implementation_method_abi -> 
implementation_address_hash_string = Basic.get_implementation_address_hash_string(@implementation_signature, proxy_address_hash, proxy_abi) - %{implementation_address_hash_string: implementation_address_hash_string, proxy_type: :basic_implementation} + %{implementation_address_hash_strings: [implementation_address_hash_string], proxy_type: :basic_implementation} get_implementation_method_abi -> implementation_address_hash_string = Basic.get_implementation_address_hash_string(@get_implementation_signature, proxy_address_hash, proxy_abi) - %{implementation_address_hash_string: implementation_address_hash_string, proxy_type: :basic_get_implementation} + %{ + implementation_address_hash_strings: [implementation_address_hash_string], + proxy_type: :basic_get_implementation + } master_copy_method_abi -> implementation_address_hash_string = MasterCopy.get_implementation_address_hash_string(proxy_address_hash) - %{implementation_address_hash_string: implementation_address_hash_string, proxy_type: :master_copy} + %{implementation_address_hash_strings: [implementation_address_hash_string], proxy_type: :master_copy} comptroller_implementation_method_abi -> implementation_address_hash_string = @@ -323,13 +340,13 @@ defmodule Explorer.Chain.SmartContract.Proxy do proxy_abi ) - %{implementation_address_hash_string: implementation_address_hash_string, proxy_type: :comptroller} + %{implementation_address_hash_strings: [implementation_address_hash_string], proxy_type: :comptroller} get_address_method_abi -> implementation_address_hash_string = EIP930.get_implementation_address_hash_string(@get_address_signature, proxy_address_hash, proxy_abi) - %{implementation_address_hash_string: implementation_address_hash_string, proxy_type: :eip_930} + %{implementation_address_hash_strings: [implementation_address_hash_string], proxy_type: :eip_930} true -> fallback_value diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/basic.ex 
b/apps/explorer/lib/explorer/chain/smart_contract/proxy/basic.ex index dc4f305900ee..d0d6a4fb5429 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy/basic.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/basic.ex @@ -9,7 +9,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Basic do @doc """ Gets implementation hash string of proxy contract from getter. """ - @spec get_implementation_address_hash_string(binary, binary, SmartContract.abi()) :: nil | binary + @spec get_implementation_address_hash_string(binary, binary, SmartContract.abi()) :: nil | binary() | [binary()] def get_implementation_address_hash_string(signature, proxy_address_hash, abi) do implementation_address = case Reader.query_contract( @@ -30,9 +30,14 @@ defmodule Explorer.Chain.SmartContract.Proxy.Basic do @doc """ Adds 0x to address at the beginning """ - @spec adds_0x_to_address(nil | binary()) :: nil | binary() + @spec adds_0x_to_address(nil | binary()) :: nil | binary() | [binary()] def adds_0x_to_address(nil), do: nil + def adds_0x_to_address(addresses) when is_list(addresses) do + addresses + |> Enum.map(fn address -> adds_0x_to_address(address) end) + end + def adds_0x_to_address(address) do if address do if String.starts_with?(address, "0x") do diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_2535.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_2535.ex new file mode 100644 index 000000000000..cc0d374e60c5 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_2535.ex @@ -0,0 +1,36 @@ +defmodule Explorer.Chain.SmartContract.Proxy.EIP2535 do + @moduledoc """ + Module for fetching proxy implementation from https://eips.ethereum.org/EIPS/eip-2535 (Diamond Proxy) + """ + + # 52ef6b2c = keccak256(facetAddresses()) + @facet_addresses_signature "52ef6b2c" + + alias Explorer.Chain.Hash + alias Explorer.Chain.SmartContract.Proxy.Basic + + @facet_addresses_method_abi [ + %{ + "inputs" => [], + "name" => 
"facetAddresses", + "outputs" => [%{"internalType" => "address[]", "name" => "facetAddresses_", "type" => "address[]"}], + "stateMutability" => "view", + "type" => "function" + } + ] + + @spec get_implementation_address_hash_strings(Hash.Address.t()) :: nil | [binary] + def get_implementation_address_hash_strings(proxy_address_hash) do + case @facet_addresses_signature + |> Basic.get_implementation_address_hash_string( + to_string(proxy_address_hash), + @facet_addresses_method_abi + ) do + implementation_addresses when is_list(implementation_addresses) -> + implementation_addresses + + _ -> + nil + end + end +end diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex index 3f3388c3bf74..0b9437a5a140 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex @@ -30,6 +30,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do typed_schema "proxy_implementations" do field(:proxy_address_hash, Hash.Address, primary_key: true, null: false) + # the order matches order of enum values in the DB field(:proxy_type, Ecto.Enum, values: [ :eip1167, @@ -40,6 +41,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do :basic_implementation, :basic_get_implementation, :comptroller, + :eip2535, :unknown ], null: true @@ -153,7 +155,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do }, options ) do - {implementation_address_hash_from_db, implementation_name_from_db, implementation_updated_at_from_db} = + {implementation_addresses_hash_from_db, implementation_names_from_db, implementation_updated_at_from_db} = implementation_from_db(address_hash, options) implementation_updated_at = implementation_updated_at || implementation_updated_at_from_db @@ -177,22 +179,26 @@ defmodule 
Explorer.Chain.SmartContract.Proxy.Models.Implementation do case Task.yield(get_implementation_address_hash_task, timeout) || Task.ignore(get_implementation_address_hash_task) do {:ok, {:empty, :empty}} -> - {nil, nil} + {[], []} + + {:ok, {:error, :error}} -> + {db_implementation_data_converter(implementation_addresses_hash_from_db), + db_implementation_data_converter(implementation_names_from_db)} {:ok, {address_hash, _name} = result} when not is_nil(address_hash) -> result _ -> - {db_implementation_data_converter(implementation_address_hash_from_db), - db_implementation_data_converter(implementation_name_from_db)} + {db_implementation_data_converter(implementation_addresses_hash_from_db), + db_implementation_data_converter(implementation_names_from_db)} end else - {db_implementation_data_converter(implementation_address_hash_from_db), - db_implementation_data_converter(implementation_name_from_db)} + {db_implementation_data_converter(implementation_addresses_hash_from_db), + db_implementation_data_converter(implementation_names_from_db)} end end - def get_implementation(_, _), do: {nil, nil} + def get_implementation(_, _), do: {[], []} defp fetch_implementation?(implementation_address_fetched?, refetch_necessity_checked?, implementation_updated_at) do (!implementation_address_fetched? || !refetch_necessity_checked?) 
&& @@ -202,20 +208,10 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do defp implementation_from_db(address_hash, options) do proxy_implementations = get_proxy_implementations(address_hash, options) - # todo: process multiple implementations in case of Diamond proxy if proxy_implementations do - {implementation_address_hash, implementation_name} = - if Enum.count(proxy_implementations.address_hashes) == 1 do - implementation_address_hash = proxy_implementations.address_hashes |> Enum.at(0) - implementation_name = proxy_implementations.names |> Enum.at(0) - {implementation_address_hash, implementation_name} - else - {nil, nil} - end - - {implementation_address_hash, implementation_name, proxy_implementations.updated_at} + {proxy_implementations.address_hashes, proxy_implementations.names, proxy_implementations.updated_at} else - {nil, nil, nil} + {[], [], nil} end end @@ -273,68 +269,90 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do @doc """ Saves proxy's implementation into the DB """ - @spec save_implementation_data(String.t() | nil, Hash.Address.t(), atom() | nil, Keyword.t()) :: - {nil, nil} | {String.t(), String.t() | nil} + @spec save_implementation_data([String.t()], Hash.Address.t(), atom() | nil, Keyword.t()) :: + {[String.t()], [String.t()]} | {:empty, :empty} | {:error, :error} + def save_implementation_data(:error, _proxy_address_hash, _proxy_type, _options) do + {:error, :error} + end + + def save_implementation_data(implementation_address_hash_strings, proxy_address_hash, proxy_type, options) + when is_nil(implementation_address_hash_strings) or + implementation_address_hash_strings == [] do + upsert_implementation(proxy_address_hash, proxy_type, [], [], options) + + {:empty, :empty} + end + def save_implementation_data( - implementation_address_hash_string, + [empty_implementation_address_hash_string], proxy_address_hash, proxy_type, options ) - when is_nil(implementation_address_hash_string) or 
is_burn_signature(implementation_address_hash_string) do - upsert_implementation(proxy_address_hash, proxy_type, nil, nil, options) + when is_burn_signature(empty_implementation_address_hash_string) do + upsert_implementation(proxy_address_hash, proxy_type, [], [], options) {:empty, :empty} end def save_implementation_data( - implementation_address_hash_string, + implementation_address_hash_strings, proxy_address_hash, proxy_type, options - ) - when is_binary(implementation_address_hash_string) do - with {:ok, implementation_address_hash} <- string_to_address_hash(implementation_address_hash_string), - {:implementation, {%SmartContract{name: name}, _}} <- - {:implementation, - SmartContract.address_hash_to_smart_contract_with_bytecode_twin(implementation_address_hash, options, false)} do - upsert_implementation(proxy_address_hash, proxy_type, implementation_address_hash_string, name, options) - - {implementation_address_hash_string, name} + ) do + {implementation_addresses, implementation_names} = + implementation_address_hash_strings + |> Enum.map(fn implementation_address_hash_string -> + with {:ok, implementation_address_hash} <- string_to_address_hash(implementation_address_hash_string), + {:implementation, {%SmartContract{name: name}, _}} <- { + :implementation, + SmartContract.address_hash_to_smart_contract_with_bytecode_twin(implementation_address_hash, options) + } do + {implementation_address_hash_string, name} + else + :error -> + :error + + {:implementation, _} -> + {implementation_address_hash_string, nil} + end + end) + |> Enum.filter(&(&1 !== :error)) + |> Enum.unzip() + + if Enum.empty?(implementation_addresses) do + {:empty, :empty} else - :error -> - {:empty, :empty} - - {:implementation, _} -> - upsert_implementation( - proxy_address_hash, - proxy_type, - implementation_address_hash_string, - nil, - options - ) - - {implementation_address_hash_string, nil} + upsert_implementation( + proxy_address_hash, + proxy_type, + implementation_addresses, + 
implementation_names, + options + ) + + {implementation_addresses, implementation_names} end end - defp upsert_implementation(proxy_address_hash, proxy_type, implementation_address_hash_string, name, options) do + defp upsert_implementation(proxy_address_hash, proxy_type, implementation_address_hash_strings, names, options) do proxy = get_proxy_implementations(proxy_address_hash, options) if proxy do - update_implementation(proxy, proxy_type, implementation_address_hash_string, name) + update_implementation(proxy, proxy_type, implementation_address_hash_strings, names) else - insert_implementation(proxy_address_hash, proxy_type, implementation_address_hash_string, name) + insert_implementation(proxy_address_hash, proxy_type, implementation_address_hash_strings, names) end end - defp insert_implementation(proxy_address_hash, proxy_type, implementation_address_hash_string, name) + defp insert_implementation(proxy_address_hash, proxy_type, implementation_address_hash_strings, names) when not is_nil(proxy_address_hash) do changeset = %{ proxy_address_hash: proxy_address_hash, proxy_type: proxy_type, - address_hashes: (implementation_address_hash_string && [implementation_address_hash_string]) || [], - names: (name && [name]) || [] + address_hashes: implementation_address_hash_strings, + names: names } %__MODULE__{} @@ -342,36 +360,39 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do |> Repo.insert() end - defp update_implementation(proxy, proxy_type, implementation_address_hash_string, name) do + defp update_implementation(proxy, proxy_type, implementation_address_hash_strings, names) do proxy |> changeset(%{ proxy_type: proxy_type, - address_hashes: (implementation_address_hash_string && [implementation_address_hash_string]) || [], - names: (name && [name]) || [] + address_hashes: implementation_address_hash_strings, + names: names }) |> Repo.update() end defp db_implementation_data_converter(nil), do: nil + + defp 
db_implementation_data_converter(list) when is_list(list), + do: list |> Enum.map(&db_implementation_data_converter(&1)) + defp db_implementation_data_converter(string) when is_binary(string), do: string defp db_implementation_data_converter(other), do: to_string(other) @doc """ - Returns proxy's implementation name + Returns proxy's implementation names """ - @spec name(Address.t() | nil) :: String.t() | nil - def name(_proxy_address, options \\ []) + @spec names(Address.t() | nil) :: String.t() | [String.t()] + def names(_proxy_address, options \\ []) - def name(proxy_address, options) when not is_nil(proxy_address) do + def names(proxy_address, options) when not is_nil(proxy_address) do proxy_implementations = get_proxy_implementations(proxy_address.hash, options) - # todo: process multiple implementations in case of Diamond proxy if proxy_implementations && not Enum.empty?(proxy_implementations.names) do - proxy_implementations.names |> Enum.at(0) + proxy_implementations.names else - nil + [] end end - def name(_, _), do: nil + def names(_, _), do: [] end diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex index b952ee1373aa..75425bfebbe5 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex @@ -109,9 +109,12 @@ defmodule Explorer.Chain.SmartContract.Proxy.VerificationStatus do @doc """ Sets proxy verification result """ - @spec set_proxy_verification_result({String.t() | nil | :empty, String.t() | nil | :empty}, String.t()) :: + @spec set_proxy_verification_result({[String.t()] | :empty | :error, [String.t()] | :empty | :error}, String.t()) :: __MODULE__.t() - def set_proxy_verification_result({empty_or_nil, _}, uid) when empty_or_nil in [:empty, nil], + def set_proxy_verification_result({empty_or_error, _}, uid) when 
empty_or_error in [:empty, :error], + do: update_status(uid, :fail) + + def set_proxy_verification_result({[], _}, uid), do: update_status(uid, :fail) def set_proxy_verification_result({_, _}, uid), do: update_status(uid, :pass) diff --git a/apps/explorer/lib/explorer/etherscan/contracts.ex b/apps/explorer/lib/explorer/etherscan/contracts.ex index 25c1bc38a7a7..64449f858a09 100644 --- a/apps/explorer/lib/explorer/etherscan/contracts.ex +++ b/apps/explorer/lib/explorer/etherscan/contracts.ex @@ -41,8 +41,8 @@ defmodule Explorer.Etherscan.Contracts do | smart_contract: %{address_with_smart_contract.smart_contract | contract_source_code: formatted_code} } else - {implementation_address_hash, _} = - Implementation.get_implementation( + implementation_smart_contract = + SmartContract.single_implementation_smart_contract_from_proxy( %{ updated: %SmartContract{ address_hash: address_hash, @@ -52,13 +52,11 @@ defmodule Explorer.Etherscan.Contracts do implementation_address_fetched?: false, refetch_necessity_checked?: false }, - unverified_proxy_only?: true + [ + {:unverified_proxy_only?, true} + ] ) - implementation_smart_contract = - implementation_address_hash - |> Proxy.implementation_to_smart_contract([]) - address_verified_bytecode_twin_contract = implementation_smart_contract || maybe_fetch_bytecode_twin(twin_needed?, address_hash) @@ -95,7 +93,7 @@ defmodule Explorer.Etherscan.Contracts do smart_contract |> Map.put(:is_proxy, true) |> Map.put( - :implementation_address_hash_string, + :implementation_address_hash_strings, smart_contract |> Implementation.get_implementation() |> Tuple.to_list() diff --git a/apps/explorer/priv/repo/migrations/20240425185705_alter_proxy_type.exs b/apps/explorer/priv/repo/migrations/20240425185705_alter_proxy_type.exs new file mode 100644 index 000000000000..91459cb8294c --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20240425185705_alter_proxy_type.exs @@ -0,0 +1,7 @@ +defmodule Explorer.Repo.Migrations.AlterProxyType do + use 
Ecto.Migration + + def change do + execute("ALTER TYPE proxy_type ADD VALUE 'eip2535' BEFORE 'unknown'") + end +end diff --git a/apps/explorer/test/explorer/chain/smart_contract/proxy/models/implementation_test.exs b/apps/explorer/test/explorer/chain/smart_contract/proxy/models/implementation_test.exs index 7879c411537a..477bb788e42d 100644 --- a/apps/explorer/test/explorer/chain/smart_contract/proxy/models/implementation_test.exs +++ b/apps/explorer/test/explorer/chain/smart_contract/proxy/models/implementation_test.exs @@ -28,7 +28,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do # fetch nil implementation and don't save it to db TestHelper.get_eip1967_implementation_zero_addresses() - assert {nil, nil} = Implementation.get_implementation(smart_contract) + assert {[], []} = Implementation.get_implementation(smart_contract) verify!(EthereumJSONRPC.Mox) assert_empty_implementation(smart_contract.address_hash) @@ -44,7 +44,8 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do expect_address_in_oz_slot_response(string_implementation_address_hash) - assert {^string_implementation_address_hash, "implementation"} = Implementation.get_implementation(smart_contract) + assert {[^string_implementation_address_hash], ["implementation"]} = + Implementation.get_implementation(smart_contract) verify!(EthereumJSONRPC.Mox) @@ -56,7 +57,8 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do TestHelper.get_eip1967_implementation_error_response() - assert {^string_implementation_address_hash, "implementation"} = Implementation.get_implementation(smart_contract) + assert {[^string_implementation_address_hash], ["implementation"]} = + Implementation.get_implementation(smart_contract) verify!(EthereumJSONRPC.Mox) @@ -76,7 +78,8 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do Application.put_env(:explorer, :proxy, proxy) - assert {^string_implementation_address_hash, 
"implementation"} = Implementation.get_implementation(smart_contract) + assert {[^string_implementation_address_hash], ["implementation"]} = + Implementation.get_implementation(smart_contract) {contract_2, _} = SmartContract.address_hash_to_smart_contract_with_bytecode_twin(smart_contract.address_hash) implementation_2 = Implementation.get_proxy_implementations(smart_contract.address_hash) @@ -93,7 +96,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do TestHelper.get_eip1967_implementation_zero_addresses() - assert {nil, nil} = Implementation.get_implementation(smart_contract) + assert {[], []} = Implementation.get_implementation(smart_contract) verify!(EthereumJSONRPC.Mox) @@ -103,7 +106,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do test "get_implementation/1 for twins contract" do # return nils for nil - assert {nil, nil} = Implementation.get_implementation(nil) + assert {[], []} = Implementation.get_implementation(nil) smart_contract = insert(:smart_contract) twin_address = insert(:contract_address) @@ -120,11 +123,11 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do Application.put_env(:explorer, :proxy, proxy) # fetch nil implementation - assert {nil, nil} = Implementation.get_implementation(bytecode_twin) + assert {[], []} = Implementation.get_implementation(bytecode_twin) verify!(EthereumJSONRPC.Mox) refute_implementations(smart_contract.address_hash) - assert {nil, nil} = Implementation.get_implementation(bytecode_twin) + assert {[], []} = Implementation.get_implementation(bytecode_twin) verify!(EthereumJSONRPC.Mox) refute_implementations(smart_contract.address_hash) @@ -140,7 +143,8 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do expect_address_in_oz_slot_response(string_implementation_address_hash) - assert {^string_implementation_address_hash, "implementation"} = Implementation.get_implementation(bytecode_twin) + assert 
{[^string_implementation_address_hash], ["implementation"]} = + Implementation.get_implementation(bytecode_twin) verify!(EthereumJSONRPC.Mox) @@ -154,7 +158,8 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do refute_implementations(smart_contract.address_hash) - assert {^string_implementation_address_hash, "implementation"} = Implementation.get_implementation(bytecode_twin) + assert {[^string_implementation_address_hash], ["implementation"]} = + Implementation.get_implementation(bytecode_twin) verify!(EthereumJSONRPC.Mox) @@ -166,11 +171,11 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation.Test do _implementation_smart_contract = insert(:smart_contract, name: "implementation") # fetch nil implementation - assert {nil, nil} = Implementation.get_implementation(bytecode_twin) + assert {[], []} = Implementation.get_implementation(bytecode_twin) verify!(EthereumJSONRPC.Mox) refute_implementations(smart_contract.address_hash) - assert {nil, nil} = Implementation.get_implementation(bytecode_twin) + assert {[], []} = Implementation.get_implementation(bytecode_twin) verify!(EthereumJSONRPC.Mox) refute_implementations(smart_contract.address_hash) From 18ef2c0ef5c3b6a405f3364dda76a0991f038422 Mon Sep 17 00:00:00 2001 From: Kevin Mathew Date: Fri, 10 May 2024 14:58:14 +0530 Subject: [PATCH 002/150] chore: Update outdated links to ETH JSON RPC Specification in docstrings (#10041) * Update outdated links to ETH JSON RPC Specification in docstrings * Remove empty line --- .../templates/api_docs/eth_rpc.html.eex | 2 +- apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex | 11 +++++------ .../lib/ethereum_jsonrpc/block/by_hash.ex | 2 +- .../lib/ethereum_jsonrpc/block/by_nephew.ex | 2 +- .../lib/ethereum_jsonrpc/block/by_number.ex | 2 +- apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex | 4 ++-- apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/log.ex | 2 +- apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/logs.ex | 2 +- 
apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex | 2 +- .../ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex | 2 +- .../lib/ethereum_jsonrpc/transaction.ex | 10 +++++----- .../lib/ethereum_jsonrpc/transactions.ex | 4 ++-- apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncle.ex | 2 +- apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncles.ex | 2 +- 14 files changed, 24 insertions(+), 25 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/templates/api_docs/eth_rpc.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/api_docs/eth_rpc.html.eex index 24f78423fc67..e3a879d14a13 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/api_docs/eth_rpc.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/api_docs/eth_rpc.html.eex @@ -6,7 +6,7 @@

<%= gettext "This API is provided to support some rpc methods in the exact format specified for ethereum nodes, which can be found " %> - <%= gettext "here." %> + <%= gettext "here." %> <%= gettext "This is useful to allow sending requests to blockscout without having to change anything about the request." %> <%= gettext "However, in general, the" %> <%= link( gettext("custom RPC"), diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex index 7269d403fbb3..ad7eb40fff7d 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex @@ -1,7 +1,7 @@ defmodule EthereumJSONRPC.Block do @moduledoc """ - Block format as returned by [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbyhash) - and [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber). + Block format as returned by [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbyhash) + and [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbynumber). """ import EthereumJSONRPC, only: [quantity_to_integer: 1, timestamp_to_datetime: 1] @@ -75,20 +75,20 @@ defmodule EthereumJSONRPC.Block do * `"number"` - the block number `t:EthereumJSONRPC.quantity/0`. `nil` when block is pending. * `"parentHash" - the `t:EthereumJSONRPC.hash/0` of the parent block. * `"receiptsRoot"` - `t:EthereumJSONRPC.hash/0` of the root of the receipts. - [trie](https://github.com/ethereum/wiki/wiki/Patricia-Tree) of the block. + [trie](https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/) of the block. 
* `"sealFields"` - UNKNOWN * `"sha3Uncles"` - `t:EthereumJSONRPC.hash/0` of the [uncles](https://bitcoin.stackexchange.com/questions/39329/in-ethereum-what-is-an-uncle-block) data in the block. * `"signature"` - UNKNOWN * `"size"` - `t:EthereumJSONRPC.quantity/0` of bytes in this block * `"stateRoot" - `t:EthereumJSONRPC.hash/0` of the root of the final state - [trie](https://github.com/ethereum/wiki/wiki/Patricia-Tree) of the block. + [trie](https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/) of the block. * `"step"` - UNKNOWN * `"timestamp"`: the unix timestamp as a `t:EthereumJSONRPC.quantity/0` for when the block was collated. * `"totalDifficulty" - `t:EthereumJSONRPC.quantity/0` of the total difficulty of the chain until this block. * `"transactions"` - `t:list/0` of `t:EthereumJSONRPC.Transaction.t/0`. * `"transactionsRoot" - `t:EthereumJSONRPC.hash/0` of the root of the transaction - [trie](https://github.com/ethereum/wiki/wiki/Patricia-Tree) of the block. + [trie](https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/) of the block. * `uncles`: `t:list/0` of [uncles](https://bitcoin.stackexchange.com/questions/39329/in-ethereum-what-is-an-uncle-block) `t:EthereumJSONRPC.hash/0`. 
@@ -198,7 +198,6 @@ defmodule EthereumJSONRPC.Block do transactions_root: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",\ #{case Application.compile_env(:explorer, :chain_type) do :rsk -> """ - bitcoin_merged_mining_coinbase_transaction: "0x00000000000000805bf0dc9203da49a3b4e3ec913806e43102cc07db991272dc8b7018da57eb5abe59a32d070000ffffffff03449a4d26000000001976a914536ffa992491508dca0354e52f32a3a7a679a53a88ac00000000000000002b6a2952534b424c4f434b3ad2508d21d28c8f89d495923c0758ec3f64bd6755b4ec416f5601312600542a400000000000000000266a24aa21a9ed4ae42ea6dca2687aaed665714bf58b055c4e11f2fb038605930d630b49ad7b9d00000000",\ bitcoin_merged_mining_header: "0x00006d20ffd048280094a6ea0851d854036aacaa25ee0f23f0040200000000000000000078d2638fe0b4477c54601e6449051afba8228e0a88ff06b0c91f091fd34d5da57487c76402610517372c2fe9",\ bitcoin_merged_mining_merkle_proof: "0x8e5a4ba74eb4eb2f9ad4cabc2913aeed380a5becf7cd4d513341617efb798002bd83a783c31c66a8a8f6cc56c071c2d471cb610e3dc13054b9d216021d8c7e9112f622564449ebedcedf7d4ccb6fe0ffac861b7ed1446c310813cdf712e1e6add28b1fe1c0ae5e916194ba4f285a9340aba41e91bf847bf31acf37a9623a04a2348a37ab9faa5908122db45596bbc03e9c3644b0d4589471c4ff30fc139f3ba50506e9136fa0df799b487494de3e2b3dec937338f1a2e18da057c1f60590a9723672a4355b9914b1d01af9f582d9e856f6e1744be00f268b0b01d559329f7e0685aa63ffeb7c28486d7462292021d1345cddbf7c920ca34bb7aa4c6cdbe068806e35d0db662e7fcda03cb4d779594638c62a1fdd7ec98d1fb6d240d853958abe57561d9b9d0465cf8b9d6ee3c58b0d8b07d6c4c5d8f348e43fe3c06011b6a0008db4e0b16c77ececc3981f9008201cea5939869d648e59a09bd2094b1196ff61126bffb626153deed2563e1745436247c94a85d2947756b606d67633781c99d7",\ diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_hash.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_hash.ex index 5a923a3a95b1..83aa6fa4ad00 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_hash.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_hash.ex @@ -1,6 +1,6 @@ defmodule 
EthereumJSONRPC.Block.ByHash do @moduledoc """ - Block format as returned by [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbyhash) + Block format as returned by [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbyhash) """ def request(%{id: id, hash: hash}, hydrated \\ true) do diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_nephew.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_nephew.ex index 56e839334326..eba8c46ec826 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_nephew.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_nephew.ex @@ -1,6 +1,6 @@ defmodule EthereumJSONRPC.Block.ByNephew do @moduledoc """ - Block format as returned by [`eth_getUncleByBlockHashAndIndex`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getUncleByBlockHashAndIndex) + Block format as returned by [`eth_getUncleByBlockHashAndIndex`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getunclebyblockhashandindex) """ import EthereumJSONRPC, only: [integer_to_quantity: 1] diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_number.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_number.ex index 5a1cf1eeca41..2daf45086187 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_number.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block/by_number.ex @@ -1,6 +1,6 @@ defmodule EthereumJSONRPC.Block.ByNumber do @moduledoc """ - Block format as returned by [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbyhash) + Block format as returned by [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbyhash) """ import EthereumJSONRPC, only: [integer_to_quantity: 1] diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex 
b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex index bd0c00f6df6b..8659195ff198 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex @@ -1,7 +1,7 @@ defmodule EthereumJSONRPC.Blocks do @moduledoc """ - Blocks format as returned by [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbyhash) - and [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber) from batch requests. + Blocks format as returned by [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbyhash) + and [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbynumber) from batch requests. """ alias EthereumJSONRPC.{Block, Transactions, Transport, Uncles, Withdrawals} diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/log.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/log.ex index 012ac0ec01e3..5ccd49a15d42 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/log.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/log.ex @@ -1,7 +1,7 @@ defmodule EthereumJSONRPC.Log do @moduledoc """ Log included in return from - [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt). + [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_gettransactionreceipt). 
""" import EthereumJSONRPC, only: [quantity_to_integer: 1] diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/logs.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/logs.ex index 7c4720f79b71..e62765d859c6 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/logs.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/logs.ex @@ -1,7 +1,7 @@ defmodule EthereumJSONRPC.Logs do @moduledoc """ Collection of logs included in return from - [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt). + [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_gettransactionreceipt). """ alias EthereumJSONRPC.Log diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex index f81ca2e03c6e..e1535a3ff0d5 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex @@ -1,7 +1,7 @@ defmodule EthereumJSONRPC.Receipt do @moduledoc """ Receipts format as returned by - [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt). + [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_gettransactionreceipt). 
""" import EthereumJSONRPC, only: [quantity_to_integer: 1] diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex index 73683fda3fc6..739eb43f7cee 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex @@ -1,7 +1,7 @@ defmodule EthereumJSONRPC.Receipts do @moduledoc """ Receipts format as returned by - [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt) from batch + [`eth_getTransactionReceipt`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_gettransactionreceipt) from batch requests. """ diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex index 482d0af67114..778c141877f6 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex @@ -1,11 +1,11 @@ defmodule EthereumJSONRPC.Transaction do @moduledoc """ Transaction format included in the return of - [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbyhash) - and [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber) and returned by - [`eth_getTransactionByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionbyhash), - [`eth_getTransactionByBlockHashAndIndex`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionbyblockhashandindex), - and [`eth_getTransactionByBlockNumberAndIndex`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionbyblocknumberandindex) + [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbyhash) + and 
[`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbynumber) and returned by + [`eth_getTransactionByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_gettransactionbyhash), + [`eth_getTransactionByBlockHashAndIndex`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_gettransactionbyblockhashandindex), + and [`eth_getTransactionByBlockNumberAndIndex`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_gettransactionbyblocknumberandindex) """ import EthereumJSONRPC, only: [quantity_to_integer: 1, integer_to_quantity: 1, request: 1] diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transactions.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transactions.ex index ecdf103b4e89..702786bae824 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transactions.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transactions.ex @@ -1,8 +1,8 @@ defmodule EthereumJSONRPC.Transactions do @moduledoc """ List of transactions format as included in return from - [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbyhash) and - [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber). + [`eth_getBlockByHash`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbyhash) and + [`eth_getBlockByNumber`](https://github.com/ethereum/wiki/wiki/JSON-RPC/e8e0771b9f3677693649d945956bc60e886ceb2b#eth_getblockbynumber). 
""" alias EthereumJSONRPC.Transaction diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncle.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncle.ex index fb0a6a397e53..b9cd6e58d126 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncle.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncle.ex @@ -1,6 +1,6 @@ defmodule EthereumJSONRPC.Uncle do @moduledoc """ - [Uncle](https://github.com/ethereum/wiki/wiki/Glossary#ethereum-blockchain). + [Uncle](https://ethereum.org/en/glossary). An uncle is a block that didn't make the main chain due to them being validated slightly behind what became the main chain. diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncles.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncles.ex index 817474faf91e..fe36b7aa33c1 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncles.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/uncles.ex @@ -1,6 +1,6 @@ defmodule EthereumJSONRPC.Uncles do @moduledoc """ - List of [uncles](https://github.com/ethereum/wiki/wiki/Glossary#ethereum-blockchain). Uncles are blocks that didn't + List of [uncles](https://ethereum.org/en/glossary). Uncles are blocks that didn't make the main chain due to them being validated slightly behind what became the main chain. 
""" From fb4fde678d99a7dfc4ce5c066a6758b947a0fcc3 Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Fri, 10 May 2024 14:56:59 +0400 Subject: [PATCH 003/150] feat: MUD API support (#9869) * feat: mud support * chore: fix ci warnings * feat: skip missing schemas * ci: build redstone image * chore: fix dialyzer * chore: remove noop migration * feat: full-text table search * fix: don't show deleted records * fix: type specs and dializer fixes * feat: checksum addresses * fix: handle invalid params * chore: add missing envs --- .../publish-docker-image-for-redstone.yml | 44 ++ .github/workflows/release-redstone.yml | 46 ++ .../lib/block_scout_web/api_router.ex | 12 + .../controllers/api/v2/mud_controller.ex | 261 ++++++++++++ .../lib/block_scout_web/paging_helper.ex | 17 + .../block_scout_web/views/api/v2/mud_view.ex | 85 ++++ apps/block_scout_web/test/test_helper.exs | 1 + apps/explorer/config/config.exs | 2 + apps/explorer/config/dev.exs | 2 + apps/explorer/config/prod.exs | 4 + apps/explorer/config/test.exs | 3 +- apps/explorer/lib/explorer/application.ex | 10 +- apps/explorer/lib/explorer/chain/mud.ex | 395 ++++++++++++++++++ .../explorer/lib/explorer/chain/mud/schema.ex | 110 +++++ apps/explorer/lib/explorer/chain/mud/table.ex | 88 ++++ apps/explorer/lib/explorer/repo.ex | 10 + .../lib/explorer/repo/config_helper.ex | 2 + apps/explorer/test/test_helper.exs | 1 + config/config_helper.exs | 14 +- config/runtime/dev.exs | 11 + config/runtime/prod.exs | 7 + cspell.json | 1 + docker-compose/envs/common-blockscout.env | 5 +- docker/Dockerfile | 2 + 24 files changed, 1125 insertions(+), 8 deletions(-) create mode 100644 .github/workflows/publish-docker-image-for-redstone.yml create mode 100644 .github/workflows/release-redstone.yml create mode 100644 apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex create mode 100644 apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex create mode 100644 apps/explorer/lib/explorer/chain/mud.ex 
create mode 100644 apps/explorer/lib/explorer/chain/mud/schema.ex create mode 100644 apps/explorer/lib/explorer/chain/mud/table.ex diff --git a/.github/workflows/publish-docker-image-for-redstone.yml b/.github/workflows/publish-docker-image-for-redstone.yml new file mode 100644 index 000000000000..029c42bc698f --- /dev/null +++ b/.github/workflows/publish-docker-image-for-redstone.yml @@ -0,0 +1,44 @@ +name: Redstone Publish Docker image + +on: + workflow_dispatch: + push: + branches: + - production-redstone +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + DOCKER_CHAIN_NAME: redstone + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo-and-short-sha + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + CACHE_EXCHANGE_RATES_PERIOD= + API_V1_READ_METHODS_DISABLED=false + DISABLE_WEBAPP=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + ADMIN_PANEL_ENABLED=false + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism + MUD_INDEXER_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-redstone.yml b/.github/workflows/release-redstone.yml new file mode 100644 index 000000000000..9207dd195ac8 --- /dev/null +++ b/.github/workflows/release-redstone.yml @@ -0,0 +1,46 @@ +name: Release for Redstone + +on: + release: + types: [published] + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Redstone + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-redstone:latest, blockscout/blockscout-redstone:${{ env.RELEASE_VERSION }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + CACHE_EXCHANGE_RATES_PERIOD= + API_V1_READ_METHODS_DISABLED=false + DISABLE_WEBAPP=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + ADMIN_PANEL_ENABLED=false + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism + MUD_INDEXER_ENABLED=true \ No newline at end of file diff --git a/apps/block_scout_web/lib/block_scout_web/api_router.ex b/apps/block_scout_web/lib/block_scout_web/api_router.ex index 939e55297c26..475d89641677 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/api_router.ex @@ -346,6 +346,18 @@ defmodule BlockScoutWeb.ApiRouter do get("/batches/:batch_number", V2.ZkSyncController, :batch) end end + + scope "/mud" do + if Application.compile_env(:explorer, Explorer.Chain.Mud)[:enabled] do + get("/worlds", V2.MudController, :worlds) + get("/worlds/count", V2.MudController, :worlds_count) + get("/worlds/:world/tables", V2.MudController, :world_tables) + get("/worlds/:world/tables/count", V2.MudController, :world_tables_count) + get("/worlds/:world/tables/:table_id/records", V2.MudController, :world_table_records) + get("/worlds/:world/tables/:table_id/records/count", V2.MudController, :world_table_records_count) + get("/worlds/:world/tables/:table_id/records/:record_id", V2.MudController, :world_table_record) + end + end end scope "/v1/graphql" do diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex new file mode 100644 index 000000000000..8f9d0810841a --- /dev/null +++ 
b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex @@ -0,0 +1,261 @@ +defmodule BlockScoutWeb.API.V2.MudController do + use BlockScoutWeb, :controller + + import BlockScoutWeb.Chain, + only: [ + next_page_params: 4, + split_list_by_page: 1, + default_paging_options: 0 + ] + + import BlockScoutWeb.PagingHelper, only: [mud_records_sorting: 1] + + alias Explorer.Chain.{Data, Hash, Mud, Mud.Schema.FieldSchema, Mud.Table} + + action_fallback(BlockScoutWeb.API.V2.FallbackController) + + @doc """ + Function to handle GET requests to `/api/v2/mud/worlds` endpoint. + """ + @spec worlds(Plug.Conn.t(), map()) :: Plug.Conn.t() + def worlds(conn, params) do + {worlds, next_page} = + params + |> mud_paging_options(["world"], [Hash.Address]) + |> Mud.worlds_list() + |> split_list_by_page() + + next_page_params = + next_page_params(next_page, worlds, conn.query_params, fn item -> + %{"world" => item} + end) + + conn + |> put_status(200) + |> render(:worlds, %{worlds: worlds, next_page_params: next_page_params}) + end + + @doc """ + Function to handle GET requests to `/api/v2/mud/worlds/count` endpoint. + """ + @spec worlds_count(Plug.Conn.t(), map()) :: Plug.Conn.t() + def worlds_count(conn, _params) do + count = Mud.worlds_count() + + conn + |> put_status(200) + |> render(:count, %{count: count}) + end + + @doc """ + Function to handle GET requests to `/api/v2/mud/worlds/:world/tables` endpoint. 
+ """ + @spec world_tables(Plug.Conn.t(), map()) :: Plug.Conn.t() + def world_tables(conn, %{"world" => world_param} = params) do + with {:format, {:ok, world}} <- {:format, Hash.Address.cast(world_param)} do + options = params |> mud_paging_options(["table_id"], [Hash.Full]) |> Keyword.merge(mud_tables_filter(params)) + + {tables, next_page} = + world + |> Mud.world_tables(options) + |> split_list_by_page() + + next_page_params = + next_page_params(next_page, tables, conn.query_params, fn item -> + %{"table_id" => item |> elem(0)} + end) + + conn + |> put_status(200) + |> render(:tables, %{tables: tables, next_page_params: next_page_params}) + end + end + + @doc """ + Function to handle GET requests to `/api/v2/mud/worlds/:world/tables/count` endpoint. + """ + @spec world_tables_count(Plug.Conn.t(), map()) :: Plug.Conn.t() + def world_tables_count(conn, %{"world" => world_param} = params) do + with {:format, {:ok, world}} <- {:format, Hash.Address.cast(world_param)} do + options = params |> mud_tables_filter() + + count = Mud.world_tables_count(world, options) + + conn + |> put_status(200) + |> render(:count, %{count: count}) + end + end + + @doc """ + Function to handle GET requests to `/api/v2/mud/worlds/:world/tables/:table_id/records` endpoint. 
+ """ + @spec world_table_records(Plug.Conn.t(), map()) :: Plug.Conn.t() + def world_table_records(conn, %{"world" => world_param, "table_id" => table_id_param} = params) do + with {:format, {:ok, world}} <- {:format, Hash.Address.cast(world_param)}, + {:format, {:ok, table_id}} <- {:format, Hash.Full.cast(table_id_param)}, + {:ok, schema} <- Mud.world_table_schema(world, table_id) do + options = + params + |> mud_paging_options(["key_bytes", "key0", "key1"], [Data, Hash.Full, Hash.Full]) + |> Keyword.merge(mud_records_filter(params, schema)) + |> Keyword.merge(mud_records_sorting(params)) + + {records, next_page} = world |> Mud.world_table_records(table_id, options) |> split_list_by_page() + + blocks = Mud.preload_records_timestamps(records) + + next_page_params = + next_page_params(next_page, records, conn.query_params, fn item -> + keys = [item.key_bytes, item.key0, item.key1] |> Enum.filter(&(!is_nil(&1))) + ["key_bytes", "key0", "key1"] |> Enum.zip(keys) |> Enum.into(%{}) + end) + + conn + |> put_status(200) + |> render(:records, %{ + records: records, + table_id: table_id, + schema: schema, + blocks: blocks, + next_page_params: next_page_params + }) + end + end + + @doc """ + Function to handle GET requests to `/api/v2/mud/worlds/:world/tables/:table_id/records/count` endpoint. 
+ """ + @spec world_table_records_count(Plug.Conn.t(), map()) :: Plug.Conn.t() + def world_table_records_count(conn, %{"world" => world_param, "table_id" => table_id_param} = params) do + with {:format, {:ok, world}} <- {:format, Hash.Address.cast(world_param)}, + {:format, {:ok, table_id}} <- {:format, Hash.Full.cast(table_id_param)}, + {:ok, schema} <- Mud.world_table_schema(world, table_id) do + options = params |> mud_records_filter(schema) + + count = Mud.world_table_records_count(world, table_id, options) + + conn + |> put_status(200) + |> render(:count, %{count: count}) + end + end + + @doc """ + Function to handle GET requests to `/api/v2/mud/worlds/:world/tables/:table_id/records/:record_id` endpoint. + """ + @spec world_table_record(Plug.Conn.t(), map()) :: Plug.Conn.t() + def world_table_record( + conn, + %{"world" => world_param, "table_id" => table_id_param, "record_id" => record_id_param} = _params + ) do + with {:format, {:ok, world}} <- {:format, Hash.Address.cast(world_param)}, + {:format, {:ok, table_id}} <- {:format, Hash.Full.cast(table_id_param)}, + {:format, {:ok, record_id}} <- {:format, Data.cast(record_id_param)}, + {:ok, schema} <- Mud.world_table_schema(world, table_id), + {:ok, record} <- Mud.world_table_record(world, table_id, record_id) do + blocks = Mud.preload_records_timestamps([record]) + + conn + |> put_status(200) + |> render(:record, %{record: record, table_id: table_id, schema: schema, blocks: blocks}) + end + end + + defp mud_tables_filter(params) do + Enum.reduce(params, [], fn {key, value}, acc -> + case key do + "filter_namespace" -> + Keyword.put(acc, :filter_namespace, parse_namespace_string(value)) + + "q" -> + Keyword.put(acc, :filter_search, parse_search_string(value)) + + _ -> + acc + end + end) + end + + defp parse_namespace_string(namespace) do + filter = + case namespace do + nil -> {:ok, nil} + "0x" <> hex -> Base.decode16(hex, case: :mixed) + str -> {:ok, str} + end + + case filter do + {:ok, ns} when 
is_binary(ns) and byte_size(ns) <= 14 -> + ns |> String.pad_trailing(14, <<0>>) + + _ -> + nil + end + end + + defp parse_search_string(q) do + # If the search string looks like hex-encoded table id or table full name, + # we try to parse and filter by that table id directly. + # Otherwise we do a full-text search of given string inside table id. + with :error <- Hash.Full.cast(q), + :error <- Table.table_full_name_to_table_id(q) do + q + else + {:ok, table_id} -> table_id + end + end + + defp mud_records_filter(params, schema) do + Enum.reduce(params, [], fn {key, value}, acc -> + case key do + "filter_key0" -> Keyword.put(acc, :filter_key0, encode_filter(value, schema, 0)) + "filter_key1" -> Keyword.put(acc, :filter_key1, encode_filter(value, schema, 1)) + _ -> acc + end + end) + end + + defp encode_filter(value, schema, field_idx) do + case value do + "false" -> + <<0::256>> + + "true" -> + <<1::256>> + + "0x" <> hex -> + bin = Base.decode16!(hex, case: :mixed) + # addresses are padded to 32 bytes with zeros on the right + if FieldSchema.type_of(schema.key_schema, field_idx) == 97 do + <<0::size(256 - byte_size(bin) * 8), bin::binary>> + else + <> + end + + dec -> + num = dec |> Integer.parse() |> elem(0) + <> + end + end + + defp mud_paging_options(params, keys, types) do + page_key = + keys + |> Enum.zip(types) + |> Enum.reduce(%{}, fn {key, type}, acc -> + with param when param != nil <- Map.get(params, key), + {:ok, val} <- type.cast(param) do + acc |> Map.put(String.to_existing_atom(key), val) + else + _ -> acc + end + end) + + if page_key == %{} do + [paging_options: default_paging_options()] + else + [paging_options: %{default_paging_options() | key: page_key}] + end + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/paging_helper.ex b/apps/block_scout_web/lib/block_scout_web/paging_helper.ex index 4bd8be1d4040..bea0045e3a8e 100644 --- a/apps/block_scout_web/lib/block_scout_web/paging_helper.ex +++ 
b/apps/block_scout_web/lib/block_scout_web/paging_helper.ex @@ -303,4 +303,21 @@ defmodule BlockScoutWeb.PagingHelper do do: [{:dynamic, :blocks_validated, :desc_nulls_last, ValidatorStability.dynamic_validated_blocks()}] defp do_validators_stability_sorting(_, _), do: [] + + @spec mud_records_sorting(%{required(String.t()) => String.t()}) :: [ + {:sorting, SortingHelper.sorting_params()} + ] + def mud_records_sorting(%{"sort" => sort_field, "order" => order}) do + [sorting: do_mud_records_sorting(sort_field, order)] + end + + def mud_records_sorting(_), do: [] + + defp do_mud_records_sorting("key_bytes", "asc"), do: [asc_nulls_first: :key_bytes] + defp do_mud_records_sorting("key_bytes", "desc"), do: [desc_nulls_last: :key_bytes] + defp do_mud_records_sorting("key0", "asc"), do: [asc_nulls_first: :key0] + defp do_mud_records_sorting("key0", "desc"), do: [desc_nulls_last: :key0] + defp do_mud_records_sorting("key1", "asc"), do: [asc_nulls_first: :key1] + defp do_mud_records_sorting("key1", "desc"), do: [desc_nulls_last: :key1] + defp do_mud_records_sorting(_, _), do: [] end diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex new file mode 100644 index 000000000000..128c748bb994 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex @@ -0,0 +1,85 @@ +defmodule BlockScoutWeb.API.V2.MudView do + use BlockScoutWeb, :view + + alias Explorer.Chain.{Mud, Mud.Table} + + @doc """ + Function to render GET requests to `/api/v2/mud/worlds` endpoint. + """ + @spec render(String.t(), map()) :: map() + def render("worlds.json", %{worlds: worlds, next_page_params: next_page_params}) do + %{ + items: worlds, + next_page_params: next_page_params + } + end + + @doc """ + Function to render GET requests to `/api/v2/mud/worlds/count` endpoint. 
+ """ + def render("count.json", %{count: count}) do + %{ + count: count + } + end + + @doc """ + Function to render GET requests to `/api/v2/mud/worlds/:world/tables` endpoint. + """ + def render("tables.json", %{tables: tables, next_page_params: next_page_params}) do + %{ + items: tables |> Enum.map(&%{table: Table.from(&1 |> elem(0)), schema: &1 |> elem(1)}), + next_page_params: next_page_params + } + end + + @doc """ + Function to render GET requests to `/api/v2/mud/worlds/:world/tables/:table_id/records` endpoint. + """ + def render("records.json", %{ + records: records, + table_id: table_id, + schema: schema, + blocks: blocks, + next_page_params: next_page_params + }) do + %{ + items: records |> Enum.map(&format_record(&1, schema, blocks)), + table: table_id |> Table.from(), + schema: schema, + next_page_params: next_page_params + } + end + + @doc """ + Function to render GET requests to `/api/v2/mud/worlds/:world/tables/:table_id/records/:record_id` endpoint. + """ + def render("record.json", %{record: record, table_id: table_id, blocks: blocks, schema: schema}) do + %{ + record: record |> format_record(schema, blocks), + table: table_id |> Table.from(), + schema: schema + } + end + + defp format_record(nil, _schema, _blocks), do: nil + + defp format_record(record, schema, blocks) do + %{ + id: record.key_bytes, + raw: %{ + key_bytes: record.key_bytes, + key0: record.key0, + key1: record.key1, + static_data: record.static_data, + encoded_lengths: record.encoded_lengths, + dynamic_data: record.dynamic_data, + block_number: record.block_number, + log_index: record.log_index + }, + is_deleted: record.is_deleted, + decoded: Mud.decode_record(record, schema), + timestamp: blocks |> Map.get(Decimal.to_integer(record.block_number), nil) + } + end +end diff --git a/apps/block_scout_web/test/test_helper.exs b/apps/block_scout_web/test/test_helper.exs index 11003096da36..ee03ef1827ba 100644 --- a/apps/block_scout_web/test/test_helper.exs +++ 
b/apps/block_scout_web/test/test_helper.exs @@ -35,6 +35,7 @@ Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Beacon, :manual) Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Stability, :manual) Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.BridgedTokens, :manual) Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Filecoin, :manual) +Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Mud, :manual) Absinthe.Test.prime(BlockScoutWeb.GraphQL.Schema) diff --git a/apps/explorer/config/config.exs b/apps/explorer/config/config.exs index ca7095c15b63..78aded683b3d 100644 --- a/apps/explorer/config/config.exs +++ b/apps/explorer/config/config.exs @@ -150,6 +150,8 @@ config :explorer, :http_adapter, HTTPoison config :explorer, Explorer.Chain.BridgedToken, enabled: ConfigHelper.parse_bool_env_var("BRIDGED_TOKENS_ENABLED") +config :explorer, Explorer.Chain.Mud, enabled: ConfigHelper.parse_bool_env_var("MUD_INDEXER_ENABLED") + config :logger, :explorer, # keep synced with `config/config.exs` format: "$dateT$time $metadata[$level] $message\n", diff --git a/apps/explorer/config/dev.exs b/apps/explorer/config/dev.exs index a387ee24220c..36ba58629488 100644 --- a/apps/explorer/config/dev.exs +++ b/apps/explorer/config/dev.exs @@ -37,6 +37,8 @@ config :explorer, Explorer.Repo.Filecoin, timeout: :timer.seconds(80) config :explorer, Explorer.Repo.Stability, timeout: :timer.seconds(80) +config :explorer, Explorer.Repo.Mud, timeout: :timer.seconds(80) + config :explorer, Explorer.Tracer, env: "dev", disabled?: true config :logger, :explorer, diff --git a/apps/explorer/config/prod.exs b/apps/explorer/config/prod.exs index 27fa8cad9575..f8337d04ca74 100644 --- a/apps/explorer/config/prod.exs +++ b/apps/explorer/config/prod.exs @@ -60,6 +60,10 @@ config :explorer, Explorer.Repo.Stability, prepare: :unnamed, timeout: :timer.seconds(60) +config :explorer, Explorer.Repo.Mud, + prepare: :unnamed, + timeout: :timer.seconds(60) + config :explorer, Explorer.Tracer, env: "production", disabled?: true config :logger, 
:explorer, diff --git a/apps/explorer/config/test.exs b/apps/explorer/config/test.exs index 9ace0f12c261..64d37ba266e7 100644 --- a/apps/explorer/config/test.exs +++ b/apps/explorer/config/test.exs @@ -54,7 +54,8 @@ for repo <- [ Explorer.Repo.Suave, Explorer.Repo.BridgedTokens, Explorer.Repo.Filecoin, - Explorer.Repo.Stability + Explorer.Repo.Stability, + Explorer.Repo.Mud ] do config :explorer, repo, database: "explorer_test", diff --git a/apps/explorer/lib/explorer/application.ex b/apps/explorer/lib/explorer/application.ex index 3b1c32cfeb77..72c1296a5eab 100644 --- a/apps/explorer/lib/explorer/application.ex +++ b/apps/explorer/lib/explorer/application.ex @@ -141,7 +141,7 @@ defmodule Explorer.Application do ] |> List.flatten() - repos_by_chain_type() ++ account_repo() ++ configurable_children_set + repos_by_chain_type() ++ account_repo() ++ mud_repo() ++ configurable_children_set end defp repos_by_chain_type do @@ -172,6 +172,14 @@ defmodule Explorer.Application do end end + defp mud_repo do + if Application.get_env(:explorer, Explorer.Chain.Mud)[:enabled] || Mix.env() == :test do + [Explorer.Repo.Mud] + else + [] + end + end + defp should_start?(process) do Application.get_env(:explorer, process, [])[:enabled] == true end diff --git a/apps/explorer/lib/explorer/chain/mud.ex b/apps/explorer/lib/explorer/chain/mud.ex new file mode 100644 index 000000000000..4f8f2895d954 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/mud.ex @@ -0,0 +1,395 @@ +defmodule Explorer.Chain.Mud do + @moduledoc """ + Represents a MUD framework database record. 
+ """ + use Explorer.Schema + + import Ecto.Query, + only: [ + distinct: 2, + order_by: 3, + select: 3, + where: 3, + limit: 2 + ] + + alias ABI.TypeDecoder + alias Explorer.{Chain, PagingOptions, Repo, SortingHelper} + + alias Explorer.Chain.{ + Address, + Block, + Data, + Hash, + Mud, + Mud.Schema, + Mud.Schema.FieldSchema + } + + require Logger + + @schema_prefix "mud" + + @store_tables_table_id Base.decode16!("746273746f72650000000000000000005461626c657300000000000000000000", case: :lower) + + # https://github.com/latticexyz/mud/blob/cc4f4246e52982354e398113c46442910f9b04bb/packages/store/src/codegen/tables/Tables.sol#L34-L42 + @store_tables_table_schema %Schema{ + key_schema: FieldSchema.from("0x002001005f000000000000000000000000000000000000000000000000000000"), + value_schema: FieldSchema.from("0x006003025f5f5fc4c40000000000000000000000000000000000000000000000"), + key_names: ["tableId"], + value_names: ["fieldLayout", "keySchema", "valueSchema", "abiEncodedKeyNames", "abiEncodedValueNames"] + } + + @primary_key false + typed_schema "records" do + field(:address, Hash.Address, null: false) + field(:table_id, Hash.Full, null: false) + field(:key_bytes, Data) + field(:key0, Hash.Full) + field(:key1, Hash.Full) + field(:static_data, Data) + field(:encoded_lengths, Data) + field(:dynamic_data, Data) + field(:is_deleted, :boolean, null: false) + field(:block_number, :decimal, null: false) + field(:log_index, :decimal, null: false) + end + + def enabled? do + Application.get_env(:explorer, __MODULE__)[:enabled] + end + + @doc """ + Returns the paginated list of registered MUD world addresses. 
+ """ + @spec worlds_list(Keyword.t()) :: [Mud.t()] + def worlds_list(options \\ []) do + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + Mud + |> select([r], r.address) + |> distinct(true) + |> page_worlds(paging_options) + |> limit(^paging_options.page_size) + |> Repo.Mud.all() + end + + defp page_worlds(query, %PagingOptions{key: %{world: world}}) do + query |> where([item], item.address > ^world) + end + + defp page_worlds(query, _), do: query + + @doc """ + Returns the total number of registered MUD worlds. + """ + @spec worlds_count() :: non_neg_integer() + def worlds_count do + Mud + |> select([r], r.address) + |> distinct(true) + |> Repo.Mud.aggregate(:count) + end + + @doc """ + Returns the decoded MUD table schema by world address and table ID. + """ + @spec world_table_schema(Hash.Address.t(), Hash.Full.t()) :: {:ok, Schema.t()} | {:error, :not_found} + def world_table_schema(world, table_id) do + Mud + |> where([r], r.address == ^world and r.table_id == ^@store_tables_table_id and r.key0 == ^table_id) + |> Repo.Mud.one() + |> case do + nil -> + {:error, :not_found} + + r -> + {:ok, decode_schema(r)} + end + end + + @doc """ + Returns the paginated list of registered MUD tables in the given world, optionally filtered by namespace or table name. + Each returned table in the resulting list is represented as a tuple of its ID and decoded schema. 
+ """ + @spec world_tables(Hash.Address.t(), Keyword.t()) :: [{Hash.Full.t(), Schema.t()}] + def world_tables(world, options \\ []) do + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + filter_namespace = Keyword.get(options, :filter_namespace, nil) + filter_search = Keyword.get(options, :filter_search, nil) + + Mud + |> where([r], r.address == ^world and r.table_id == ^@store_tables_table_id) + |> filter_tables_by_namespace(filter_namespace) + |> filter_tables_by_search(filter_search) + |> page_tables(paging_options) + |> order_by([r], asc: r.key0) + |> limit(^paging_options.page_size) + |> Repo.Mud.all() + |> Enum.map(&{&1.key0, decode_schema(&1)}) + end + + defp page_tables(query, %PagingOptions{key: %{table_id: table_id}}) do + query |> where([item], item.key0 > ^table_id) + end + + defp page_tables(query, _), do: query + + @doc """ + Returns the number of registered MUD tables in the given world. + """ + @spec world_tables_count(Hash.Address.t(), Keyword.t()) :: non_neg_integer() + def world_tables_count(world, options \\ []) do + filter_namespace = Keyword.get(options, :filter_namespace, nil) + filter_search = Keyword.get(options, :filter_search, nil) + + Mud + |> where([r], r.address == ^world and r.table_id == ^@store_tables_table_id) + |> filter_tables_by_namespace(filter_namespace) + |> filter_tables_by_search(filter_search) + |> Repo.Mud.aggregate(:count) + end + + defp filter_tables_by_namespace(query, nil), do: query + + defp filter_tables_by_namespace(query, namespace) do + query |> where([tb], fragment("substring(? 
FROM 3 FOR 14)", tb.key0) == ^namespace) + end + + defp filter_tables_by_search(query, %Hash{} = table_id) do + query |> where([tb], tb.key0 == ^table_id) + end + + defp filter_tables_by_search(query, search_string) when is_binary(search_string) do + query |> where([tb], ilike(fragment("encode(?, 'escape')", tb.key0), ^"%#{search_string}%")) + end + + defp filter_tables_by_search(query, _), do: query + + @default_sorting [ + asc: :key_bytes + ] + + @doc """ + Returns the paginated list of raw MUD records in the given world table. + Resulting records can be sorted or filtered by any of the first 2 key columns. + """ + @spec world_table_records(Hash.Address.t(), Hash.Full.t(), Keyword.t()) :: [Mud.t()] + def world_table_records(world, table_id, options \\ []) do + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + sorting = Keyword.get(options, :sorting, []) + + Mud + |> where([r], r.address == ^world and r.table_id == ^table_id and r.is_deleted == false) + |> filter_records(:key0, Keyword.get(options, :filter_key0)) + |> filter_records(:key1, Keyword.get(options, :filter_key1)) + |> SortingHelper.apply_sorting(sorting, @default_sorting) + |> SortingHelper.page_with_sorting(paging_options, sorting, @default_sorting) + |> Repo.Mud.all() + end + + @doc """ + Preloads last modification timestamps for the list of raw MUD records. + + Returns a map of block numbers to timestamps. + """ + @spec preload_records_timestamps([Mud.t()]) :: %{non_neg_integer() => DateTime.t()} + def preload_records_timestamps(records) do + block_numbers = records |> Enum.map(&(&1.block_number |> Decimal.to_integer())) |> Enum.uniq() + + Block + |> where([b], b.number in ^block_numbers) + |> select([b], {b.number, b.timestamp}) + |> Repo.all() + |> Enum.into(%{}) + end + + @doc """ + Returns the number of MUD records in the given world table. 
+ """ + @spec world_table_records_count(Hash.Address.t(), Hash.Full.t(), Keyword.t()) :: non_neg_integer() + def world_table_records_count(world, table_id, options \\ []) do + Mud + |> where([r], r.address == ^world and r.table_id == ^table_id and r.is_deleted == false) + |> filter_records(:key0, Keyword.get(options, :filter_key0)) + |> filter_records(:key1, Keyword.get(options, :filter_key1)) + |> Repo.Mud.aggregate(:count) + end + + defp filter_records(query, _key_name, nil), do: query + + defp filter_records(query, :key0, key), do: query |> where([r], r.key0 == ^key) + + defp filter_records(query, :key1, key), do: query |> where([r], r.key1 == ^key) + + @doc """ + Returns the raw MUD record from the given world table by its ID. + """ + @spec world_table_record(Hash.Address.t(), Hash.Full.t(), Data.t()) :: {:ok, Mud.t()} | {:error, :not_found} + def world_table_record(world, table_id, record_id) do + Mud + |> where([r], r.address == ^world and r.table_id == ^table_id and r.key_bytes == ^record_id) + |> Repo.Mud.one() + |> case do + nil -> + {:error, :not_found} + + r -> + {:ok, r} + end + end + + defp decode_schema(nil), do: nil + + defp decode_schema(record) do + schema_record = decode_record(record, @store_tables_table_schema) + + %Schema{ + key_schema: schema_record["keySchema"] |> FieldSchema.from(), + value_schema: schema_record["valueSchema"] |> FieldSchema.from(), + key_names: schema_record["abiEncodedKeyNames"] |> decode_abi_encoded_strings(), + value_names: schema_record["abiEncodedValueNames"] |> decode_abi_encoded_strings() + } + end + + defp decode_abi_encoded_strings("0x" <> hex_encoded) do + hex_encoded + |> Base.decode16!(case: :mixed) + |> TypeDecoder.decode_raw([{:array, :string}]) + |> Enum.at(0) + end + + @doc """ + Decodes a given raw MUD record according to table schema. + + Returns a JSON-like map with decoded field names and values. 
+ """ + @spec decode_record(Mud.t() | nil, Schema.t() | nil) :: map() | nil + def decode_record(nil, _schema), do: nil + + def decode_record(_record, nil), do: nil + + def decode_record(record, schema) do + key = decode_key_tuple(record.key_bytes.bytes, schema.key_names, schema.key_schema) + + value = + if record.is_deleted do + schema.value_names |> Enum.into(%{}, &{&1, nil}) + else + decode_fields( + record.static_data, + record.encoded_lengths, + record.dynamic_data, + schema.value_names, + schema.value_schema + ) + end + + key |> Map.merge(value) + end + + defp decode_key_tuple(key_bytes, fields, layout_schema) do + {_, types} = Schema.decode_types(layout_schema) + + fields + |> Enum.zip(types) + |> Enum.reduce({%{}, key_bytes}, fn {field, type}, {acc, data} -> + type_size = static_type_size(type) + <> = data + + enc = + if type < 64 or type >= 96 do + :binary.part(word, 32 - type_size, type_size) + else + :binary.part(word, 0, type_size) + end + + decoded = decode_type(type, enc) + + {Map.put(acc, field, decoded), rest} + end) + |> elem(0) + end + + defp decode_fields(static_data, encoded_lengths, dynamic_data, fields, layout_schema) do + {static_fields_count, types} = Schema.decode_types(layout_schema) + + {static_types, dynamic_types} = Enum.split(types, static_fields_count) + + {static_fields, dynamic_fields} = Enum.split(fields, static_fields_count) + + res = + static_fields + |> Enum.zip(static_types) + |> Enum.reduce({%{}, (static_data && static_data.bytes) || <<>>}, fn {field, type}, {acc, data} -> + type_size = static_type_size(type) + <> = data + decoded = decode_type(type, enc) + {Map.put(acc, field, decoded), rest} + end) + |> elem(0) + + if encoded_lengths == nil or byte_size(encoded_lengths.bytes) == 0 do + res + else + dynamic_type_lengths = + encoded_lengths.bytes + |> :binary.bin_to_list(0, 25) + |> Enum.chunk_every(5) + |> Enum.reverse() + |> Enum.map(&(&1 |> :binary.list_to_bin() |> :binary.decode_unsigned())) + + [dynamic_fields, 
dynamic_types, dynamic_type_lengths] + |> Enum.zip() + |> Enum.reduce({res, (dynamic_data && dynamic_data.bytes) || <<>>}, fn {field, type, length}, {acc, data} -> + <> = data + decoded = decode_type(type, enc) + + {Map.put(acc, field, decoded), rest} + end) + |> elem(0) + end + end + + defp static_type_size(type) do + case type do + _ when type < 97 -> rem(type, 32) + 1 + 97 -> 20 + _ -> 0 + end + end + + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + defp decode_type(type, raw) do + case type do + _ when type < 32 -> + raw |> :binary.decode_unsigned() |> Integer.to_string() + + _ when type < 64 -> + size = static_type_size(type) + <> = raw + int |> Integer.to_string() + + _ when type < 96 or type == 196 -> + "0x" <> Base.encode16(raw, case: :lower) + + 96 -> + raw == <<1>> + + 97 -> + Address.checksum(raw) + + _ when type < 196 -> + raw + |> :binary.bin_to_list() + |> Enum.chunk_every(static_type_size(type - 98)) + |> Enum.map(&decode_type(type - 98, :binary.list_to_bin(&1))) + + 197 -> + raw + + _ -> + raise "Unknown type: #{type}" + end + end +end diff --git a/apps/explorer/lib/explorer/chain/mud/schema.ex b/apps/explorer/lib/explorer/chain/mud/schema.ex new file mode 100644 index 000000000000..42c22359f9ef --- /dev/null +++ b/apps/explorer/lib/explorer/chain/mud/schema.ex @@ -0,0 +1,110 @@ +defmodule Explorer.Chain.Mud.Schema do + @moduledoc """ + Represents a MUD framework database record schema. + """ + + defmodule FieldSchema do + @moduledoc """ + Represents a MUD framework database record field schema. Describes number of columns and their types. + """ + + defstruct [:word] + + @typedoc """ + The MUD field schema. + * `word` - The field schema as 32-byte value. + """ + @type t :: %__MODULE__{ + word: <<_::256>> + } + + @doc """ + Decodes field schema type from raw binary or hex-encoded string. 
+ """ + @spec from(binary()) :: t() | :error + def from(<>), do: %__MODULE__{word: bin} + + def from("0x" <> <>) do + with {:ok, bin} <- Base.decode16(hex, case: :mixed) do + %__MODULE__{word: bin} + end + end + + def from(_), do: :error + + @doc """ + Tells the type of the field at index `index` in the field schema. + """ + @spec type_of(t(), non_neg_integer()) :: non_neg_integer() + def type_of(%FieldSchema{word: word}, index), do: :binary.at(word, index + 4) + end + + @enforce_keys [:key_schema, :value_schema, :key_names, :value_names] + defstruct [:key_schema, :value_schema, :key_names, :value_names] + + @typedoc """ + The MUD table schema. Describe column types and names for the given MUD table. + * `key_schema` - The field schema for the key columns. + * `value_schema` - The field schema for the value columns. + * `key_names` - The names of the key columns. + * `value_names` - The names of the value columns. + """ + @type t :: %__MODULE__{ + key_schema: FieldSchema.t(), + value_schema: FieldSchema.t(), + key_names: [String.t()], + value_names: [String.t()] + } + + defimpl Jason.Encoder, for: Explorer.Chain.Mud.Schema do + alias Explorer.Chain.Mud.Schema + alias Jason.Encode + + def encode(data, opts) do + Encode.map( + %{ + "key_types" => data.key_schema |> Schema.decode_type_names(), + "value_types" => data.value_schema |> Schema.decode_type_names(), + "key_names" => data.key_names, + "value_names" => data.value_names + }, + opts + ) + end + end + + @doc """ + Tells the number of static fields in the schema and the list of raw type IDs of all fields in the schema. 
+ """ + @spec decode_types(FieldSchema.t()) :: {non_neg_integer(), [non_neg_integer()]} + def decode_types(layout_schema) do + static_fields_count = :binary.at(layout_schema.word, 2) + dynamic_fields_count = :binary.at(layout_schema.word, 3) + + {static_fields_count, :binary.bin_to_list(layout_schema.word, 4, static_fields_count + dynamic_fields_count)} + end + + @doc """ + Tells the list of decoded type names for all fields in the schema. + """ + @spec decode_type_names(FieldSchema.t()) :: [String.t()] + def decode_type_names(layout_schema) do + {_, types} = decode_types(layout_schema) + types |> Enum.map(&encode_type_name/1) + end + + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + defp encode_type_name(type) do + case type do + _ when type < 32 -> "uint" <> Integer.to_string((type + 1) * 8) + _ when type < 64 -> "int" <> Integer.to_string((type - 31) * 8) + _ when type < 96 -> "bytes" <> Integer.to_string(type - 63) + 96 -> "bool" + 97 -> "address" + _ when type < 196 -> encode_type_name(type - 98) <> "[]" + 196 -> "bytes" + 197 -> "string" + _ -> "unknown_type_" <> Integer.to_string(type) + end + end +end diff --git a/apps/explorer/lib/explorer/chain/mud/table.ex b/apps/explorer/lib/explorer/chain/mud/table.ex new file mode 100644 index 000000000000..3adfc39a6593 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/mud/table.ex @@ -0,0 +1,88 @@ +defmodule Explorer.Chain.Mud.Table do + @moduledoc """ + Represents a decoded MUD framework database table ID. + """ + + alias Explorer.Chain.Hash + + @enforce_keys [:table_id, :table_full_name, :table_type, :table_namespace, :table_name] + @derive Jason.Encoder + defstruct [:table_id, :table_full_name, :table_type, :table_namespace, :table_name] + + @typedoc """ + Decoded MUD table name struct. + * `table_id` - The 32-bytes raw MUD table ID. + * `table_full_name` - The decoded table full name. + * `table_type` - The decoded table type: "offchain" or "onchain". 
+ * `table_namespace` - The decoded table namespace. + * `table_name` - The decoded table name. + """ + @type t :: %__MODULE__{ + table_id: Hash.Full.t(), + table_full_name: String.t(), + table_type: String.t(), + table_namespace: String.t(), + table_name: String.t() + } + + @doc """ + Decodes table type, namespace and name information from raw MUD table ID. + """ + @spec from(Hash.Full.t()) :: t() + def from(%Hash{byte_count: 32, bytes: raw} = table_id) do + <> = raw + + trimmed_namespace = String.trim_trailing(namespace, <<0>>) + trimmed_table_name = String.trim_trailing(table_name, <<0>>) + + table_full_name = + if String.length(trimmed_namespace) > 0 do + prefix <> "." <> trimmed_namespace <> "." <> trimmed_table_name + else + prefix <> "." <> trimmed_table_name + end + + table_type = + case prefix do + "ot" -> "offchain" + "tb" -> "onchain" + _ -> "unknown" + end + + %__MODULE__{ + table_id: table_id, + table_full_name: table_full_name, + table_type: table_type, + table_namespace: trimmed_namespace, + table_name: trimmed_table_name + } + end + + @doc """ + Encodes table full name as a raw MUD table ID. 
+ """ + @spec table_full_name_to_table_id(String.t()) :: {:ok, Hash.Full.t()} | :error + def table_full_name_to_table_id(full_name) do + parts = + case String.split(full_name, ".") do + [prefix, name] -> [prefix, "", name] + [prefix, namespace, name] -> [prefix, namespace, name] + _ -> :error + end + + with [prefix, namespace, name] <- parts, + {:ok, prefix} <- normalize_length(prefix, 2), + {:ok, namespace} <- normalize_length(namespace, 14), + {:ok, name} <- normalize_length(name, 16) do + Hash.Full.cast(prefix <> namespace <> name) + end + end + + defp normalize_length(str, len) do + if String.length(str) <= len do + {:ok, String.pad_trailing(str, len, <<0>>)} + else + :error + end + end +end diff --git a/apps/explorer/lib/explorer/repo.ex b/apps/explorer/lib/explorer/repo.ex index a1d4f35ad76d..845faa2fc2fe 100644 --- a/apps/explorer/lib/explorer/repo.ex +++ b/apps/explorer/lib/explorer/repo.ex @@ -274,4 +274,14 @@ defmodule Explorer.Repo do ConfigHelper.init_repo_module(__MODULE__, opts) end end + + defmodule Mud do + use Ecto.Repo, + otp_app: :explorer, + adapter: Ecto.Adapters.Postgres + + def init(_, opts) do + ConfigHelper.init_repo_module(__MODULE__, opts) + end + end end diff --git a/apps/explorer/lib/explorer/repo/config_helper.ex b/apps/explorer/lib/explorer/repo/config_helper.ex index e1edad2bc230..d1544aa80172 100644 --- a/apps/explorer/lib/explorer/repo/config_helper.ex +++ b/apps/explorer/lib/explorer/repo/config_helper.ex @@ -32,6 +32,8 @@ defmodule Explorer.Repo.ConfigHelper do def get_api_db_url, do: System.get_env("DATABASE_READ_ONLY_API_URL") || System.get_env("DATABASE_URL") + def get_mud_db_url, do: System.get_env("MUD_DATABASE_URL") || System.get_env("DATABASE_URL") + def init_repo_module(module, opts) do db_url = Application.get_env(:explorer, module)[:url] repo_conf = Application.get_env(:explorer, module) diff --git a/apps/explorer/test/test_helper.exs b/apps/explorer/test/test_helper.exs index 90d21435e5d6..c5eda8f3e457 100644 --- 
a/apps/explorer/test/test_helper.exs +++ b/apps/explorer/test/test_helper.exs @@ -22,6 +22,7 @@ Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Beacon, :auto) Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.BridgedTokens, :auto) Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Filecoin, :auto) Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Stability, :auto) +Ecto.Adapters.SQL.Sandbox.mode(Explorer.Repo.Mud, :auto) Mox.defmock(Explorer.ExchangeRates.Source.TestSource, for: Explorer.ExchangeRates.Source) Mox.defmock(Explorer.Market.History.Source.Price.TestSource, for: Explorer.Market.History.Source.Price) diff --git a/config/config_helper.exs b/config/config_helper.exs index bde67152ddbe..46f37b5ee7a5 100644 --- a/config/config_helper.exs +++ b/config/config_helper.exs @@ -24,11 +24,15 @@ defmodule ConfigHelper do _ -> base_repos end - if System.get_env("BRIDGED_TOKENS_ENABLED") do - repos ++ [Explorer.Repo.BridgedTokens] - else - repos - end + ext_repos = + [ + {parse_bool_env_var("BRIDGED_TOKENS_ENABLED"), Explorer.Repo.BridgedTokens}, + {parse_bool_env_var("MUD_INDEXER_ENABLED"), Explorer.Repo.Mud} + ] + |> Enum.filter(&elem(&1, 0)) + |> Enum.map(&elem(&1, 1)) + + repos ++ ext_repos end @spec hackney_options() :: any() diff --git a/config/runtime/dev.exs b/config/runtime/dev.exs index a8f21fdbbbbd..cb1aa7d3cbea 100644 --- a/config/runtime/dev.exs +++ b/config/runtime/dev.exs @@ -163,6 +163,17 @@ config :explorer, Explorer.Repo.Stability, url: System.get_env("DATABASE_URL"), pool_size: 1 +database_mud = if System.get_env("MUD_DATABASE_URL"), do: nil, else: database +hostname_mud = if System.get_env("MUD_DATABASE_URL"), do: nil, else: hostname + +# Configure MUD indexer database +config :explorer, Explorer.Repo.Mud, + database: database_mud, + hostname: hostname_mud, + url: ExplorerConfigHelper.get_mud_db_url(), + pool_size: ConfigHelper.parse_integer_env_var("MUD_POOL_SIZE", 10), + queue_target: queue_target + variant = Variant.get() Code.require_file("#{variant}.exs", 
"apps/explorer/config/dev") diff --git a/config/runtime/prod.exs b/config/runtime/prod.exs index cabdab7b9429..899a772783f1 100644 --- a/config/runtime/prod.exs +++ b/config/runtime/prod.exs @@ -127,6 +127,13 @@ config :explorer, Explorer.Repo.Stability, pool_size: 1, ssl: ExplorerConfigHelper.ssl_enabled?() +# Configures Mud database +config :explorer, Explorer.Repo.Mud, + url: ExplorerConfigHelper.get_mud_db_url(), + pool_size: ConfigHelper.parse_integer_env_var("MUD_POOL_SIZE", 50), + ssl: ExplorerConfigHelper.ssl_enabled?(), + queue_target: queue_target + variant = Variant.get() Code.require_file("#{variant}.exs", "apps/explorer/config/prod") diff --git a/cspell.json b/cspell.json index 065e1e2d804d..0fa574b82cef 100644 --- a/cspell.json +++ b/cspell.json @@ -391,6 +391,7 @@ "nowrap", "ntoa", "nxdomain", + "offchain", "omni", "onclick", "onconnect", diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index dab55ec0e61d..f421e2b6fec9 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -370,4 +370,7 @@ TENDERLY_CHAIN_PATH= # BRIDGED_TOKENS_BSC_OMNI_BRIDGE_MEDIATOR= # BRIDGED_TOKENS_POA_OMNI_BRIDGE_MEDIATOR= # BRIDGED_TOKENS_AMB_BRIDGE_MEDIATORS -# BRIDGED_TOKENS_FOREIGN_JSON_RPC \ No newline at end of file +# BRIDGED_TOKENS_FOREIGN_JSON_RPC +# MUD_INDEXER_ENABLED= +# MUD_DATABASE_URL= +# MUD_POOL_SIZE=50 \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile index 779985c1ab18..ee2b67d2895c 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -25,6 +25,8 @@ ARG CHAIN_TYPE ENV CHAIN_TYPE=${CHAIN_TYPE} ARG BRIDGED_TOKENS_ENABLED ENV BRIDGED_TOKENS_ENABLED=${BRIDGED_TOKENS_ENABLED} +ARG MUD_INDEXER_ENABLED +ENV MUD_INDEXER_ENABLED=${MUD_INDEXER_ENABLED} # Cache elixir deps ADD mix.exs mix.lock ./ From 3ecb5559c59e5a54dda557f33ef0cf7099ccb2dd Mon Sep 17 00:00:00 2001 From: varasev <33550681+varasev@users.noreply.github.com> Date: Fri, 10 
May 2024 14:47:57 +0300 Subject: [PATCH 004/150] chore: Add support of Blast-specific L1 OP withdrawal events (#10049) * Add support of Blast-specific L1 OP withdrawal events * mix format --------- Co-authored-by: POA <33550681+poa@users.noreply.github.com> --- .../fetcher/optimism/withdrawal_event.ex | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex index 2da5504c2925..e2c107bbac29 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex @@ -24,9 +24,15 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do # 32-byte signature of the event WithdrawalProven(bytes32 indexed withdrawalHash, address indexed from, address indexed to) @withdrawal_proven_event "0x67a6208cfcc0801d50f6cbe764733f4fddf66ac0b04442061a8a8c0cb6b63f62" + # 32-byte signature of the Blast chain event WithdrawalProven(bytes32 indexed withdrawalHash, address indexed from, address indexed to, uint256 requestId) + @withdrawal_proven_event_blast "0x5d5446905f1f582d57d04ced5b1bed0f1a6847bcee57f7dd9d6f2ec12ab9ec2e" + # 32-byte signature of the event WithdrawalFinalized(bytes32 indexed withdrawalHash, bool success) @withdrawal_finalized_event "0xdb5c7652857aa163daadd670e116628fb42e869d8ac4251ef8971d9e5727df1b" + # 32-byte signature of the Blast chain event WithdrawalFinalized(bytes32 indexed withdrawalHash, uint256 indexed hintId, bool success) + @withdrawal_finalized_event_blast "0x36d89e6190aa646d1a48286f8ad05e60a144483f42fd7e0ea08baba79343645b" + def child_spec(start_link_arguments) do spec = %{ id: __MODULE__, @@ -88,7 +94,12 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do chunk_start, chunk_end, optimism_portal, - [@withdrawal_proven_event, @withdrawal_finalized_event], + [ + @withdrawal_proven_event, + @withdrawal_proven_event_blast, + 
@withdrawal_finalized_event, + @withdrawal_finalized_event_blast + ], json_rpc_named_arguments, Helper.infinite_retries_number() ) @@ -179,7 +190,7 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do tx_hashes = events |> Enum.reduce([], fn event, acc -> - if Enum.at(event["topics"], 0) == @withdrawal_proven_event do + if Enum.member?([@withdrawal_proven_event, @withdrawal_proven_event_blast], Enum.at(event["topics"], 0)) do [event["transactionHash"] | acc] else acc @@ -200,7 +211,7 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do tx_hash = event["transactionHash"] {l1_event_type, game_index} = - if Enum.at(event["topics"], 0) == @withdrawal_proven_event do + if Enum.member?([@withdrawal_proven_event, @withdrawal_proven_event_blast], Enum.at(event["topics"], 0)) do game_index = input_by_hash |> Map.get(tx_hash) From 2dd96493bd30f328cb770dc9d5c2d20e730d1d44 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Mon, 13 May 2024 12:16:51 +0300 Subject: [PATCH 005/150] feat: Improve retry NFT fetcher (#10027) * feat: Improve retry NFT fetcher * Process review comments * Fix spelling --- apps/explorer/lib/explorer/chain.ex | 33 +++- .../lib/explorer/chain/token/instance.ex | 8 +- ...08_add_nft_instance_fetcher_aux_fields.exs | 10 ++ apps/indexer/lib/indexer/buffered_task.ex | 9 +- .../indexer/fetcher/token_instance/helper.ex | 13 +- .../indexer/fetcher/token_instance/retry.ex | 28 ++-- .../fetcher/token_instance/helper_test.exs | 141 +++++++++++++++++- config/runtime.exs | 4 +- cspell.json | 1 + 9 files changed, 226 insertions(+), 21 deletions(-) create mode 100644 apps/explorer/priv/repo/migrations/20240503091708_add_nft_instance_fetcher_aux_fields.exs diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 71a3f77425bb..4fb2b4cf05c0 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -3602,12 +3602,16 @@ defmodule Explorer.Chain do 
Instance |> where([instance], not is_nil(instance.error)) + |> where([instance], is_nil(instance.refetch_after) or instance.refetch_after > ^DateTime.utc_now()) |> select([instance], %{ contract_address_hash: instance.token_contract_address_hash, - token_id: instance.token_id, - updated_at: instance.updated_at + token_id: instance.token_id }) - |> order_by([instance], desc: instance.error in ^high_priority, asc: instance.error in ^negative_priority) + |> order_by([instance], + asc: instance.refetch_after, + desc: instance.error in ^high_priority, + asc: instance.error in ^negative_priority + ) |> add_fetcher_limit(limited?) |> Repo.stream_reduce(initial, reducer) end @@ -3803,6 +3807,12 @@ defmodule Explorer.Chain do end defp token_instance_metadata_on_conflict do + config = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Retry) + + coef = config[:exp_timeout_coeff] + base = config[:exp_timeout_base] + max_refetch_interval = config[:max_refetch_interval] + from( token_instance in Instance, update: [ @@ -3813,7 +3823,22 @@ defmodule Explorer.Chain do owner_updated_at_log_index: token_instance.owner_updated_at_log_index, owner_address_hash: token_instance.owner_address_hash, inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", token_instance.inserted_at), - updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", token_instance.updated_at) + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", token_instance.updated_at), + retries_count: token_instance.retries_count + 1, + refetch_after: + fragment( + """ + CASE WHEN EXCLUDED.metadata IS NULL THEN + NOW() AT TIME ZONE 'UTC' + LEAST(interval '1 seconds' * (? * ? ^ (? + 1)), interval '1 milliseconds' * ?) 
+ ELSE + NULL + END + """, + ^coef, + ^base, + token_instance.retries_count, + ^max_refetch_interval + ) ] ], where: is_nil(token_instance.metadata) diff --git a/apps/explorer/lib/explorer/chain/token/instance.ex b/apps/explorer/lib/explorer/chain/token/instance.ex index 9854e63c02a2..52f26d3c2c47 100644 --- a/apps/explorer/lib/explorer/chain/token/instance.ex +++ b/apps/explorer/lib/explorer/chain/token/instance.ex @@ -16,6 +16,8 @@ defmodule Explorer.Chain.Token.Instance do * `token_contract_address_hash` - Address hash foreign key * `metadata` - Token instance metadata * `error` - error fetching token instance + * `refetch_after` - when to refetch the token instance + * `retries_count` - number of times the token instance has been retried """ @primary_key false typed_schema "token_instances" do @@ -26,6 +28,8 @@ defmodule Explorer.Chain.Token.Instance do field(:owner_updated_at_log_index, :integer) field(:current_token_balance, :any, virtual: true) field(:is_unique, :boolean, virtual: true) + field(:refetch_after, :utc_datetime_usec) + field(:retries_count, :integer) belongs_to(:owner, Address, foreign_key: :owner_address_hash, references: :hash, type: Hash.Address) @@ -51,7 +55,9 @@ defmodule Explorer.Chain.Token.Instance do :error, :owner_address_hash, :owner_updated_at_block, - :owner_updated_at_log_index + :owner_updated_at_log_index, + :refetch_after, + :retries_count ]) |> validate_required([:token_id, :token_contract_address_hash]) |> foreign_key_constraint(:token_contract_address_hash) diff --git a/apps/explorer/priv/repo/migrations/20240503091708_add_nft_instance_fetcher_aux_fields.exs b/apps/explorer/priv/repo/migrations/20240503091708_add_nft_instance_fetcher_aux_fields.exs new file mode 100644 index 000000000000..29195625806f --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20240503091708_add_nft_instance_fetcher_aux_fields.exs @@ -0,0 +1,10 @@ +defmodule Explorer.Repo.Migrations.AddNftInstanceFetcherAuxFields do + use Ecto.Migration + + def 
change do + alter table(:token_instances) do + add(:refetch_after, :utc_datetime_usec, null: true) + add(:retries_count, :smallint, default: 0, null: false) + end + end +end diff --git a/apps/indexer/lib/indexer/buffered_task.ex b/apps/indexer/lib/indexer/buffered_task.ex index 3cdbc37f8bbc..e728e802c621 100644 --- a/apps/indexer/lib/indexer/buffered_task.ex +++ b/apps/indexer/lib/indexer/buffered_task.ex @@ -216,7 +216,6 @@ defmodule Indexer.BufferedTask do def start_link({module, base_init_opts}, genserver_opts \\ []) do default_opts = Application.get_all_env(:indexer) init_opts = Keyword.merge(default_opts, base_init_opts) - GenServer.start_link(__MODULE__, {module, init_opts}, genserver_opts) end @@ -297,6 +296,14 @@ defmodule Indexer.BufferedTask do {:reply, %{buffer: count, tasks: Enum.count(task_ref_to_batch)}, state} end + def handle_call( + :state, + _from, + state + ) do + {:reply, state, state} + end + def handle_call({:push_back, entries}, _from, state) when is_list(entries) do new_state = state diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex index 898ac561a1a7..83c405af2726 100644 --- a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex +++ b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex @@ -254,7 +254,8 @@ defmodule Indexer.Fetcher.TokenInstance.Helper do token_id: token_id, token_contract_address_hash: token_contract_address_hash, metadata: metadata, - error: nil + error: nil, + refetch_after: nil } end @@ -265,10 +266,18 @@ defmodule Indexer.Fetcher.TokenInstance.Helper do do: token_instance_map_with_error(token_id, token_contract_address_hash, reason) defp token_instance_map_with_error(token_id, token_contract_address_hash, error) do + config = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Retry) + + coef = config[:exp_timeout_coeff] + max_refetch_interval = config[:max_refetch_interval] + + timeout = min(coef * 1000, 
max_refetch_interval) + %{ token_id: token_id, token_contract_address_hash: token_contract_address_hash, - error: error + error: error, + refetch_after: DateTime.add(DateTime.utc_now(), timeout, :millisecond) } end diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/retry.ex b/apps/indexer/lib/indexer/fetcher/token_instance/retry.ex index a09bce459698..a279a143d660 100644 --- a/apps/indexer/lib/indexer/fetcher/token_instance/retry.ex +++ b/apps/indexer/lib/indexer/fetcher/token_instance/retry.ex @@ -15,6 +15,8 @@ defmodule Indexer.Fetcher.TokenInstance.Retry do @default_max_batch_size 10 @default_max_concurrency 10 + @max_queue_size 5000 + @busy_waiting_timeout 500 @doc false def child_spec([init_options, gen_server_options]) do @@ -32,31 +34,35 @@ defmodule Indexer.Fetcher.TokenInstance.Retry do Chain.stream_token_instances_with_error( initial_acc, fn data, acc -> - reducer.(data, acc) + reduce_if_queue_is_not_full(data, acc, reducer) end ) acc end + defp reduce_if_queue_is_not_full(data, acc, reducer) do + bound_queue = GenServer.call(__MODULE__, :state).bound_queue + + if bound_queue.size >= @max_queue_size or (bound_queue.maximum_size && bound_queue.size >= bound_queue.maximum_size) do + :timer.sleep(@busy_waiting_timeout) + + reduce_if_queue_is_not_full(data, acc, reducer) + else + reducer.(data, acc) + end + end + @impl BufferedTask def run(token_instances, _json_rpc_named_arguments) when is_list(token_instances) do - refetch_interval = Application.get_env(:indexer, __MODULE__)[:refetch_interval] - - token_instances - |> Enum.filter(fn %{contract_address_hash: _hash, token_id: _token_id, updated_at: updated_at} -> - updated_at - |> DateTime.add(refetch_interval, :millisecond) - |> DateTime.compare(DateTime.utc_now()) != :gt - end) - |> batch_fetch_instances() + batch_fetch_instances(token_instances) :ok end defp defaults do [ - flush_interval: :timer.minutes(10), + flush_interval: :timer.seconds(10), max_concurrency: Application.get_env(:indexer, 
__MODULE__)[:concurrency] || @default_max_concurrency, max_batch_size: Application.get_env(:indexer, __MODULE__)[:batch_size] || @default_max_batch_size, task_supervisor: __MODULE__.TaskSupervisor diff --git a/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs b/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs index 0dfae419fb08..2b250a6d5906 100644 --- a/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs +++ b/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs @@ -4,7 +4,6 @@ defmodule Indexer.Fetcher.TokenInstance.HelperTest do alias Explorer.Chain.Token.Instance alias Explorer.Repo - alias EthereumJSONRPC.Encoder alias Indexer.Fetcher.TokenInstance.Helper alias Plug.Conn @@ -381,4 +380,144 @@ defmodule Indexer.Fetcher.TokenInstance.HelperTest do } = 777 |> Instance.token_instance_query("0x5caebd3b32e210e85ce3e9d51638b9c445481567") |> Repo.one() end end + + describe "check retries count and refetch after" do + test "retries count 0 for new instance" do + config = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Retry) + + coef = config[:exp_timeout_coeff] + base = config[:exp_timeout_base] + max_refetch_interval = config[:max_refetch_interval] + + erc_721_token = insert(:token, type: "ERC-721") + + token_instance = build(:token_instance, token_contract_address_hash: erc_721_token.contract_address_hash) + + Helper.batch_fetch_instances([ + %{contract_address_hash: token_instance.token_contract_address_hash, token_id: token_instance.token_id} + ]) + + now = DateTime.utc_now() + timeout = min(coef * base ** 0 * 1000, max_refetch_interval) + refetch_after = DateTime.add(now, timeout, :millisecond) + + [instance] = Repo.all(Instance) + + assert instance.retries_count == 0 + assert DateTime.diff(refetch_after, instance.refetch_after) < 1 + assert !is_nil(instance.error) + end + + test "proper updates retries count and refetch after on retry" do + config = Application.get_env(:indexer, 
Indexer.Fetcher.TokenInstance.Retry) + + coef = config[:exp_timeout_coeff] + base = config[:exp_timeout_base] + max_refetch_interval = config[:max_refetch_interval] + + erc_721_token = insert(:token, type: "ERC-721") + + token_instance = + insert(:token_instance, + token_contract_address_hash: erc_721_token.contract_address_hash, + error: "error", + metadata: nil + ) + + Helper.batch_fetch_instances([ + %{contract_address_hash: token_instance.token_contract_address_hash, token_id: token_instance.token_id} + ]) + + now = DateTime.utc_now() + timeout = min(coef * base ** 1 * 1000, max_refetch_interval) + refetch_after = DateTime.add(now, timeout, :millisecond) + + [instance] = Repo.all(Instance) + + assert instance.retries_count == 1 + assert DateTime.diff(refetch_after, instance.refetch_after) < 1 + assert !is_nil(instance.error) + end + + test "success insert after retry" do + config = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Retry) + + coef = config[:exp_timeout_coeff] + base = config[:exp_timeout_base] + max_refetch_interval = config[:max_refetch_interval] + + erc_721_token = insert(:token, type: "ERC-721") + + token_instance = build(:token_instance, token_contract_address_hash: erc_721_token.contract_address_hash) + + Helper.batch_fetch_instances([ + %{contract_address_hash: token_instance.token_contract_address_hash, token_id: token_instance.token_id} + ]) + + Helper.batch_fetch_instances([ + %{contract_address_hash: token_instance.token_contract_address_hash, token_id: token_instance.token_id} + ]) + + now = DateTime.utc_now() + timeout = min(coef * base ** 1 * 1000, max_refetch_interval) + refetch_after = DateTime.add(now, timeout, :millisecond) + + [instance] = Repo.all(Instance) + + assert instance.retries_count == 1 + assert DateTime.diff(refetch_after, instance.refetch_after) < 1 + assert !is_nil(instance.error) + + token_address = to_string(erc_721_token.contract_address_hash) + + data = + "0xc87b56dd" <> + 
(ABI.TypeEncoder.encode([token_instance.token_id], [{:uint, 256}]) |> Base.encode16(case: :lower)) + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn [ + %{ + id: 0, + jsonrpc: "2.0", + method: "eth_call", + params: [ + %{ + data: ^data, + to: ^token_address + }, + "latest" + ] + } + ], + _options -> + {:ok, + [ + %{ + id: 0, + jsonrpc: "2.0", + result: + "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000115646174613a6170706c69636174696f6e2f6a736f6e3b757466382c7b226e616d65223a20224f4d4e493430342023333030303637303030303030303030303030222c226465736372697074696f6e223a225468652066726f6e74696572206f66207065726d697373696f6e6c657373206173736574732e222c2265787465726e616c5f75726c223a2268747470733a2f2f747769747465722e636f6d2f6f6d6e69636861696e343034222c22696d616765223a2268747470733a2f2f697066732e696f2f697066732f516d55364447586369535a5854483166554b6b45716a3734503846655850524b7853546a675273564b55516139352f626173652f3330303036373030303030303030303030302e4a5047227d0000000000000000000000" + } + ]} + end) + + Helper.batch_fetch_instances([ + %{contract_address_hash: token_instance.token_contract_address_hash, token_id: token_instance.token_id} + ]) + + [instance] = Repo.all(Instance) + + assert instance.retries_count == 2 + assert is_nil(instance.refetch_after) + assert is_nil(instance.error) + + assert instance.metadata == %{ + "name" => "OMNI404 #300067000000000000", + "description" => "The frontier of permissionless assets.", + "external_url" => "https://twitter.com/omnichain404", + "image" => + "https://ipfs.io/ipfs/QmU6DGXciSZXTH1fUKkEqj74P8FeXPRKxSTjgRsVKUQa95/base/300067000000000000.JPG" + } + end + end end diff --git a/config/runtime.exs b/config/runtime.exs index 7deb0e20a041..6aacf275b007 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -727,7 +727,9 @@ config :indexer, Indexer.Fetcher.TokenInstance.Helper, config :indexer, Indexer.Fetcher.TokenInstance.Retry, concurrency: 
ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_RETRY_CONCURRENCY", 10), batch_size: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_RETRY_BATCH_SIZE", 10), - refetch_interval: ConfigHelper.parse_time_env_var("INDEXER_TOKEN_INSTANCE_RETRY_REFETCH_INTERVAL", "24h") + max_refetch_interval: ConfigHelper.parse_time_env_var("INDEXER_TOKEN_INSTANCE_RETRY_MAX_REFETCH_INTERVAL", "168h"), + exp_timeout_base: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_RETRY_EXPONENTIAL_TIMEOUT_BASE", 2), + exp_timeout_coeff: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_RETRY_EXPONENTIAL_TIMEOUT_COEFF", 100) config :indexer, Indexer.Fetcher.TokenInstance.Realtime, concurrency: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_REALTIME_CONCURRENCY", 10), diff --git a/cspell.json b/cspell.json index 0fa574b82cef..14815e397628 100644 --- a/cspell.json +++ b/cspell.json @@ -172,6 +172,7 @@ "clickover", "codeformat", "coef", + "coeff", "coinprice", "coinsupply", "coinzilla", From 6fa60b4e4b0c0c4c1493f33027c0492e07cb8284 Mon Sep 17 00:00:00 2001 From: Fedor Ivanov Date: Mon, 13 May 2024 18:21:50 +0300 Subject: [PATCH 006/150] chore: remove `has_methods` from `/addresses` (#10051) * chore: remove `has_methods_` `read`/`read_proxy`/`write`/`write_proxy` from `/addresses` * chore: move proxy contract test to `smart_contract_controller_test.exs` --- .../views/api/v2/address_view.ex | 10 ------ .../api/v2/address_controller_test.exs | 6 ---- .../api/v2/smart_contract_controller_test.exs | 27 ++++++++++++++++ .../views/api/v2/address_view_test.exs | 31 ------------------- 4 files changed, 27 insertions(+), 47 deletions(-) delete mode 100644 apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex index 6482b4733a63..32a1296499bd 100644 --- 
a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex @@ -122,9 +122,6 @@ defmodule BlockScoutWeb.API.V2.AddressView do creation_tx = creator_hash && AddressView.transaction_hash(address) token = address.token && TokenView.render("token.json", %{token: address.token}) - write_custom_abi? = AddressView.has_address_custom_abi_with_write_functions?(conn, address.hash) - read_custom_abi? = AddressView.has_address_custom_abi_with_read_functions?(conn, address.hash) - # todo: added for backward compatibility, remove when frontend unbound from these props {implementation_address, implementation_name} = single_implementation(implementation_addresses, implementation_names) @@ -141,13 +138,6 @@ defmodule BlockScoutWeb.API.V2.AddressView do "implementation_address" => implementation_address, "implementation_addresses" => implementation_addresses, "block_number_balance_updated_at" => address.fetched_coin_balance_block_number, - "has_custom_methods_read" => read_custom_abi?, - "has_custom_methods_write" => write_custom_abi?, - "has_methods_read" => AddressView.smart_contract_with_read_only_functions?(address), - "has_methods_write" => AddressView.smart_contract_with_write_functions?(address), - "has_methods_read_proxy" => is_proxy, - "has_methods_write_proxy" => - AddressView.smart_contract_with_write_functions?(address_with_smart_contract) && is_proxy, "has_decompiled_code" => AddressView.has_decompiled_code?(address), "has_validated_blocks" => Counters.check_if_validated_blocks_at_address(address.hash, @api_true), "has_logs" => Counters.check_if_logs_at_address(address.hash, @api_true), diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs index cd79b0e494df..1d41ca8d6dae 100644 --- 
a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs @@ -78,12 +78,6 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do "implementation_address" => nil, "implementation_addresses" => [], "block_number_balance_updated_at" => nil, - "has_custom_methods_read" => false, - "has_custom_methods_write" => false, - "has_methods_read" => false, - "has_methods_write" => false, - "has_methods_read_proxy" => false, - "has_methods_write_proxy" => false, "has_decompiled_code" => false, "has_validated_blocks" => false, "has_logs" => false, diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs index fe2caa28bf64..9880f312a13d 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs @@ -67,6 +67,33 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do } end + test "get an eip1967 proxy contract", %{conn: conn} do + implementation_address = insert(:contract_address) + proxy_address = insert(:contract_address) + + _proxy_smart_contract = + insert(:smart_contract, + address_hash: proxy_address.hash, + contract_code_md5: "123" + ) + + implementation = + insert(:proxy_implementation, + proxy_address_hash: proxy_address.hash, + proxy_type: "eip1967", + address_hashes: [implementation_address.hash], + names: [nil] + ) + + assert implementation.proxy_type == :eip1967 + + request = get(conn, "/api/v2/smart-contracts/#{Address.checksum(proxy_address.hash)}") + response = json_response(request, 200) + + assert response["has_methods_read_proxy"] == true + assert response["has_methods_write_proxy"] == true + end + test "get smart-contract", 
%{conn: conn} do lib_address = build(:address) lib_address_string = to_string(lib_address) diff --git a/apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs b/apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs deleted file mode 100644 index 58cf82acec60..000000000000 --- a/apps/block_scout_web/test/block_scout_web/views/api/v2/address_view_test.exs +++ /dev/null @@ -1,31 +0,0 @@ -defmodule BlockScoutWeb.API.V2.AddressViewTest do - use BlockScoutWeb.ConnCase, async: true - - alias BlockScoutWeb.API.V2.AddressView - alias Explorer.{Repo, TestHelper} - - test "for a proxy contract has_methods_read_proxy is true" do - implementation_address = insert(:contract_address) - proxy_address = insert(:contract_address) |> Repo.preload([:token]) - - _proxy_smart_contract = - insert(:smart_contract, - address_hash: proxy_address.hash, - contract_code_md5: "123" - ) - - implementation = - insert(:proxy_implementation, - proxy_address_hash: proxy_address.hash, - proxy_type: "eip1967", - address_hashes: [implementation_address.hash], - names: [nil] - ) - - assert implementation.proxy_type == :eip1967 - - TestHelper.get_eip1967_implementation_zero_addresses() - - assert AddressView.prepare_address(proxy_address)["has_methods_read_proxy"] == true - end -end From 6682065d3c37a9556fe098a72c84b3778241f567 Mon Sep 17 00:00:00 2001 From: Fedor Ivanov Date: Mon, 13 May 2024 18:23:45 +0300 Subject: [PATCH 007/150] refactor: test database config (#9662) * refactor: test database config * feat: configure test db with special envs * fix: prefer `url` from config over raw env * refactor: remove redundant fallback * refactor: use `ConfigHelper.init_repo_module/2` to configure `ZkSync` repo --- apps/explorer/config/test.exs | 32 +++++++++++++------ apps/explorer/lib/explorer/repo.ex | 32 ++----------------- .../lib/explorer/repo/config_helper.ex | 2 +- docker-compose/envs/common-blockscout.env | 4 ++- 4 files changed, 28 insertions(+), 42 
deletions(-) diff --git a/apps/explorer/config/test.exs b/apps/explorer/config/test.exs index 64d37ba266e7..b3670fe1e927 100644 --- a/apps/explorer/config/test.exs +++ b/apps/explorer/config/test.exs @@ -3,10 +3,15 @@ import Config # Lower hashing rounds for faster tests config :bcrypt_elixir, log_rounds: 4 +database_url = System.get_env("TEST_DATABASE_URL") +database = if database_url, do: nil, else: "explorer_test" +hostname = if database_url, do: nil, else: "localhost" + # Configure your database config :explorer, Explorer.Repo, - database: "explorer_test", - hostname: "localhost", + database: database, + hostname: hostname, + url: database_url, pool: Ecto.Adapters.SQL.Sandbox, # Default of `5_000` was too low for `BlockFetcher` test ownership_timeout: :timer.minutes(7), @@ -17,8 +22,9 @@ config :explorer, Explorer.Repo, # Configure API database config :explorer, Explorer.Repo.Replica1, - database: "explorer_test", - hostname: "localhost", + database: database, + hostname: hostname, + url: database_url, pool: Ecto.Adapters.SQL.Sandbox, # Default of `5_000` was too low for `BlockFetcher` test ownership_timeout: :timer.minutes(1), @@ -32,10 +38,14 @@ config :explorer, :proxy, fallback_cached_implementation_data_ttl: :timer.seconds(20), implementation_data_fetching_timeout: :timer.seconds(20) +account_database_url = System.get_env("TEST_DATABASE_READ_ONLY_API_URL") || database_url +account_database = if account_database_url, do: nil, else: "explorer_test_account" + # Configure API database config :explorer, Explorer.Repo.Account, - database: "explorer_test_account", - hostname: "localhost", + database: account_database, + hostname: hostname, + url: account_database_url, pool: Ecto.Adapters.SQL.Sandbox, # Default of `5_000` was too low for `BlockFetcher` test ownership_timeout: :timer.minutes(1), @@ -58,8 +68,9 @@ for repo <- [ Explorer.Repo.Mud ] do config :explorer, repo, - database: "explorer_test", - hostname: "localhost", + database: database, + hostname: 
hostname, + url: database_url, pool: Ecto.Adapters.SQL.Sandbox, # Default of `5_000` was too low for `BlockFetcher` test ownership_timeout: :timer.minutes(1), @@ -70,8 +81,9 @@ for repo <- [ end config :explorer, Explorer.Repo.PolygonZkevm, - database: "explorer_test", - hostname: "localhost", + database: database, + hostname: hostname, + url: database_url, pool: Ecto.Adapters.SQL.Sandbox, # Default of `5_000` was too low for `BlockFetcher` test ownership_timeout: :timer.minutes(1), diff --git a/apps/explorer/lib/explorer/repo.ex b/apps/explorer/lib/explorer/repo.ex index 845faa2fc2fe..38b19e86392b 100644 --- a/apps/explorer/lib/explorer/repo.ex +++ b/apps/explorer/lib/explorer/repo.ex @@ -12,21 +12,7 @@ defmodule Explorer.Repo do DATABASE_URL environment variable. """ def init(_, opts) do - db_url = System.get_env("DATABASE_URL") - repo_conf = Application.get_env(:explorer, Explorer.Repo) - - merged = - %{url: db_url} - |> ConfigHelper.get_db_config() - |> Keyword.merge(repo_conf, fn - _key, v1, nil -> v1 - _key, nil, v2 -> v2 - _, _, v2 -> v2 - end) - - Application.put_env(:explorer, Explorer.Repo, merged) - - {:ok, Keyword.put(opts, :url, db_url)} + ConfigHelper.init_repo_module(__MODULE__, opts) end def logged_transaction(fun_or_multi, opts \\ []) do @@ -187,21 +173,7 @@ defmodule Explorer.Repo do adapter: Ecto.Adapters.Postgres def init(_, opts) do - db_url = Application.get_env(:explorer, __MODULE__)[:url] - repo_conf = Application.get_env(:explorer, __MODULE__) - - merged = - %{url: db_url} - |> ConfigHelper.get_db_config() - |> Keyword.merge(repo_conf, fn - _key, v1, nil -> v1 - _key, nil, v2 -> v2 - _, _, v2 -> v2 - end) - - Application.put_env(:explorer, __MODULE__, merged) - - {:ok, Keyword.put(opts, :url, db_url)} + ConfigHelper.init_repo_module(__MODULE__, opts) end end diff --git a/apps/explorer/lib/explorer/repo/config_helper.ex b/apps/explorer/lib/explorer/repo/config_helper.ex index d1544aa80172..26e53f31e7cf 100644 --- 
a/apps/explorer/lib/explorer/repo/config_helper.ex +++ b/apps/explorer/lib/explorer/repo/config_helper.ex @@ -17,7 +17,7 @@ defmodule Explorer.Repo.ConfigHelper do ] def get_db_config(opts) do - url_encoded = opts[:url] || System.get_env("DATABASE_URL") + url_encoded = opts[:url] url = url_encoded && URI.decode(url_encoded) env_function = opts[:env_func] || (&System.get_env/1) diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index f421e2b6fec9..0f3c2a579143 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -3,6 +3,8 @@ ETHEREUM_JSONRPC_HTTP_URL=http://host.docker.internal:8545/ # ETHEREUM_JSONRPC_FALLBACK_HTTP_URL= DATABASE_URL=postgresql://blockscout:ceWb1MeLBEeOIfk65gU8EjF8@db:5432/blockscout # DATABASE_QUEUE_TARGET +# TEST_DATABASE_URL= +# TEST_DATABASE_READ_ONLY_API_URL= ETHEREUM_JSONRPC_TRACE_URL=http://host.docker.internal:8545/ # ETHEREUM_JSONRPC_FALLBACK_TRACE_URL= # ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL= @@ -373,4 +375,4 @@ TENDERLY_CHAIN_PATH= # BRIDGED_TOKENS_FOREIGN_JSON_RPC # MUD_INDEXER_ENABLED= # MUD_DATABASE_URL= -# MUD_POOL_SIZE=50 \ No newline at end of file +# MUD_POOL_SIZE=50 From d066b0e7c8b7f5eae44537ba232b1d29a0927dd3 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Mon, 13 May 2024 19:03:00 +0300 Subject: [PATCH 008/150] fix: Fix Unknown UID bug at smart-contract verification (#9986) * fix: Fix Unknown UID bug * Fix credo * Fix credo * Refactoring * Fix aliases --- .../smart_contract/verification_status.ex | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex b/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex index 83037584c1a4..48fcc772da77 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex +++ 
b/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex @@ -9,6 +9,8 @@ defmodule Explorer.Chain.SmartContract.VerificationStatus do alias Explorer.Chain.Hash alias Explorer.{Chain, Repo} + alias Explorer.SmartContract.Solidity.PublisherWorker, as: SolidityPublisherWorker + alias Que.Persistence, as: QuePersistence @typedoc """ * `address_hash` - address of the contract which was tried to verify @@ -90,6 +92,7 @@ defmodule Explorer.Chain.SmartContract.VerificationStatus do |> Repo.get_by(uid: valid_uid) |> (&if(is_nil(&1), do: 3, else: Map.get(&1, :status))).() |> decode_status() + |> mb_find_uid_in_queue(uid) _ -> :unknown_uid @@ -123,4 +126,21 @@ defmodule Explorer.Chain.SmartContract.VerificationStatus do end def validate_uid(_), do: :error + + defp mb_find_uid_in_queue(:unknown_uid, uid) do + SolidityPublisherWorker + |> QuePersistence.all() + |> Enum.any?(fn + %Que.Job{arguments: {"flattened_api", _, _, ^uid}} -> + :pending + + %Que.Job{arguments: {"json_api", _, _, ^uid}} -> + :pending + + _ -> + :unknown_uid + end) + end + + defp mb_find_uid_in_queue(other_status, _), do: other_status end From 06c2dde82e4b2595c9402322708861a566177ecf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 May 2024 22:41:17 +0300 Subject: [PATCH 009/150] chore: Bump ex_doc from 0.32.1 to 0.32.2 (#10061) Bumps [ex_doc](https://github.com/elixir-lang/ex_doc) from 0.32.1 to 0.32.2. - [Release notes](https://github.com/elixir-lang/ex_doc/releases) - [Changelog](https://github.com/elixir-lang/ex_doc/blob/main/CHANGELOG.md) - [Commits](https://github.com/elixir-lang/ex_doc/compare/v0.32.1...v0.32.2) --- updated-dependencies: - dependency-name: ex_doc dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mix.lock b/mix.lock index 5eb48d248642..1a468ace2732 100644 --- a/mix.lock +++ b/mix.lock @@ -47,7 +47,7 @@ "ex_cldr_lists": {:hex, :ex_cldr_lists, "2.11.0", "1d39e75f0e493ccc95adfc85c55b4ca34f0771626350ce326d9ab8813d91444e", [:mix], [{:ex_cldr_numbers, "~> 2.25", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "8132b30a5506ae8a09e5c9a21c23fd60c8837ce6c3a1de9966d813eb78951695"}, "ex_cldr_numbers": {:hex, :ex_cldr_numbers, "2.33.1", "49dc6e77e6d9ad22660aaa2480a7408ad3aedfbe517e4e83e5fe3a7bf5345770", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:digital_token, "~> 0.3 or ~> 1.0", [hex: :digital_token, repo: "hexpm", optional: false]}, {:ex_cldr, "~> 2.38", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_currencies, "~> 2.16", [hex: :ex_cldr_currencies, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "c003bfaa3fdee6bab5195f128b94038c2ce1cf4879a759eef431dd075d9a5dac"}, "ex_cldr_units": {:hex, :ex_cldr_units, "3.17.0", "f26dcde31a8fbb7808afa106ce2c7cbf38e0e0e0678ac523e795cdfdc67ab502", [:mix], [{:cldr_utils, "~> 2.25", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ex_cldr_lists, "~> 2.10", [hex: :ex_cldr_lists, repo: "hexpm", optional: false]}, {:ex_cldr_numbers, "~> 2.33.0", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", 
"b9f09c420f5e3b86ed41f135751086bc59bf2bb8e633516e8d3e9f24d6d9e777"}, - "ex_doc": {:hex, :ex_doc, "0.32.1", "21e40f939515373bcdc9cffe65f3b3543f05015ac6c3d01d991874129d173420", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "5142c9db521f106d61ff33250f779807ed2a88620e472ac95dc7d59c380113da"}, + "ex_doc": {:hex, :ex_doc, "0.32.2", "f60bbeb6ccbe75d005763e2a328e6f05e0624232f2393bc693611c2d3ae9fa0e", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "a4480305cdfe7fdfcbb77d1092c76161626d9a7aa4fb698aee745996e34602df"}, "ex_json_schema": {:hex, :ex_json_schema, "0.10.2", "7c4b8c1481fdeb1741e2ce66223976edfb9bccebc8014f6aec35d4efe964fb71", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "37f43be60f8407659d4d0155a7e45e7f406dab1f827051d3d35858a709baf6a6"}, "ex_keccak": {:hex, :ex_keccak, "0.7.5", "f3b733173510d48ae9a1ea1de415e694b2651f35c787e63f33b5ed0013fbfd35", [:mix], [{:rustler, ">= 0.0.0", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.7", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "8a5e1cb7f96fff5e480ff6a121477b90c4fd8c150984086dffd98819f5d83763"}, "ex_machina": {:hex, :ex_machina, "2.7.0", "b792cc3127fd0680fecdb6299235b4727a4944a09ff0fa904cc639272cd92dc7", [:mix], [{:ecto, "~> 2.2 or ~> 
3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "419aa7a39bde11894c87a615c4ecaa52d8f107bbdd81d810465186f783245bf8"}, @@ -76,9 +76,9 @@ "junit_formatter": {:hex, :junit_formatter, "3.4.0", "d0e8db6c34dab6d3c4154c3b46b21540db1109ae709d6cf99ba7e7a2ce4b1ac2", [:mix], [], "hexpm", "bb36e2ae83f1ced6ab931c4ce51dd3dbef1ef61bb4932412e173b0cfa259dacd"}, "logger_file_backend": {:hex, :logger_file_backend, "0.0.14", "774bb661f1c3fed51b624d2859180c01e386eb1273dc22de4f4a155ef749a602", [:mix], [], "hexpm", "071354a18196468f3904ef09413af20971d55164267427f6257b52cfba03f9e6"}, "logger_json": {:hex, :logger_json, "5.1.4", "9e30a4f2e31a8b9e402bdc20bd37cf9b67d3a31f19d0b33082a19a06b4c50f6d", [:mix], [{:ecto, "~> 2.1 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.5.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "3f20eea58e406a33d3eb7814c7dff5accb503bab2ee8601e84da02976fa3934c"}, - "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"}, + "makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"}, "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, 
{:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"}, - "makeup_erlang": {:hex, :makeup_erlang, "0.1.5", "e0ff5a7c708dda34311f7522a8758e23bfcd7d8d8068dc312b5eb41c6fd76eba", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "94d2e986428585a21516d7d7149781480013c56e30c6a233534bedf38867a59a"}, + "makeup_erlang": {:hex, :makeup_erlang, "1.0.0", "6f0eff9c9c489f26b69b61440bf1b238d95badae49adac77973cbacae87e3c2e", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "ea7a9307de9d1548d2a72d299058d1fd2339e3d398560a0e46c27dab4891e4d2"}, "math": {:hex, :math, "0.7.0", "12af548c3892abf939a2e242216c3e7cbfb65b9b2fe0d872d05c6fb609f8127b", [:mix], [], "hexpm", "7987af97a0c6b58ad9db43eb5252a49fc1dfe1f6d98f17da9282e297f594ebc2"}, "meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"}, "memento": {:hex, :memento, "0.3.2", "38cfc8ff9bcb1adff7cbd0f3b78a762636b86dff764729d1c82d0464c539bdd0", [:mix], [], "hexpm", "25cf691a98a0cb70262f4a7543c04bab24648cb2041d937eb64154a8d6f8012b"}, From 47cd3d287a42e65883ea07bcb239edd2c985bebf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 May 2024 09:35:30 +0300 Subject: [PATCH 010/150] chore: Bump redix from 1.5.0 to 1.5.1 (#10059) Bumps [redix](https://github.com/whatyouhide/redix) from 1.5.0 to 1.5.1. - [Changelog](https://github.com/whatyouhide/redix/blob/main/CHANGELOG.md) - [Commits](https://github.com/whatyouhide/redix/compare/v1.5.0...v1.5.1) --- updated-dependencies: - dependency-name: redix dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index 1a468ace2732..0eaeeefe0864 100644 --- a/mix.lock +++ b/mix.lock @@ -121,7 +121,7 @@ "que": {:hex, :que, "0.10.1", "788ed0ec92ed69bdf9cfb29bf41a94ca6355b8d44959bd0669cf706e557ac891", [:mix], [{:ex_utils, "~> 0.1.6", [hex: :ex_utils, repo: "hexpm", optional: false]}, {:memento, "~> 0.3.0", [hex: :memento, repo: "hexpm", optional: false]}], "hexpm", "a737b365253e75dbd24b2d51acc1d851049e87baae08cd0c94e2bc5cd65088d5"}, "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, "ratio": {:hex, :ratio, "2.4.2", "c8518f3536d49b1b00d88dd20d49f8b11abb7819638093314a6348139f14f9f9", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:numbers, "~> 5.2.0", [hex: :numbers, repo: "hexpm", optional: false]}], "hexpm", "441ef6f73172a3503de65ccf1769030997b0d533b1039422f1e5e0e0b4cbf89e"}, - "redix": {:hex, :redix, "1.5.0", "fffadf4526722ba9f062c0bda64e881613fbf99c2c1708692751def8fe6d546d", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:nimble_options, "~> 0.5.0 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "01084c005339c1eb8f2ba9e3737277ec3648d1e9b5c8e96525dcdb38e474ca96"}, + "redix": {:hex, :redix, "1.5.1", "a2386971e69bf23630fb3a215a831b5478d2ee7dc9ea7ac811ed89186ab5d7b7", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:nimble_options, "~> 0.5.0 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", 
optional: false]}], "hexpm", "85224eb2b683c516b80d472eb89b76067d5866913bf0be59d646f550de71f5c4"}, "remote_ip": {:hex, :remote_ip, "1.1.0", "cb308841595d15df3f9073b7c39243a1dd6ca56e5020295cb012c76fbec50f2d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "616ffdf66aaad6a72fc546dabf42eed87e2a99e97b09cbd92b10cc180d02ed74"}, "rustler_precompiled": {:hex, :rustler_precompiled, "0.7.1", "ecadf02cc59a0eccbaed6c1937303a5827fbcf60010c541595e6d3747d3d0f9f", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:rustler, "~> 0.23", [hex: :rustler, repo: "hexpm", optional: true]}], "hexpm", "b9e4657b99a1483ea31502e1d58c464bedebe9028808eda45c3a429af4550c66"}, "sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"}, From ce934ec338ca1d4e6701bab84c9c94317bd7fe43 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 May 2024 10:07:30 +0300 Subject: [PATCH 011/150] chore: Bump credo from 1.7.5 to 1.7.6 (#10060) Bumps [credo](https://github.com/rrrene/credo) from 1.7.5 to 1.7.6. - [Release notes](https://github.com/rrrene/credo/releases) - [Changelog](https://github.com/rrrene/credo/blob/master/CHANGELOG.md) - [Commits](https://github.com/rrrene/credo/compare/v1.7.5...v1.7.6) --- updated-dependencies: - dependency-name: credo dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index 0eaeeefe0864..8973c1f953a9 100644 --- a/mix.lock +++ b/mix.lock @@ -27,7 +27,7 @@ "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.3.1", "ebd1a1d7aff97f27c66654e78ece187abdc646992714164380d8a041eda16754", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3a6efd3366130eab84ca372cbd4a7d3c3a97bdfcfb4911233b035d117063f0af"}, "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"}, - "credo": {:hex, :credo, "1.7.5", "643213503b1c766ec0496d828c90c424471ea54da77c8a168c725686377b9545", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f799e9b5cd1891577d8c773d245668aa74a2fcd15eb277f51a0131690ebfb3fd"}, + "credo": {:hex, :credo, "1.7.6", "b8f14011a5443f2839b04def0b252300842ce7388f3af177157c86da18dfbeea", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", 
"146f347fb9f8cbc5f7e39e3f22f70acbef51d441baa6d10169dd604bfbc55296"}, "csv": {:hex, :csv, "2.5.0", "c47b5a5221bf2e56d6e8eb79e77884046d7fd516280dc7d9b674251e0ae46246", [:mix], [{:parallel_stream, "~> 1.0.4 or ~> 1.1.0", [hex: :parallel_stream, repo: "hexpm", optional: false]}], "hexpm", "e821f541487045c7591a1963eeb42afff0dfa99bdcdbeb3410795a2f59c77d34"}, "dataloader": {:hex, :dataloader, "1.0.11", "49bbfc7dd8a1990423c51000b869b1fecaab9e3ccd6b29eab51616ae8ad0a2f5", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:telemetry, "~> 1.0 or ~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ba0b0ec532ec68e9d033d03553561d693129bd7cbd5c649dc7903f07ffba08fe"}, "db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"}, From b377ba5588bacefadd00a3deae0670a9f299dafd Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 14 May 2024 11:25:39 +0300 Subject: [PATCH 012/150] refactor: Refactor get_additional_sources/4 -> get_additional_sources/3 (#10046) --- .../templates/address_contract/index.html.eex | 20 +++++++------- .../views/api/v2/smart_contract_view.ex | 26 +++++++++---------- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/templates/address_contract/index.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/address_contract/index.html.eex index cc06aefe272e..d5b804f42b3f 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/address_contract/index.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/address_contract/index.html.eex @@ -1,9 +1,9 @@ <% contract_creation_code = contract_creation_code(@address) %> <% minimal_proxy_template = EIP1167.get_implementation_smart_contract(@address.hash) %> 
-<% metadata_for_verification = minimal_proxy_template || SmartContract.get_address_verified_bytecode_twin_contract(@address.hash).verified_contract %> +<% implementation_or_bytecode_twin_contract = minimal_proxy_template || SmartContract.get_address_verified_bytecode_twin_contract(@address.hash).verified_contract %> <% smart_contract_verified = BlockScoutWeb.AddressView.smart_contract_verified?(@address) %> <% fully_verified = SmartContract.verified_with_full_match?(@address.hash)%> -<% additional_sources = BlockScoutWeb.API.V2.SmartContractView.get_additional_sources(@address.smart_contract, smart_contract_verified, minimal_proxy_template, SmartContract.get_address_verified_bytecode_twin_contract(@address.hash)) %> +<% additional_sources = BlockScoutWeb.API.V2.SmartContractView.get_additional_sources(@address.smart_contract, smart_contract_verified, implementation_or_bytecode_twin_contract) %> <% visualize_sol2uml_enabled = Explorer.Visualize.Sol2uml.enabled?() %>

<% is_proxy = BlockScoutWeb.AddressView.smart_contract_is_proxy?(@address) %> @@ -15,16 +15,16 @@
<%= unless smart_contract_verified do %> <%= if minimal_proxy_template do %> - <%= render BlockScoutWeb.CommonComponentsView, "_minimal_proxy_pattern.html", address_hash: metadata_for_verification.address_hash, conn: @conn %> + <%= render BlockScoutWeb.CommonComponentsView, "_minimal_proxy_pattern.html", address_hash: implementation_or_bytecode_twin_contract.address_hash, conn: @conn %> <% else %> - <%= if metadata_for_verification do %> + <%= if implementation_or_bytecode_twin_contract do %> <% path = address_verify_contract_path(@conn, :new, @address.hash) %>
<%= render BlockScoutWeb.CommonComponentsView, "_info.html" %> <%= gettext("Contract is not verified. However, we found a verified contract with the same bytecode in Blockscout DB") %> <%= link( - metadata_for_verification.address_hash, - to: address_contract_path(@conn, :index, metadata_for_verification.address_hash)) %>.
<%= gettext("All metadata displayed below is from that contract. In order to verify current contract, click") %> <%= gettext("Verify & Publish") %> <%= gettext("button") %>
+ implementation_or_bytecode_twin_contract.address_hash, + to: address_contract_path(@conn, :index, implementation_or_bytecode_twin_contract.address_hash)) %>.
<%= gettext("All metadata displayed below is from that contract. In order to verify current contract, click") %> <%= gettext("Verify & Publish") %> <%= gettext("button") %>
<%= link(gettext("Verify & Publish"), to: path, class: "button button-primary button-sm float-right ml-3", "data-test": "verify_and_publish") %>
@@ -40,8 +40,8 @@
<% end %> <% end %> - <%= if smart_contract_verified || (!smart_contract_verified && metadata_for_verification) do %> - <% target_contract = if smart_contract_verified, do: @address.smart_contract, else: metadata_for_verification %> + <%= if smart_contract_verified || (!smart_contract_verified && implementation_or_bytecode_twin_contract) do %> + <% target_contract = if smart_contract_verified, do: @address.smart_contract, else: implementation_or_bytecode_twin_contract %> <%= if @address.smart_contract && @address.smart_contract.verified_via_sourcify && @address.smart_contract.partially_verified && smart_contract_verified do %>
<%= gettext("This contract has been partially verified via Sourcify.") %> @@ -240,8 +240,8 @@ <% end %>
- <%= if smart_contract_verified || (!smart_contract_verified && metadata_for_verification) do %> - <% target_contract = if smart_contract_verified, do: @address.smart_contract, else: metadata_for_verification %> + <%= if smart_contract_verified || (!smart_contract_verified && implementation_or_bytecode_twin_contract) do %> + <% target_contract = if smart_contract_verified, do: @address.smart_contract, else: implementation_or_bytecode_twin_contract %> <%= if target_contract.external_libraries && target_contract.external_libraries != [] do %>
diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex index c5cb462303c3..19ab49aebde6 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex @@ -148,7 +148,7 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do def prepare_smart_contract(%Address{smart_contract: %SmartContract{} = smart_contract} = address, conn) do bytecode_twin = SmartContract.get_address_verified_bytecode_twin_contract(address.hash, @api_true) minimal_proxy_address_hash = address.implementation - metadata_for_verification = address.implementation || bytecode_twin.verified_contract + implementation_or_bytecode_twin_contract = address.implementation || bytecode_twin.verified_contract smart_contract_verified = AddressView.smart_contract_verified?(address) fully_verified = SmartContract.verified_with_full_match?(address.hash, @api_true) write_methods? 
= AddressView.smart_contract_with_write_functions?(address) @@ -162,16 +162,18 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do get_additional_sources( smart_contract, smart_contract_verified, - metadata_for_verification, - bytecode_twin + implementation_or_bytecode_twin_contract ) visualize_sol2uml_enabled = Sol2uml.enabled?() - target_contract = if smart_contract_verified, do: address.smart_contract, else: metadata_for_verification + + target_contract = + if smart_contract_verified, do: address.smart_contract, else: implementation_or_bytecode_twin_contract %{ "verified_twin_address_hash" => - metadata_for_verification && Address.checksum(metadata_for_verification.address_hash), + implementation_or_bytecode_twin_contract && + Address.checksum(implementation_or_bytecode_twin_contract.address_hash), "is_verified" => smart_contract_verified, "is_changed_bytecode" => smart_contract_verified && address.smart_contract.is_changed_bytecode, "is_partially_verified" => address.smart_contract.partially_verified && smart_contract_verified, @@ -233,20 +235,18 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do @doc """ Returns additional sources of the smart-contract or from bytecode twin or from implementation, if it fits minimal proxy pattern (EIP-1167) """ - @spec get_additional_sources(SmartContract.t(), boolean, SmartContract.t() | nil, %{ - :verified_contract => any(), - :additional_sources => SmartContractAdditionalSource.t() | nil - }) :: [SmartContractAdditionalSource.t()] | nil - def get_additional_sources(smart_contract, smart_contract_verified, minimal_proxy_template, bytecode_twin) do + @spec get_additional_sources(SmartContract.t(), boolean, SmartContract.t() | nil) :: + [SmartContractAdditionalSource.t()] | nil + def get_additional_sources(smart_contract, smart_contract_verified, implementation_or_bytecode_twin_contract) do cond do - !is_nil(minimal_proxy_template) -> - minimal_proxy_template.smart_contract_additional_sources + 
!is_nil(implementation_or_bytecode_twin_contract) -> + implementation_or_bytecode_twin_contract.smart_contract_additional_sources smart_contract_verified -> smart_contract.smart_contract_additional_sources true -> - bytecode_twin.additional_sources + [] end end From 621024c046aac781ee256e61f0e0ad0c51da762e Mon Sep 17 00:00:00 2001 From: Rim Rakhimov Date: Tue, 14 May 2024 14:48:08 +0300 Subject: [PATCH 013/150] fix: vyper contracts re-verification (#10053) * Add test case for publishing re-verified solidity contract * Add (failing) test case for publishing re-verified vyper contract * fix: allow for vyper contracts re-verification * Remove commented line --- .../smart_contract/vyper/publisher.ex | 40 +++++++++- .../solidity/publisher_test.exs | 40 ++++++++++ .../smart_contract/vyper/publisher_test.exs | 80 +++++++++++++++++++ apps/explorer/test/support/factory.ex | 42 ++++++++++ 4 files changed, 200 insertions(+), 2 deletions(-) create mode 100644 apps/explorer/test/explorer/smart_contract/vyper/publisher_test.exs diff --git a/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex b/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex index a95ec70eb455..d8938b503c3d 100644 --- a/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex +++ b/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex @@ -123,10 +123,46 @@ defmodule Explorer.SmartContract.Vyper.Publisher do end def publish_smart_contract(address_hash, params, abi) do - Logger.info("Publish successfully verified Vyper smart-contract #{address_hash} into the DB") attrs = address_hash |> attributes(params, abi) - SmartContract.create_smart_contract(attrs, attrs.external_libraries, attrs.secondary_sources) + create_or_update_smart_contract(address_hash, attrs) + end + + @doc """ + Creates or updates a smart contract record based on its verification status. + + This function first checks if a smart contract associated with the provided address hash + is already verified.
If verified, it updates the existing smart contract record with the + new attributes provided, such as external libraries and secondary sources. During the update, + the contract methods are also updated: existing methods are preserved, and any new methods + from the provided ABI are added to ensure the contract's integrity and completeness. + + If the smart contract is not verified, it creates a new record in the database with the + provided attributes, setting it up for verification. In this case, all contract methods + from the ABI are freshly inserted as part of the new smart contract creation. + + ## Parameters + - `address_hash`: The hash of the address for the smart contract. + - `attrs`: A map containing attributes such as external libraries and secondary sources. + + ## Returns + - `{:ok, Explorer.Chain.SmartContract.t()}`: Successfully created or updated smart + contract. + - `{:error, data}`: on failure, returning `Ecto.Changeset.t()` or, if any issues + happen during setting the address as verified, an error message. + """ + @spec create_or_update_smart_contract(binary() | Explorer.Chain.Hash.t(), %{ + :secondary_sources => list(), + optional(any()) => any() + }) :: {:error, Ecto.Changeset.t() | String.t()} | {:ok, Explorer.Chain.SmartContract.t()} + def create_or_update_smart_contract(address_hash, attrs) do + Logger.info("Publish successfully verified Vyper smart-contract #{address_hash} into the DB") + + if SmartContract.verified?(address_hash) do + SmartContract.update_smart_contract(attrs, attrs.external_libraries, attrs.secondary_sources) + else + SmartContract.create_smart_contract(attrs, attrs.external_libraries, attrs.secondary_sources) + end end defp unverified_smart_contract(address_hash, params, error, error_message, verification_with_files? 
\\ false) do diff --git a/apps/explorer/test/explorer/smart_contract/solidity/publisher_test.exs b/apps/explorer/test/explorer/smart_contract/solidity/publisher_test.exs index 5cc612bb45b6..3719efb3ccfc 100644 --- a/apps/explorer/test/explorer/smart_contract/solidity/publisher_test.exs +++ b/apps/explorer/test/explorer/smart_contract/solidity/publisher_test.exs @@ -193,5 +193,45 @@ defmodule Explorer.SmartContract.Solidity.PublisherTest do response = Publisher.publish(contract_address.hash, params, external_libraries_form_params) assert {:ok, %SmartContract{} = _smart_contract} = response end + + test "allows to re-verify solidity contracts" do + contract_code_info = Factory.contract_code_info_modern_compiler() + + contract_address = insert(:contract_address, contract_code: contract_code_info.bytecode) + + :transaction + |> insert(created_contract_address_hash: contract_address.hash, input: contract_code_info.tx_input) + |> with_block(status: :ok) + + valid_attrs = %{ + "contract_source_code" => contract_code_info.source_code, + "compiler_version" => contract_code_info.version, + "name" => contract_code_info.name, + "optimization" => contract_code_info.optimized + } + + response = Publisher.publish(contract_address.hash, valid_attrs) + assert {:ok, %SmartContract{}} = response + + updated_name = "AnotherContractName" + + updated_contract_source_code = + String.replace( + valid_attrs["contract_source_code"], + "contract #{valid_attrs["name"]}", + "contract #{updated_name}" + ) + + valid_attrs = + valid_attrs + |> Map.put("name", updated_name) + |> Map.put("contract_source_code", updated_contract_source_code) + + response = Publisher.publish(contract_address.hash, valid_attrs) + assert {:ok, %SmartContract{} = smart_contract} = response + + assert smart_contract.name == valid_attrs["name"] + assert smart_contract.contract_source_code == valid_attrs["contract_source_code"] + end end end diff --git a/apps/explorer/test/explorer/smart_contract/vyper/publisher_test.exs 
b/apps/explorer/test/explorer/smart_contract/vyper/publisher_test.exs new file mode 100644 index 000000000000..5eeee2e4a8da --- /dev/null +++ b/apps/explorer/test/explorer/smart_contract/vyper/publisher_test.exs @@ -0,0 +1,80 @@ +defmodule Explorer.SmartContract.Vyper.PublisherTest do + use ExUnit.Case, async: true + + use Explorer.DataCase + + doctest Explorer.SmartContract.Vyper.Publisher + + @moduletag timeout: :infinity + + alias Explorer.Chain.{ContractMethod, SmartContract} + alias Explorer.{Factory, Repo} + alias Explorer.SmartContract.Vyper.Publisher + + setup do + configuration = Application.get_env(:explorer, Explorer.SmartContract.RustVerifierInterfaceBehaviour) + Application.put_env(:explorer, Explorer.SmartContract.RustVerifierInterfaceBehaviour, enabled: false) + + on_exit(fn -> + Application.put_env(:explorer, Explorer.SmartContract.RustVerifierInterfaceBehaviour, configuration) + end) + end + + describe "publish/2" do + test "with valid data creates a smart_contract" do + contract_code_info = Factory.contract_code_info_vyper() + + contract_address = insert(:contract_address, contract_code: contract_code_info.bytecode) + + :transaction + |> insert(created_contract_address_hash: contract_address.hash, input: contract_code_info.tx_input) + |> with_block(status: :ok) + + valid_attrs = %{ + "contract_source_code" => contract_code_info.source_code, + "compiler_version" => contract_code_info.version, + "name" => contract_code_info.name + } + + response = Publisher.publish(contract_address.hash, valid_attrs) + assert {:ok, %SmartContract{} = smart_contract} = response + + assert smart_contract.address_hash == contract_address.hash + assert smart_contract.name == valid_attrs["name"] + assert smart_contract.compiler_version == valid_attrs["compiler_version"] + assert smart_contract.contract_source_code == valid_attrs["contract_source_code"] + assert is_nil(smart_contract.constructor_arguments) + assert smart_contract.abi == contract_code_info.abi + end + + 
test "allows to re-verify vyper contracts" do + contract_code_info = Factory.contract_code_info_vyper() + + contract_address = insert(:contract_address, contract_code: contract_code_info.bytecode) + + :transaction + |> insert(created_contract_address_hash: contract_address.hash, input: contract_code_info.tx_input) + |> with_block(status: :ok) + + valid_attrs = %{ + "contract_source_code" => contract_code_info.source_code, + "compiler_version" => contract_code_info.version, + "name" => contract_code_info.name + } + + response = Publisher.publish(contract_address.hash, valid_attrs) + assert {:ok, %SmartContract{}} = response + + updated_name = "AnotherContractName" + + valid_attrs = + valid_attrs + |> Map.put("name", updated_name) + + response = Publisher.publish(contract_address.hash, valid_attrs) + assert {:ok, %SmartContract{} = smart_contract} = response + + assert smart_contract.name == valid_attrs["name"] + end + end +end diff --git a/apps/explorer/test/support/factory.ex b/apps/explorer/test/support/factory.ex index 4f7f03cb3a6a..c186da66ec7a 100644 --- a/apps/explorer/test/support/factory.ex +++ b/apps/explorer/test/support/factory.ex @@ -451,6 +451,48 @@ defmodule Explorer.Factory do } end + def contract_code_info_vyper do + %{ + bytecode: + "0x5f3560e01c60026001821660011b61005b01601e395f51565b63158ef93e81186100535734610057575f5460405260206040f3610053565b633fa4f245811861005357346100575760015460405260206040f35b5f5ffd5b5f80fd00180037", + tx_input: + "0x3461001c57607b6001555f5f5561005f61002060003961005f6000f35b5f80fd5f3560e01c60026001821660011b61005b01601e395f51565b63158ef93e81186100535734610057575f5460405260206040f3610053565b633fa4f245811861005357346100575760015460405260206040f35b5f5ffd5b5f80fd0018003784185f810400a16576797065728300030a0013", + name: "SimpleContract", + source_code: """ + initialized: public(bool) + value: public(uint256) + + @external + def __init__(): + self.value = 123 + self.initialized = False + """, + abi: [ + %{ + "inputs" => [], + 
"outputs" => [], + "stateMutability" => "nonpayable", + "type" => "constructor" + }, + %{ + "inputs" => [], + "name" => "initialized", + "outputs" => [%{"name" => "", "type" => "bool"}], + "stateMutability" => "view", + "type" => "function" + }, + %{ + "inputs" => [], + "name" => "value", + "outputs" => [%{"name" => "", "type" => "uint256"}], + "stateMutability" => "view", + "type" => "function" + } + ], + version: "v0.3.10" + } + end + def address_hash do {:ok, address_hash} = "address_hash" From def8a1aed0b1b52c57235f1beac205cc5180838d Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 14 May 2024 14:54:10 +0300 Subject: [PATCH 014/150] feat: Clone with immutable arguments proxy pattern (#10039) * feat: Clone with immutable arguments proxy pattern * refactor: rename unverified_proxy_only? to proxy_without_abi? --- .../views/api/v2/smart_contract_view.ex | 2 +- .../api/v2/smart_contract_controller_test.exs | 112 ++++++++++++++++++ apps/explorer/lib/explorer/chain.ex | 2 +- .../lib/explorer/chain/smart_contract.ex | 4 +- .../explorer/chain/smart_contract/proxy.ex | 38 +++++- .../proxy/clone_with_immutable_arguments.ex | 55 +++++++++ .../proxy/models/implementation.ex | 1 + .../lib/explorer/etherscan/contracts.ex | 2 +- ...y_type_clones_with_immutable_arguments.exs | 7 ++ 9 files changed, 215 insertions(+), 8 deletions(-) create mode 100644 apps/explorer/lib/explorer/chain/smart_contract/proxy/clone_with_immutable_arguments.ex create mode 100644 apps/explorer/priv/repo/migrations/20240501131140_new_proxy_type_clones_with_immutable_arguments.exs diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex index 19ab49aebde6..1d9d2a2832b6 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex @@ -233,7 +233,7 @@ defmodule 
BlockScoutWeb.API.V2.SmartContractView do end @doc """ - Returns additional sources of the smart-contract or from bytecode twin or from implementation, if it fits minimal proxy pattern (EIP-1167) + Returns additional sources of the smart-contract or from bytecode twin or from implementation, if it fits minimal proxy pattern (EIP-1167, Clone with immutable arguments) """ @spec get_additional_sources(SmartContract.t(), boolean, SmartContract.t() | nil) :: [SmartContractAdditionalSource.t()] | nil diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs index 9880f312a13d..e94225267700 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs @@ -496,6 +496,118 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do end end + test "get smart-contract implementation for 'Clones with immutable arguments' pattern", %{conn: conn} do + implementation_contract = + insert(:smart_contract, + external_libraries: [], + constructor_arguments: "", + abi: [ + %{ + "type" => "constructor", + "inputs" => [ + %{"type" => "address", "name" => "_proxyStorage"}, + %{"type" => "address", "name" => "_implementationAddress"} + ] + }, + %{ + "constant" => false, + "inputs" => [%{"name" => "x", "type" => "uint256"}], + "name" => "set", + "outputs" => [], + "payable" => false, + "stateMutability" => "nonpayable", + "type" => "function" + }, + %{ + "constant" => true, + "inputs" => [], + "name" => "get", + "outputs" => [%{"name" => "", "type" => "uint256"}], + "payable" => false, + "stateMutability" => "view", + "type" => "function" + } + ], + license_type: 9 + ) + + insert(:smart_contract_additional_source, + file_name: "test1", + contract_source_code: "test2", + address_hash: 
implementation_contract.address_hash + ) + + implementation_contract_address_hash_string = + Base.encode16(implementation_contract.address_hash.bytes, case: :lower) + + proxy_tx_input = + "0x684fbe55000000000000000000000000af1caf51d49b0e63d1ff7e5d4ed6ea26d15f3f9d000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003" + + proxy_deployed_bytecode = + "0x3d3d3d3d363d3d3761003f603736393661003f013d73" <> + implementation_contract_address_hash_string <> + "5af43d3d93803e603557fd5bf3af1caf51d49b0e63d1ff7e5d4ed6ea26d15f3f9d0000000000000000000000000000000000000000000000000000000000000001000000000000000203003d" + + proxy_address = + insert(:contract_address, + contract_code: proxy_deployed_bytecode + ) + + insert(:transaction, + created_contract_address_hash: proxy_address.hash, + input: proxy_tx_input + ) + |> with_block(status: :ok) + + correct_response = %{ + "verified_twin_address_hash" => Address.checksum(implementation_contract.address_hash), + "is_verified" => false, + "is_changed_bytecode" => false, + "is_partially_verified" => implementation_contract.partially_verified, + "is_fully_verified" => false, + "is_verified_via_sourcify" => false, + "is_vyper_contract" => implementation_contract.is_vyper_contract, + "has_methods_read" => true, + "has_methods_write" => true, + "has_methods_read_proxy" => true, + "has_methods_write_proxy" => true, + "has_custom_methods_read" => false, + "has_custom_methods_write" => false, + "minimal_proxy_address_hash" => Address.checksum(implementation_contract.address_hash), + "sourcify_repo_url" => nil, + "can_be_visualized_via_sol2uml" => false, + "name" => implementation_contract && implementation_contract.name, + "compiler_version" => implementation_contract.compiler_version, + "optimization_enabled" => implementation_contract.optimization, + "optimization_runs" => 
implementation_contract.optimization_runs, + "evm_version" => implementation_contract.evm_version, + "verified_at" => implementation_contract.inserted_at |> to_string() |> String.replace(" ", "T"), + "source_code" => implementation_contract.contract_source_code, + "file_path" => implementation_contract.file_path, + "additional_sources" => [ + %{"file_path" => "test1", "source_code" => "test2"} + ], + "compiler_settings" => implementation_contract.compiler_settings, + "external_libraries" => [], + "constructor_args" => nil, + "decoded_constructor_args" => nil, + "is_self_destructed" => false, + "deployed_bytecode" => proxy_deployed_bytecode, + "creation_bytecode" => proxy_tx_input, + "abi" => implementation_contract.abi, + "is_verified_via_eth_bytecode_db" => implementation_contract.verified_via_eth_bytecode_db, + "is_verified_via_verifier_alliance" => implementation_contract.verified_via_verifier_alliance, + "language" => smart_contract_language(implementation_contract), + "license_type" => "bsd_3_clause", + "certified" => false + } + + request = get(conn, "/api/v2/smart-contracts/#{Address.checksum(proxy_address.hash)}") + response = json_response(request, 200) + + assert correct_response == response + end + describe "/smart-contracts/{address_hash} <> eth_bytecode_db" do setup do old_interval_env = Application.get_env(:explorer, Explorer.Chain.Fetcher.LookUpSmartContractSourcesOnDemand) diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 4fb2b4cf05c0..4ef2caece1d3 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -1186,7 +1186,7 @@ defmodule Explorer.Chain do implementation_address_fetched?: false, refetch_necessity_checked?: false }, - Keyword.put(options, :unverified_proxy_only?, true) + Keyword.put(options, :proxy_without_abi?, true) ) add_implementation_and_bytecode_twin_to_result(address_result, implementation_address_hashes, hash, options) diff --git 
a/apps/explorer/lib/explorer/chain/smart_contract.ex b/apps/explorer/lib/explorer/chain/smart_contract.ex index e13ab9951fce..7d09ef29718d 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract.ex @@ -584,7 +584,7 @@ defmodule Explorer.Chain.SmartContract do {implementation_address_hash, _} = Implementation.get_implementation( smart_contract, - Keyword.put(options, :unverified_proxy_only?, true) + Keyword.put(options, :proxy_without_abi?, true) ) implementation_smart_contract = @@ -959,7 +959,7 @@ defmodule Explorer.Chain.SmartContract do implementation_address_fetched?: false, refetch_necessity_checked?: false }, - Keyword.put(options, :unverified_proxy_only?, true) + Keyword.put(options, :proxy_without_abi?, true) ) {implementation_smart_contract, true} diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex index cb52efa53417..de4df3dfddee 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex @@ -6,7 +6,17 @@ defmodule Explorer.Chain.SmartContract.Proxy do alias EthereumJSONRPC.Contract alias Explorer.Chain.{Hash, SmartContract} alias Explorer.Chain.SmartContract.Proxy - alias Explorer.Chain.SmartContract.Proxy.{Basic, EIP1167, EIP1822, EIP1967, EIP2535, EIP930, MasterCopy} + + alias Explorer.Chain.SmartContract.Proxy.{ + Basic, + CloneWithImmutableArguments, + EIP1167, + EIP1822, + EIP1967, + EIP2535, + EIP930, + MasterCopy + } import Explorer.Chain, only: [ @@ -35,7 +45,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do # aaf10f42 = keccak256(getAddress(bytes32)) @get_address_signature "21f8a721" - @typep options :: [{:api?, true | false}, {:unverified_proxy_only?, true | false}] + @typep options :: [{:api?, true | false}, {:proxy_without_abi?, true | false}] @doc """ Fetches into DB proxy contract implementation's address and name from different 
proxy patterns @@ -45,7 +55,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do def fetch_implementation_address_hash(proxy_address_hash, proxy_abi, options) when not is_nil(proxy_address_hash) do %{implementation_address_hash_strings: implementation_address_hash_strings, proxy_type: proxy_type} = - if options[:unverified_proxy_only?] do + if options[:proxy_without_abi?] do get_implementation_address_hash_string_for_non_verified_proxy(proxy_address_hash) else get_implementation_address_hash_string(proxy_address_hash, proxy_abi) @@ -222,6 +232,28 @@ defmodule Explorer.Chain.SmartContract.Proxy do proxy_abi, go_to_fallback? ], + :get_implementation_address_hash_string_clones_with_immutable_arguments + ) + end + + @doc """ + Returns implementation address by following "Clone with immutable arguments" pattern or tries next proxy pattern + """ + @spec get_implementation_address_hash_string_clones_with_immutable_arguments(Hash.Address.t(), any(), bool()) :: + %{implementation_address_hash_strings: [String.t()] | :error | nil, proxy_type: atom() | :unknown} + def get_implementation_address_hash_string_clones_with_immutable_arguments( + proxy_address_hash, + proxy_abi, + go_to_fallback? \\ true + ) do + get_implementation_address_hash_string_by_module( + CloneWithImmutableArguments, + :clone_with_immutable_arguments, + [ + proxy_address_hash, + proxy_abi, + go_to_fallback? 
+ ], :get_implementation_address_hash_string_eip1967 ) end diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/clone_with_immutable_arguments.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/clone_with_immutable_arguments.ex new file mode 100644 index 000000000000..bda77d16ebbf --- /dev/null +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/clone_with_immutable_arguments.ex @@ -0,0 +1,55 @@ +defmodule Explorer.Chain.SmartContract.Proxy.CloneWithImmutableArguments do + @moduledoc """ + Module for fetching proxy implementation from https://github.com/wighawag/clones-with-immutable-args + """ + + alias Explorer.Chain + alias Explorer.Chain.{Address, Hash, SmartContract} + alias Explorer.Chain.SmartContract.Proxy + + @doc """ + Get implementation address following "Clone with immutable arguments" pattern + """ + @spec get_implementation_smart_contract(Hash.Address.t(), Keyword.t()) :: SmartContract.t() | nil + def get_implementation_smart_contract(address_hash, options \\ []) do + address_hash + |> get_implementation_address_hash_string(options) + |> Proxy.implementation_to_smart_contract(options) + end + + @doc """ + Get implementation address hash string following "Clone with immutable arguments" pattern + """ + @spec get_implementation_address_hash_string(Hash.Address.t(), Keyword.t()) :: String.t() | nil + def get_implementation_address_hash_string(address_hash, options \\ []) do + case Chain.select_repo(options).get(Address, address_hash) do + nil -> + nil + + target_address -> + contract_code = target_address.contract_code + + case contract_code do + %Chain.Data{bytes: contract_code_bytes} -> + contract_bytecode = Base.encode16(contract_code_bytes, case: :lower) + + contract_bytecode |> get_proxy_clone_with_immutable_arguments() |> Proxy.abi_decode_address_output() + + _ -> + nil + end + end + end + + defp get_proxy_clone_with_immutable_arguments(contract_bytecode) do + case contract_bytecode do + "3d3d3d3d363d3d3761" <> + 
<<_::binary-size(4)>> <> + "603736393661" <> <<_::binary-size(4)>> <> "013d73" <> <> <> _ -> + "0x" <> template_address + + _ -> + nil + end + end +end diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex index 0b9437a5a140..0bb3fdbd748f 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex @@ -42,6 +42,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do :basic_get_implementation, :comptroller, :eip2535, + :clone_with_immutable_arguments, :unknown ], null: true diff --git a/apps/explorer/lib/explorer/etherscan/contracts.ex b/apps/explorer/lib/explorer/etherscan/contracts.ex index 64449f858a09..4ecd397f56d5 100644 --- a/apps/explorer/lib/explorer/etherscan/contracts.ex +++ b/apps/explorer/lib/explorer/etherscan/contracts.ex @@ -53,7 +53,7 @@ defmodule Explorer.Etherscan.Contracts do refetch_necessity_checked?: false }, [ - {:unverified_proxy_only?, true} + {:proxy_without_abi?, true} ] ) diff --git a/apps/explorer/priv/repo/migrations/20240501131140_new_proxy_type_clones_with_immutable_arguments.exs b/apps/explorer/priv/repo/migrations/20240501131140_new_proxy_type_clones_with_immutable_arguments.exs new file mode 100644 index 000000000000..55d3d3b9a404 --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20240501131140_new_proxy_type_clones_with_immutable_arguments.exs @@ -0,0 +1,7 @@ +defmodule Explorer.Repo.Migrations.NewProxyTypeClonesWithImmutableArguments do + use Ecto.Migration + + def change do + execute("ALTER TYPE proxy_type ADD VALUE 'clone_with_immutable_arguments' BEFORE 'unknown'") + end +end From 5bbf68e756f49d0d4823da4ca609ace0061b6954 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 14 May 2024 15:28:25 +0300 Subject: [PATCH 015/150] fix: Disallow batched queries in GraphQL 
endpoint (#10050) * Disallow multiple queries in GraphQL endpoint * Fix mix credo * Add Plug.Parsers to each pipeline * Process review comments * Process review comments --- .../lib/block_scout_web/admin_router.ex | 9 ++++ .../lib/block_scout_web/api_key_v2_router.ex | 9 ++++ .../lib/block_scout_web/api_router.ex | 46 ++++++++++++++++++- .../lib/block_scout_web/endpoint.ex | 9 ---- .../block_scout_web/graphql/body_reader.ex | 35 ++++++++++++++ .../lib/block_scout_web/router.ex | 27 +++++++++++ .../smart_contracts_api_v2_router.ex | 9 ++++ .../block_scout_web/utils_api_v2_router.ex | 9 ++++ .../lib/block_scout_web/web_router.ex | 20 ++++++++ config/runtime.exs | 3 -- 10 files changed, 163 insertions(+), 13 deletions(-) create mode 100644 apps/block_scout_web/lib/block_scout_web/graphql/body_reader.ex diff --git a/apps/block_scout_web/lib/block_scout_web/admin_router.ex b/apps/block_scout_web/lib/block_scout_web/admin_router.ex index 7a1c328ba355..213d4abff444 100644 --- a/apps/block_scout_web/lib/block_scout_web/admin_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/admin_router.ex @@ -9,6 +9,15 @@ defmodule BlockScoutWeb.AdminRouter do alias BlockScoutWeb.Plug.Admin.{CheckOwnerRegistered, RequireAdminRole} pipeline :browser do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 10_000, + query_string_length: 5_000, + pass: ["*/*"], + json_decoder: Poison + ) + plug(:accepts, ["html"]) plug(:fetch_session) plug(:fetch_flash) diff --git a/apps/block_scout_web/lib/block_scout_web/api_key_v2_router.ex b/apps/block_scout_web/lib/block_scout_web/api_key_v2_router.ex index a1b9943f9bc8..29b6fe114f92 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_key_v2_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/api_key_v2_router.ex @@ -6,6 +6,15 @@ defmodule BlockScoutWeb.APIKeyV2Router do alias BlockScoutWeb.Plug.{CheckApiV2, Logger} pipeline :api_v2 do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + 
length: 10_000, + query_string_length: 5_000, + pass: ["*/*"], + json_decoder: Poison + ) + plug(Logger, application: :api_v2) plug(:accepts, ["json"]) plug(CheckApiV2) diff --git a/apps/block_scout_web/lib/block_scout_web/api_router.ex b/apps/block_scout_web/lib/block_scout_web/api_router.ex index 475d89641677..e7ad4338e2f9 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/api_router.ex @@ -16,22 +16,52 @@ defmodule BlockScoutWeb.ApiRouter do alias BlockScoutWeb.{AddressTransactionController, APIKeyV2Router, SmartContractsApiV2Router, UtilsApiV2Router} alias BlockScoutWeb.Plug.{CheckAccountAPI, CheckApiV2, RateLimit} + @max_query_string_length 5_000 + forward("/v2/smart-contracts", SmartContractsApiV2Router) forward("/v2/key", APIKeyV2Router) forward("/v2/utils", UtilsApiV2Router) pipeline :api do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 20_000_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :api) plug(:accepts, ["json"]) end pipeline :account_api do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 100_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + + plug(BlockScoutWeb.Plug.Logger, application: :api) + plug(:accepts, ["json"]) plug(:fetch_session) plug(:protect_from_forgery) plug(CheckAccountAPI) end pipeline :api_v2 do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :api_v2) plug(:accepts, ["json"]) plug(CheckApiV2) @@ -41,6 +71,14 @@ defmodule BlockScoutWeb.ApiRouter do end pipeline :api_v2_no_session do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + query_string_length: @max_query_string_length, + 
pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :api_v2) plug(:accepts, ["json"]) plug(CheckApiV2) @@ -48,6 +86,13 @@ defmodule BlockScoutWeb.ApiRouter do end pipeline :api_v1_graphql do + plug( + Plug.Parsers, + parsers: [:json, Absinthe.Plug.Parser], + json_decoder: Poison, + body_reader: {BlockScoutWeb.GraphQL.BodyReader, :read_body, []} + ) + plug(BlockScoutWeb.Plug.Logger, application: :api) plug(:accepts, ["json"]) plug(RateLimit, graphql?: true) @@ -57,7 +102,6 @@ defmodule BlockScoutWeb.ApiRouter do alias BlockScoutWeb.API.V2 scope "/account/v2", as: :account_v2 do - pipe_through(:api) pipe_through(:account_api) get("/authenticate", AuthenticateController, :authenticate_get) diff --git a/apps/block_scout_web/lib/block_scout_web/endpoint.ex b/apps/block_scout_web/lib/block_scout_web/endpoint.ex index 10d8d99f36b6..734d1e9eee06 100644 --- a/apps/block_scout_web/lib/block_scout_web/endpoint.ex +++ b/apps/block_scout_web/lib/block_scout_web/endpoint.ex @@ -43,15 +43,6 @@ defmodule BlockScoutWeb.Endpoint do plug(Plug.RequestId) - plug( - Plug.Parsers, - parsers: [:urlencoded, :multipart, :json], - length: 20_000_000, - query_string_length: 1_000_000, - pass: ["*/*"], - json_decoder: Poison - ) - plug(Plug.MethodOverride) plug(Plug.Head) diff --git a/apps/block_scout_web/lib/block_scout_web/graphql/body_reader.ex b/apps/block_scout_web/lib/block_scout_web/graphql/body_reader.ex new file mode 100644 index 000000000000..fe12ebfb1ca1 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/graphql/body_reader.ex @@ -0,0 +1,35 @@ +defmodule BlockScoutWeb.GraphQL.BodyReader do + @moduledoc """ + This module is responsible for reading the body of a graphql request and counting the number of queries in the body. 
+ """ + + alias Plug.Conn + + @max_number_of_queries 1 + + def read_body(conn, opts) do + {:ok, body, conn} = Conn.read_body(conn, opts) + updated_conn = update_in(conn.assigns[:raw_body], &[body | &1 || []]) + + json_body = Jason.decode!(body) + + json_body_length = + if is_list(json_body) do + Enum.count(json_body) + else + 1 + end + + error = %{errors: [%{message: "Max batch size is 1"}]} + + if json_body_length > @max_number_of_queries do + {:ok, "", + updated_conn + |> Conn.put_resp_content_type("application/json") + |> Conn.resp(400, Jason.encode!(error)) + |> Conn.halt()} + else + {:ok, body, updated_conn} + end + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/router.ex b/apps/block_scout_web/lib/block_scout_web/router.ex index 86da395f13cf..5e29a520d4bc 100644 --- a/apps/block_scout_web/lib/block_scout_web/router.ex +++ b/apps/block_scout_web/lib/block_scout_web/router.ex @@ -4,11 +4,22 @@ defmodule BlockScoutWeb.Router do alias BlockScoutWeb.Plug.{GraphQL, RateLimit} alias BlockScoutWeb.{ApiRouter, WebRouter} + @max_query_string_length 5_000 + if Application.compile_env(:block_scout_web, :admin_panel_enabled) do forward("/admin", BlockScoutWeb.AdminRouter) end pipeline :browser do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 100_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :block_scout_web) plug(:accepts, ["html"]) plug(:fetch_session) @@ -18,11 +29,27 @@ defmodule BlockScoutWeb.Router do end pipeline :api do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 20_000_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :api) plug(:accepts, ["json"]) end pipeline :api_v1_graphql do + plug( + Plug.Parsers, + parsers: [:json, Absinthe.Plug.Parser], + json_decoder: Poison, + body_reader: 
{BlockScoutWeb.GraphQL.BodyReader, :read_body, []} + ) + plug(BlockScoutWeb.Plug.Logger, application: :api) plug(:accepts, ["json"]) plug(RateLimit, graphql?: true) diff --git a/apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex b/apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex index aad961f117f5..86ef4f49ff96 100644 --- a/apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex @@ -7,6 +7,15 @@ defmodule BlockScoutWeb.SmartContractsApiV2Router do alias BlockScoutWeb.Plug.{CheckApiV2, RateLimit} pipeline :api_v2_no_forgery_protect do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 20_000_000, + query_string_length: 5_000, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :api_v2) plug(:accepts, ["json"]) plug(CheckApiV2) diff --git a/apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex b/apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex index 572133156dd5..b251f928d7d4 100644 --- a/apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex @@ -7,6 +7,15 @@ defmodule BlockScoutWeb.UtilsApiV2Router do alias BlockScoutWeb.Plug.{CheckApiV2, RateLimit} pipeline :api_v2_no_forgery_protect do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 100_000, + query_string_length: 5_000, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :api_v2) plug(:accepts, ["json"]) plug(CheckApiV2) diff --git a/apps/block_scout_web/lib/block_scout_web/web_router.ex b/apps/block_scout_web/lib/block_scout_web/web_router.ex index 3f3dbed7d10e..2793fd810386 100644 --- a/apps/block_scout_web/lib/block_scout_web/web_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/web_router.ex @@ -7,7 +7,18 @@ defmodule 
BlockScoutWeb.WebRouter do alias BlockScoutWeb.Plug.CheckAccountWeb + @max_query_string_length 5_000 + pipeline :browser do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 20_000_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :block_scout_web) plug(:accepts, ["html"]) plug(:fetch_session) @@ -18,6 +29,15 @@ defmodule BlockScoutWeb.WebRouter do end pipeline :account do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 100_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + plug(BlockScoutWeb.Plug.Logger, application: :block_scout_web) plug(:accepts, ["html"]) plug(:fetch_session) diff --git a/config/runtime.exs b/config/runtime.exs index 6aacf275b007..32b26f0eccff 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -117,9 +117,6 @@ config :block_scout_web, Api.GraphQL, "0x69e3923eef50eada197c3336d546936d0c994211492c9f947a24c02827568f9f" ), enabled: ConfigHelper.parse_bool_env_var("API_GRAPHQL_ENABLED", "true"), - token_limit: ConfigHelper.parse_integer_env_var("API_GRAPHQL_TOKEN_LIMIT", 1000), - # Needs to be 215 to support the schema introspection for graphiql - max_complexity: ConfigHelper.parse_integer_env_var("API_GRAPHQL_MAX_COMPLEXITY", 215), rate_limit_disabled?: ConfigHelper.parse_bool_env_var("API_GRAPHQL_RATE_LIMIT_DISABLED"), global_limit: ConfigHelper.parse_integer_env_var("API_GRAPHQL_RATE_LIMIT", default_graphql_rate_limit), limit_by_key: ConfigHelper.parse_integer_env_var("API_GRAPHQL_RATE_LIMIT_BY_KEY", default_graphql_rate_limit), From 64b55faea3a47b7956b000982b8c59f08ac21dde Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Tue, 14 May 2024 16:10:12 +0300 Subject: [PATCH 016/150] chore: Update hackney pool size: add new fetchers accounting (#9941) --- apps/indexer/lib/indexer/application.ex | 
36 ++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/apps/indexer/lib/indexer/application.ex b/apps/indexer/lib/indexer/application.ex index 7bbdbf860319..21a5c1b01bf4 100644 --- a/apps/indexer/lib/indexer/application.ex +++ b/apps/indexer/lib/indexer/application.ex @@ -29,9 +29,21 @@ defmodule Indexer.Application do json_rpc_named_arguments = Application.fetch_env!(:indexer, :json_rpc_named_arguments) pool_size = - Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Retry)[:concurrency] + - Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Realtime)[:concurrency] + - Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Sanitize)[:concurrency] + token_instance_fetcher_pool_size( + Indexer.Fetcher.TokenInstance.Realtime, + Indexer.Fetcher.TokenInstance.Realtime.Supervisor + ) + + token_instance_fetcher_pool_size( + Indexer.Fetcher.TokenInstance.Retry, + Indexer.Fetcher.TokenInstance.Retry.Supervisor + ) + + token_instance_fetcher_pool_size( + Indexer.Fetcher.TokenInstance.Sanitize, + Indexer.Fetcher.TokenInstance.Sanitize.Supervisor + ) + + token_instance_fetcher_pool_size(Indexer.Fetcher.TokenInstance.LegacySanitize, nil) + + token_instance_fetcher_pool_size(Indexer.Fetcher.TokenInstance.SanitizeERC1155, nil) + + token_instance_fetcher_pool_size(Indexer.Fetcher.TokenInstance.SanitizeERC721, nil) base_children = [ :hackney_pool.child_spec(:token_instance_fetcher, max_connections: pool_size), @@ -57,4 +69,22 @@ defmodule Indexer.Application do Supervisor.start_link(children, opts) end + + defp token_instance_fetcher_pool_size(fetcher, nil) do + envs = Application.get_env(:indexer, fetcher) + + if envs[:enabled] do + envs[:concurrency] + else + 0 + end + end + + defp token_instance_fetcher_pool_size(fetcher, supervisor) do + if Application.get_env(:indexer, supervisor)[:disabled?] 
do + 0 + else + Application.get_env(:indexer, fetcher)[:concurrency] + end + end end From fcc5ff2c8921eaeb95dd06ee11dea84094364848 Mon Sep 17 00:00:00 2001 From: Rim Rakhimov Date: Tue, 14 May 2024 18:27:35 +0300 Subject: [PATCH 017/150] feat: Blueprint contracts support (#10058) * Update smart-contract to have 'is_blueprint' flag * Store into the database 'is_blueprint' value retrieved from verification or lookup results * Make use of TestHelper module for smart_contract_controller_test * Make use of TestHelper module for verification_controller_test. Fix invalid 'is_blueprint' value in fixture * Fix spelling. Simplify Vyper.publish method * Add 'Averify' into spelling check * Add PR url to the comment with rationale behind url-encoding ':' symbol --- .../views/api/v2/smart_contract_view.ex | 3 +- .../api/v2/smart_contract_controller_test.exs | 80 ++++++++++++++++++- .../api/v2/verification_controller_test.exs | 76 ++++++++++++++++++ ...r_vyper_multi_part_blueprint_response.json | 21 +++++ .../lib/explorer/chain/smart_contract.ex | 8 +- .../rust_verifier_interface_behaviour.ex | 12 ++- .../smart_contract/solidity/publisher.ex | 4 +- .../smart_contract/vyper/publisher.ex | 19 +++-- ..._smart_contracts_add_is_blueprint_flag.exs | 9 +++ cspell.json | 1 + 10 files changed, 214 insertions(+), 19 deletions(-) create mode 100644 apps/block_scout_web/test/support/fixture/smart_contract/smart_contract_verifier_vyper_multi_part_blueprint_response.json create mode 100644 apps/explorer/priv/repo/migrations/20240509014500_smart_contracts_add_is_blueprint_flag.exs diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex index 1d9d2a2832b6..ad46ccef531a 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex @@ -216,7 +216,8 @@ defmodule 
BlockScoutWeb.API.V2.SmartContractView do ), "language" => smart_contract_language(smart_contract), "license_type" => smart_contract.license_type, - "certified" => if(smart_contract.certified, do: smart_contract.certified, else: false) + "certified" => if(smart_contract.certified, do: smart_contract.certified, else: false), + "is_blueprint" => if(smart_contract.is_blueprint, do: smart_contract.is_blueprint, else: false) } |> Map.merge(bytecode_info(address)) end diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs index e94225267700..d72079979e91 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs @@ -155,7 +155,8 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do "is_verified_via_verifier_alliance" => target_contract.verified_via_verifier_alliance, "language" => smart_contract_language(target_contract), "license_type" => "none", - "certified" => false + "certified" => false, + "is_blueprint" => false } implementation_address = insert(:address) @@ -260,7 +261,8 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do "is_verified_via_verifier_alliance" => target_contract.verified_via_verifier_alliance, "language" => smart_contract_language(target_contract), "license_type" => "gnu_agpl_v3", - "certified" => false + "certified" => false, + "is_blueprint" => false } TestHelper.get_eip1967_implementation_error_response() @@ -365,7 +367,8 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do "is_verified_via_verifier_alliance" => target_contract.verified_via_verifier_alliance, "language" => smart_contract_language(target_contract), "license_type" => "none", - "certified" => false + "certified" => false, + "is_blueprint" => false } 
TestHelper.get_eip1967_implementation_zero_addresses() @@ -486,7 +489,8 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do "is_verified_via_verifier_alliance" => implementation_contract.verified_via_verifier_alliance, "language" => smart_contract_language(implementation_contract), "license_type" => "bsd_3_clause", - "certified" => false + "certified" => false, + "is_blueprint" => false } request = get(conn, "/api/v2/smart-contracts/#{Address.checksum(proxy_address.hash)}") @@ -494,6 +498,74 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do assert correct_response == response end + + test "get smart-contract which is blueprint", %{conn: conn} do + target_contract = + insert(:smart_contract, + is_blueprint: true + ) + + insert(:transaction, + created_contract_address_hash: target_contract.address_hash, + input: + "0x608060405234801561001057600080fd5b5060df8061001f6000396000f3006080604052600436106049576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806360fe47b114604e5780636d4ce63c146078575b600080fd5b348015605957600080fd5b5060766004803603810190808035906020019092919050505060a0565b005b348015608357600080fd5b50608a60aa565b6040518082815260200191505060405180910390f35b8060008190555050565b600080549050905600a165627a7a7230582061b7676067d537e410bb704932a9984739a959416170ea17bda192ac1218d2790029" + ) + |> with_block() + + correct_response = %{ + "verified_twin_address_hash" => nil, + "is_verified" => true, + "is_changed_bytecode" => false, + "is_partially_verified" => target_contract.partially_verified, + "is_fully_verified" => true, + "is_verified_via_sourcify" => target_contract.verified_via_sourcify, + "is_vyper_contract" => target_contract.is_vyper_contract, + "has_methods_read" => true, + "has_methods_write" => true, + "has_methods_read_proxy" => false, + "has_methods_write_proxy" => false, + "has_custom_methods_read" => false, + "has_custom_methods_write" => false, + "minimal_proxy_address_hash" => nil, + 
"sourcify_repo_url" => + if(target_contract.verified_via_sourcify, + do: AddressContractView.sourcify_repo_url(target_contract.address_hash, target_contract.partially_verified) + ), + "can_be_visualized_via_sol2uml" => false, + "name" => target_contract && target_contract.name, + "compiler_version" => target_contract.compiler_version, + "optimization_enabled" => target_contract.optimization, + "optimization_runs" => target_contract.optimization_runs, + "evm_version" => target_contract.evm_version, + "verified_at" => target_contract.inserted_at |> to_string() |> String.replace(" ", "T"), + "source_code" => target_contract.contract_source_code, + "file_path" => target_contract.file_path, + "additional_sources" => [], + "compiler_settings" => target_contract.compiler_settings, + "external_libraries" => target_contract.external_libraries, + "constructor_args" => target_contract.constructor_arguments, + "decoded_constructor_args" => nil, + "is_self_destructed" => false, + "deployed_bytecode" => + "0x6080604052600436106049576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806360fe47b114604e5780636d4ce63c146078575b600080fd5b348015605957600080fd5b5060766004803603810190808035906020019092919050505060a0565b005b348015608357600080fd5b50608a60aa565b6040518082815260200191505060405180910390f35b8060008190555050565b600080549050905600a165627a7a7230582061b7676067d537e410bb704932a9984739a959416170ea17bda192ac1218d2790029", + "creation_bytecode" => + "0x608060405234801561001057600080fd5b5060df8061001f6000396000f3006080604052600436106049576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806360fe47b114604e5780636d4ce63c146078575b600080fd5b348015605957600080fd5b5060766004803603810190808035906020019092919050505060a0565b005b348015608357600080fd5b50608a60aa565b6040518082815260200191505060405180910390f35b8060008190555050565b600080549050905600a165627a7a7230582061b7676067d537e410bb704932a9984739a959416170ea17bda192ac1218d2790029", 
+ "abi" => target_contract.abi, + "is_verified_via_eth_bytecode_db" => target_contract.verified_via_eth_bytecode_db, + "is_verified_via_verifier_alliance" => target_contract.verified_via_verifier_alliance, + "language" => smart_contract_language(target_contract), + "license_type" => "none", + "certified" => false, + "is_blueprint" => true + } + + TestHelper.get_eip1967_implementation_zero_addresses() + + request = get(conn, "/api/v2/smart-contracts/#{Address.checksum(target_contract.address_hash)}") + response = json_response(request, 200) + + assert correct_response == response + end end test "get smart-contract implementation for 'Clones with immutable arguments' pattern", %{conn: conn} do diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs index ddefb7e3b1c9..a2199d26c7c1 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs @@ -3,7 +3,10 @@ defmodule BlockScoutWeb.API.V2.VerificationControllerTest do use BlockScoutWeb.ChannelCase, async: false alias BlockScoutWeb.UserSocketV2 + alias Explorer.Chain.Address + alias Explorer.TestHelper alias Tesla.Multipart + alias Plug.Conn @moduletag timeout: :infinity @@ -349,6 +352,79 @@ defmodule BlockScoutWeb.API.V2.VerificationControllerTest do Application.put_env(:explorer, :solc_bin_api_url, before) end + + test "blueprint contract verification", %{conn: conn} do + bypass = Bypass.open() + + sc_verifier_response = + File.read!( + "./test/support/fixture/smart_contract/smart_contract_verifier_vyper_multi_part_blueprint_response.json" + ) + + old_env = Application.get_env(:explorer, Explorer.SmartContract.RustVerifierInterfaceBehaviour) + + Application.put_env(:explorer, Explorer.SmartContract.RustVerifierInterfaceBehaviour, 
+ service_url: "http://localhost:#{bypass.port}", + enabled: true, + type: "sc_verifier", + eth_bytecode_db?: true + ) + + Bypass.expect_once(bypass, "POST", "/api/v2//verifier/vyper/sources%3Averify-multi-part", fn conn -> + Conn.resp(conn, 200, sc_verifier_response) + end) + + bytecode = + "0xfe7100346100235760206100995f395f516001555f5f5561005f61002760003961005f6000f35b5f80fd5f3560e01c60026001821660011b61005b01601e395f51565b63158ef93e81186100535734610057575f5460405260206040f3610053565b633fa4f245811861005357346100575760015460405260206040f35b5f5ffd5b5f80fd0018003784185f810400a16576797065728300030a0013" + + input = + "0x61009c3d81600a3d39f3fe7100346100235760206100995f395f516001555f5f5561005f61002760003961005f6000f35b5f80fd5f3560e01c60026001821660011b61005b01601e395f51565b63158ef93e81186100535734610057575f5460405260206040f3610053565b633fa4f245811861005357346100575760015460405260206040f35b5f5ffd5b5f80fd0018003784185f810400a16576797065728300030a0013" + + contract_address = insert(:contract_address, contract_code: bytecode) + + :transaction + |> insert( + created_contract_address_hash: contract_address.hash, + input: input + ) + |> with_block(status: :ok) + + topic = "addresses:#{contract_address.hash}" + + {:ok, _reply, _socket} = + BlockScoutWeb.UserSocketV2 + |> socket("no_id", %{}) + |> subscribe_and_join(topic) + + # We can actually use any params here, as verification service response is defined in `sc_verifier_response` + params = %{ + "source_code" => "some_valid_source_code", + "compiler_version" => "v0.3.10", + "contract_name" => "abc" + } + + request = post(conn, "/api/v2/smart-contracts/#{contract_address.hash}/verification/via/vyper-code", params) + + assert %{"message" => "Smart-contract verification started"} = json_response(request, 200) + + assert_receive %Phoenix.Socket.Message{ + payload: %{status: "success"}, + event: "verification_result", + topic: ^topic + }, + :timer.seconds(300) + + # Assert that the `is_blueprint=true` is stored in the database 
after verification + TestHelper.get_eip1967_implementation_zero_addresses() + + request = get(conn, "/api/v2/smart-contracts/#{Address.checksum(contract_address.hash)}") + response = json_response(request, 200) + + assert response["is_blueprint"] == true + + Application.put_env(:explorer, Explorer.SmartContract.RustVerifierInterfaceBehaviour, old_env) + Bypass.down(bypass) + end end describe "/api/v2/smart-contracts/{address_hash}/verification/via/vyper-multi-part" do diff --git a/apps/block_scout_web/test/support/fixture/smart_contract/smart_contract_verifier_vyper_multi_part_blueprint_response.json b/apps/block_scout_web/test/support/fixture/smart_contract/smart_contract_verifier_vyper_multi_part_blueprint_response.json new file mode 100644 index 000000000000..403376f7514e --- /dev/null +++ b/apps/block_scout_web/test/support/fixture/smart_contract/smart_contract_verifier_vyper_multi_part_blueprint_response.json @@ -0,0 +1,21 @@ +{ + "message": "OK", + "status": "SUCCESS", + "source": { + "fileName": "Test.vy", + "contractName": "Test", + "compilerVersion": "v0.3.10+commit.91361694", + "compilerSettings": "{\"outputSelection\":{\"*\":[\"abi\",\"evm.bytecode\",\"evm.deployedBytecode\",\"evm.methodIdentifiers\"]}}", + "sourceType": "VYPER", + "sourceFiles": { + "Test.vy": "initialized: public(bool)\nvalue: public(uint256)\n\n@external\ndef __init__(_value: uint256):\n self.value = _value\n self.initialized = False" + }, + "abi": "[{\"inputs\":[{\"name\":\"_value\",\"type\":\"uint256\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"inputs\":[],\"name\":\"initialized\",\"outputs\":[{\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"value\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"}]", + "constructorArguments": null, + "matchType": "PARTIAL", + "compilationArtifacts": 
"{\"abi\":[{\"inputs\":[{\"name\":\"_value\",\"type\":\"uint256\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"inputs\":[],\"name\":\"initialized\",\"outputs\":[{\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"value\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"}],\"sources\":{\"Test.vy\":{\"id\":0}}}", + "creationInputArtifacts": "{}", + "deployedBytecodeArtifacts": "{\"sourceMap\":\"-1:-1:0:-;;;;;;;;;;;;;;;;:::-;:::-;;;;;:::-;;;:::-;;;;;;20:4;;-1:-1;:::-;:::-;;;;;:::-;;;:::-;;;;;;40:7;;-1:-1::-;;;;:::-;;;\"}", + "isBlueprint": true + } +} \ No newline at end of file diff --git a/apps/explorer/lib/explorer/chain/smart_contract.ex b/apps/explorer/lib/explorer/chain/smart_contract.ex index 7d09ef29718d..d295288b6078 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract.ex @@ -270,6 +270,7 @@ defmodule Explorer.Chain.SmartContract do * `autodetect_constructor_args` - field was added for storing user's choice * `is_yul` - field was added for storing user's choice * `certified` - boolean flag, which can be set for set of smart-contracts via runtime env variable to prioritize those smart-contracts in the search. + * `is_blueprint` - boolean flag, determines if contract is ERC-5202 compatible blueprint contract or not. 
""" typed_schema "smart_contracts" do field(:name, :string, null: false) @@ -296,6 +297,7 @@ defmodule Explorer.Chain.SmartContract do field(:metadata_from_verified_bytecode_twin, :boolean, virtual: true) field(:license_type, Ecto.Enum, values: @license_enum, default: :none) field(:certified, :boolean) + field(:is_blueprint, :boolean) has_many( :decompiled_smart_contracts, @@ -347,7 +349,8 @@ defmodule Explorer.Chain.SmartContract do :contract_code_md5, :compiler_settings, :license_type, - :certified + :certified, + :is_blueprint ]) |> validate_required([ :name, @@ -390,7 +393,8 @@ defmodule Explorer.Chain.SmartContract do :contract_code_md5, :autodetect_constructor_args, :license_type, - :certified + :certified, + :is_blueprint ]) |> (&if(verification_with_files?, do: &1, diff --git a/apps/explorer/lib/explorer/smart_contract/rust_verifier_interface_behaviour.ex b/apps/explorer/lib/explorer/smart_contract/rust_verifier_interface_behaviour.ex index 40acb5b64302..97e7cc83be22 100644 --- a/apps/explorer/lib/explorer/smart_contract/rust_verifier_interface_behaviour.ex +++ b/apps/explorer/lib/explorer/smart_contract/rust_verifier_interface_behaviour.ex @@ -154,16 +154,20 @@ defmodule Explorer.SmartContract.RustVerifierInterfaceBehaviour do def process_verifier_response(other, _), do: {:error, other} + # Uses url encoded ("%3A") version of ':', as ':' symbol breaks `Bypass` library during tests. 
+ # https://github.com/PSPDFKit-labs/bypass/issues/122 + def solidity_multiple_files_verification_url, - do: "#{base_api_url()}" <> "/verifier/solidity/sources:verify-multi-part" + do: "#{base_api_url()}" <> "/verifier/solidity/sources%3Averify-multi-part" - def vyper_multiple_files_verification_url, do: "#{base_api_url()}" <> "/verifier/vyper/sources:verify-multi-part" + def vyper_multiple_files_verification_url, + do: "#{base_api_url()}" <> "/verifier/vyper/sources%3Averify-multi-part" def vyper_standard_json_verification_url, - do: "#{base_api_url()}" <> "/verifier/vyper/sources:verify-standard-json" + do: "#{base_api_url()}" <> "/verifier/vyper/sources%3Averify-standard-json" def solidity_standard_json_verification_url, - do: "#{base_api_url()}" <> "/verifier/solidity/sources:verify-standard-json" + do: "#{base_api_url()}" <> "/verifier/solidity/sources%3Averify-standard-json" def versions_list_url, do: "#{base_api_url()}" <> "/verifier/solidity/versions" diff --git a/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex b/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex index f0f6d0c8edb5..2cebbaa9b089 100644 --- a/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex +++ b/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex @@ -180,6 +180,7 @@ defmodule Explorer.SmartContract.Solidity.Publisher do |> Map.put("verified_via_eth_bytecode_db", automatically_verified?) 
|> Map.put("verified_via_verifier_alliance", source["verifier_alliance?"]) |> Map.put("license_type", initial_params["license_type"]) + |> Map.put("is_blueprint", source["isBlueprint"]) publish_smart_contract(address_hash, prepared_params, Jason.decode!(abi_string || "null")) end @@ -299,7 +300,8 @@ defmodule Explorer.SmartContract.Solidity.Publisher do autodetect_constructor_args: params["autodetect_constructor_args"], is_yul: params["is_yul"] || false, compiler_settings: clean_compiler_settings, - license_type: prepare_license_type(params["license_type"]) || :none + license_type: prepare_license_type(params["license_type"]) || :none, + is_blueprint: params["is_blueprint"] || false } end diff --git a/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex b/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex index d8938b503c3d..da7c92854acd 100644 --- a/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex +++ b/apps/explorer/lib/explorer/smart_contract/vyper/publisher.ex @@ -118,6 +118,7 @@ defmodule Explorer.SmartContract.Vyper.Publisher do ) |> Map.put("compiler_settings", if(standard_json?, do: compiler_settings)) |> Map.put("license_type", initial_params["license_type"]) + |> Map.put("is_blueprint", source["isBlueprint"]) publish_smart_contract(address_hash, prepared_params, Jason.decode!(abi_string)) end @@ -186,12 +187,7 @@ defmodule Explorer.SmartContract.Vyper.Publisher do constructor_arguments = params["constructor_arguments"] compiler_settings = params["compiler_settings"] - clean_constructor_arguments = - if constructor_arguments != nil && constructor_arguments != "" do - constructor_arguments - else - nil - end + clean_constructor_arguments = clear_constructor_arguments(constructor_arguments) clean_compiler_settings = if compiler_settings in ["", nil, %{}] do @@ -223,7 +219,16 @@ defmodule Explorer.SmartContract.Vyper.Publisher do is_vyper_contract: true, file_path: params["file_path"], compiler_settings: clean_compiler_settings, 
- license_type: prepare_license_type(params["license_type"]) || :none + license_type: prepare_license_type(params["license_type"]) || :none, + is_blueprint: params["is_blueprint"] || false } end + + defp clear_constructor_arguments(constructor_arguments) do + if constructor_arguments != nil && constructor_arguments != "" do + constructor_arguments + else + nil + end + end end diff --git a/apps/explorer/priv/repo/migrations/20240509014500_smart_contracts_add_is_blueprint_flag.exs b/apps/explorer/priv/repo/migrations/20240509014500_smart_contracts_add_is_blueprint_flag.exs new file mode 100644 index 000000000000..f5d362e519a1 --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20240509014500_smart_contracts_add_is_blueprint_flag.exs @@ -0,0 +1,9 @@ +defmodule Explorer.Repo.Migrations.SmartContractsAddIsBlueprintFlag do + use Ecto.Migration + + def change do + alter table(:smart_contracts) do + add(:is_blueprint, :boolean, null: true) + end + end +end diff --git a/cspell.json b/cspell.json index 14815e397628..50353346b5a0 100644 --- a/cspell.json +++ b/cspell.json @@ -18,6 +18,7 @@ "Asfpp", "Autodetection", "Autonity", + "Averify", "bitmask", "Blockchair", "CALLCODE", From 2e4e2ec051df3a6998e903c9c80b4dcdee783dd4 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Tue, 14 May 2024 19:30:30 +0400 Subject: [PATCH 018/150] fix: Separate indexer setup (#10032) * fix: Separate indexer setup * Configure CI for sepate images for indexer and API * Change CI for every push to master branch (build separate indexer/api images) * Add DISABLE_API env var * Indexing status improvements for api instance --------- Co-authored-by: Viktor Baranov --- .github/workflows/config.yml | 6 +- .github/workflows/pre-release-eth.yml | 87 +++++++++++++++++ .github/workflows/pre-release-optimism.yml | 87 +++++++++++++++++ .github/workflows/pre-release-shibarium.yml | 87 +++++++++++++++++ .github/workflows/pre-release-zksync.yml | 90 
+++++++++++++++++ .../{prerelease.yml => pre-release.yml} | 96 +++++++------------ .../publish-docker-image-every-push.yml | 50 +++++++++- .../publish-docker-image-for-eth-goerli.yml | 40 -------- .github/workflows/release-arbitrum.yml | 48 +++++++++- .github/workflows/release-eth.yml | 48 +++++++++- .github/workflows/release-filecoin.yml | 48 +++++++++- .github/workflows/release-fuse.yml | 88 +++++++++++++++++ .github/workflows/release-gnosis.yml | 52 +++++++++- .github/workflows/release-optimism.yml | 48 +++++++++- ...dditional.yml => release-polygon-edge.yml} | 59 ++++-------- .github/workflows/release-polygon-zkevm.yml | 88 +++++++++++++++++ .github/workflows/release-rootstock.yml | 87 +++++++++++++++++ .github/workflows/release-shibarium.yml | 48 +++++++++- .github/workflows/release-stability.yml | 88 +++++++++++++++++ .github/workflows/release-suave.yml | 88 +++++++++++++++++ .github/workflows/release-zetachain.yml | 48 +++++++++- .github/workflows/release-zksync.yml | 48 +++++++++- .github/workflows/release.yml | 69 +++++++------ .../lib/block_scout_web/application.ex | 67 +++++++------ .../lib/block_scout_web/router.ex | 6 -- apps/explorer/lib/explorer/chain.ex | 40 +++++--- config/runtime.exs | 16 ++-- 27 files changed, 1366 insertions(+), 261 deletions(-) create mode 100644 .github/workflows/pre-release-eth.yml create mode 100644 .github/workflows/pre-release-optimism.yml create mode 100644 .github/workflows/pre-release-shibarium.yml create mode 100644 .github/workflows/pre-release-zksync.yml rename .github/workflows/{prerelease.yml => pre-release.yml} (53%) delete mode 100644 .github/workflows/publish-docker-image-for-eth-goerli.yml create mode 100644 .github/workflows/release-fuse.yml rename .github/workflows/{release-additional.yml => release-polygon-edge.yml} (60%) create mode 100644 .github/workflows/release-polygon-zkevm.yml create mode 100644 .github/workflows/release-rootstock.yml create mode 100644 .github/workflows/release-stability.yml create 
mode 100644 .github/workflows/release-suave.yml diff --git a/.github/workflows/config.yml b/.github/workflows/config.yml index a7f759e30465..420c0827048b 100644 --- a/.github/workflows/config.yml +++ b/.github/workflows/config.yml @@ -4,10 +4,10 @@ on: push: branches: - master + - production-arbitrum - production-core - - production-eth-experimental - - production-eth-goerli - production-eth-sepolia + - production-filecoin - production-fuse - production-optimism - production-immutable @@ -29,8 +29,6 @@ on: types: [opened, synchronize, reopened, labeled] branches: - master - - production-optimism - - production-zksync env: MIX_ENV: test diff --git a/.github/workflows/pre-release-eth.yml b/.github/workflows/pre-release-eth.yml new file mode 100644 index 000000000000..be8212be70e6 --- /dev/null +++ b/.github/workflows/pre-release-eth.yml @@ -0,0 +1,87 @@ +name: Pre-release for Ethereum + +on: + workflow_dispatch: + inputs: + number: + type: number + required: true + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Ethereum (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + platforms: | + linux/amd64 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=ethereum + + - name: Build and push Docker image for Ethereum (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + platforms: | + linux/amd64 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=ethereum + + - name: Build and push Docker image for Ethereum (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + platforms: | + linux/amd64 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=ethereum \ No newline at end of file diff --git a/.github/workflows/pre-release-optimism.yml b/.github/workflows/pre-release-optimism.yml new file mode 100644 index 000000000000..2005777fced3 --- /dev/null +++ b/.github/workflows/pre-release-optimism.yml @@ -0,0 +1,87 @@ +name: Pre-release for Optimism + +on: + workflow_dispatch: + inputs: + number: + type: number + required: true + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Optimism (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + platforms: | + linux/amd64 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism + + - name: Build and push Docker image for Optimism (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + platforms: | + linux/amd64 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism + + - name: Build and push Docker image for Optimism (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + platforms: | + linux/amd64 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism \ No newline at end of file diff --git a/.github/workflows/pre-release-shibarium.yml b/.github/workflows/pre-release-shibarium.yml new file mode 100644 index 000000000000..36080b3090ce --- /dev/null +++ b/.github/workflows/pre-release-shibarium.yml @@ -0,0 +1,87 @@ +name: Pre-release for Shibarium + +on: + workflow_dispatch: + inputs: + number: + type: number + required: true + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Shibarium (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + platforms: | + linux/amd64 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium + + - name: Build and push Docker image for Shibarium (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + platforms: | + linux/amd64 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium + + - name: Build and push Docker image for Shibarium (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + platforms: | + linux/amd64 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium \ No newline at end of file diff --git a/.github/workflows/pre-release-zksync.yml b/.github/workflows/pre-release-zksync.yml new file mode 100644 index 000000000000..84cf12af3651 --- /dev/null +++ b/.github/workflows/pre-release-zksync.yml @@ -0,0 +1,90 @@ +name: Pre-release for ZkSync + +on: + workflow_dispatch: + inputs: + number: + type: number + required: true + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for ZkSync (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=zksync + + - name: Build and push Docker image for ZkSync (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=zksync + + - name: Build and push Docker image for ZkSync (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=zksync \ No newline at end of file diff --git a/.github/workflows/prerelease.yml b/.github/workflows/pre-release.yml similarity index 53% rename from .github/workflows/prerelease.yml rename to .github/workflows/pre-release.yml index 42233668dffc..52baad99de4d 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/pre-release.yml @@ -1,4 +1,4 @@ -name: Pre-release master +name: Pre-release on: workflow_dispatch: @@ -25,93 +25,62 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Ethereum + - name: Build & Push Core Docker image (indexer + API) uses: docker/build-push-action@v5 with: context: . 
file: ./docker/Dockerfile push: true - tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + cache-from: type=registry,ref=blockscout/blockscout:buildcache + cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max + tags: blockscout/blockscout:master, blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=ethereum - - - name: Build & Push Docker image for Shibarium - uses: docker/build-push-action@v5 - with: - context: . - file: ./docker/Dockerfile - push: true - cache-from: type=registry,ref=blockscout/blockscout-shibarium:buildcache - cache-to: type=registry,ref=blockscout/blockscout-shibarium:buildcache,mode=max - tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - platforms: | - linux/amd64 - build-args: | - CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=shibarium - - - name: Build and push Docker image for ZkSync - uses: docker/build-push-action@v5 - with: - context: . 
- file: ./docker/Dockerfile - push: true - tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - platforms: | - linux/amd64 - build-args: | CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + DECODE_NOT_A_CONTRACT_CALLS=false + MIXPANEL_URL= + MIXPANEL_TOKEN= + AMPLITUDE_URL= + AMPLITUDE_API_KEY= BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=zksync - - name: Build and push Docker image for Optimism + - name: Build & Push Core Docker image (indexer) uses: docker/build-push-action@v5 with: context: . file: ./docker/Dockerfile push: true - tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + cache-from: type=registry,ref=blockscout/blockscout:buildcache + cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer platforms: | linux/amd64 + linux/arm64/v8 build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + ADMIN_PANEL_ENABLED=false + DECODE_NOT_A_CONTRACT_CALLS=false + MIXPANEL_URL= + MIXPANEL_TOKEN= + AMPLITUDE_URL= + AMPLITUDE_API_KEY= + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=optimism - - name: Build & Push Docker image + - name: Build & Push Core Docker image (API) uses: 
docker/build-push-action@v5 with: context: . @@ -119,22 +88,23 @@ jobs: push: true cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max - tags: blockscout/blockscout:master, blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api platforms: | linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= + DISABLE_INDEXER=true + DISABLE_WEBAPP=true API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} \ No newline at end of file + RELEASE_VERSION=${{ env.RELEASE_VERSION }} diff --git a/.github/workflows/publish-docker-image-every-push.yml b/.github/workflows/publish-docker-image-every-push.yml index 6ec06ee0e85d..17213d3d0b22 100644 --- a/.github/workflows/publish-docker-image-every-push.yml +++ b/.github/workflows/publish-docker-image-every-push.yml @@ -35,7 +35,7 @@ jobs: echo "::set-output name=short-sha::${{ env.SHORT_SHA }}" id: output-step - - name: Build and push Docker image + - name: Build and push Docker image (indexer + API) uses: docker/build-push-action@v5 with: context: . 
@@ -45,18 +45,62 @@ jobs: cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:master, blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }} build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + DECODE_NOT_A_CONTRACT_CALLS=false + MIXPANEL_URL= + MIXPANEL_TOKEN= + AMPLITUDE_URL= + AMPLITUDE_API_KEY= + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + + - name: Build and push Docker image (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}-indexer + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + + - name: Build and push Docker image (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}-api + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + DECODE_NOT_A_CONTRACT_CALLS=false + MIXPANEL_URL= + MIXPANEL_TOKEN= + AMPLITUDE_URL= + AMPLITUDE_API_KEY= BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} diff --git a/.github/workflows/publish-docker-image-for-eth-goerli.yml b/.github/workflows/publish-docker-image-for-eth-goerli.yml deleted file mode 100644 index 262802e27ec2..000000000000 --- a/.github/workflows/publish-docker-image-for-eth-goerli.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: ETH Goerli Publish Docker image - -on: - workflow_dispatch: - push: - branches: - - production-eth-goerli -jobs: - push_to_registry: - name: Push Docker image to Docker Hub - runs-on: ubuntu-latest - env: - RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} - DOCKER_CHAIN_NAME: eth-goerli - steps: - - uses: actions/checkout@v4 - - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha - with: - docker-username: ${{ secrets.DOCKER_USERNAME }} - docker-password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Build and push Docker image - uses: docker/build-push-action@v5 - with: - context: . 
- file: ./docker/Dockerfile - push: true - tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} - build-args: | - CHAIN_TYPE=ethereum - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} \ No newline at end of file diff --git a/.github/workflows/release-arbitrum.yml b/.github/workflows/release-arbitrum.yml index 8ba7fafae2c3..2f1147d2a6a5 100644 --- a/.github/workflows/release-arbitrum.yml +++ b/.github/workflows/release-arbitrum.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Arbitrum + - name: Build and push Docker image for Arbitrum (indexer + API) uses: docker/build-push-action@v5 with: context: . @@ -33,10 +33,52 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=arbitrum + + - name: Build and push Docker image for Arbitrum (indexer) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=arbitrum + + - name: Build and push Docker image for Arbitrum (API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= diff --git a/.github/workflows/release-eth.yml b/.github/workflows/release-eth.yml index b6d1b6743e49..90f35e2fc1ea 100644 --- a/.github/workflows/release-eth.yml +++ b/.github/workflows/release-eth.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Ethereum + - name: Build and push Docker image for Ethereum (indexer + API) uses: docker/build-push-action@v5 with: context: . 
@@ -33,13 +33,55 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=ethereum + + - name: Build and push Docker image for Ethereum (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=ethereum + + - name: Build and push Docker image for Ethereum (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum \ No newline at end of file diff --git a/.github/workflows/release-filecoin.yml b/.github/workflows/release-filecoin.yml index d8b77901a75d..60fbea53c7cb 100644 --- a/.github/workflows/release-filecoin.yml +++ b/.github/workflows/release-filecoin.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Filecoin + - name: Build and push Docker image for Filecoin (indexer + API) uses: docker/build-push-action@v5 with: context: . @@ -33,13 +33,55 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=filecoin + + - name: Build and push Docker image for Filecoin (indexer) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=filecoin + + - name: Build and push Docker image for Filecoin (API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=filecoin \ No newline at end of file diff --git a/.github/workflows/release-fuse.yml b/.github/workflows/release-fuse.yml new file mode 100644 index 000000000000..3bd751519452 --- /dev/null +++ b/.github/workflows/release-fuse.yml @@ -0,0 +1,88 @@ +name: Release for Fuse + +on: + workflow_dispatch: + release: + types: [published] + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Fuse (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-fuse:latest, blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + BRIDGED_TOKENS_ENABLED=true + + - name: Build and push Docker image for Fuse (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + BRIDGED_TOKENS_ENABLED=true + + - name: Build and push Docker image for Fuse (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + BRIDGED_TOKENS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-gnosis.yml b/.github/workflows/release-gnosis.yml index bdabb752b213..e39c327e84a5 100644 --- a/.github/workflows/release-gnosis.yml +++ b/.github/workflows/release-gnosis.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Gnosis chain + - name: Build and push Docker image for Gnosis chain (indexer + API) uses: docker/build-push-action@v5 with: context: . @@ -33,14 +33,58 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - BRIDGED_TOKENS_ENABLED=true - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + BRIDGED_TOKENS_ENABLED=true + CHAIN_TYPE=ethereum + + - name: Build and push Docker image for Gnosis chain (indexer) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-xdai:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + BRIDGED_TOKENS_ENABLED=true + CHAIN_TYPE=ethereum + + - name: Build and push Docker image for Gnosis chain (API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-xdai:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} + BRIDGED_TOKENS_ENABLED=true CHAIN_TYPE=ethereum \ No newline at end of file diff --git a/.github/workflows/release-optimism.yml b/.github/workflows/release-optimism.yml index 486d73967cfd..fea08eada83b 100644 --- a/.github/workflows/release-optimism.yml +++ b/.github/workflows/release-optimism.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Optimism + - name: Build and push Docker image for Optimism (indexer + API) uses: docker/build-push-action@v5 with: context: . 
@@ -33,13 +33,55 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism + + - name: Build and push Docker image for Optimism (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism + + - name: Build and push Docker image for Optimism (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism \ No newline at end of file diff --git a/.github/workflows/release-additional.yml b/.github/workflows/release-polygon-edge.yml similarity index 60% rename from .github/workflows/release-additional.yml rename to .github/workflows/release-polygon-edge.yml index be7db50858fa..58f92636e208 100644 --- a/.github/workflows/release-additional.yml +++ b/.github/workflows/release-polygon-edge.yml @@ -1,6 +1,7 @@ -name: Release additional +name: Release for Polygon Edge on: + workflow_dispatch: release: types: [published] @@ -22,90 +23,66 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Rootstock + - name: Build and push Docker image for Polygon Edge (indexer + api) uses: docker/build-push-action@v5 with: context: . 
file: ./docker/Dockerfile push: true - tags: blockscout/blockscout-rsk:latest, blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }} + tags: blockscout/blockscout-polygon-edge:latest, blockscout/blockscout-polygon-edge:${{ env.RELEASE_VERSION }} platforms: | linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta - RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=rsk - - - name: Build and push Docker image for Stability - uses: docker/build-push-action@v5 - with: - context: . - file: ./docker/Dockerfile - push: true - tags: blockscout/blockscout-stability:latest, blockscout/blockscout-stability:${{ env.RELEASE_VERSION }} - platforms: | - linux/amd64 - linux/arm64/v8 - build-args: | - CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=stability + CHAIN_TYPE=polygon_edge - - name: Build and push Docker image for Fuse + - name: Build and push Docker image for Polygon Edge (indexer) uses: docker/build-push-action@v5 with: context: . 
file: ./docker/Dockerfile push: true - tags: blockscout/blockscout-fuse:latest, blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }} + tags: blockscout/blockscout-polygon-edge:${{ env.RELEASE_VERSION }}-indexer platforms: | linux/amd64 linux/arm64/v8 build-args: | - BRIDGED_TOKENS_ENABLED=true + DISABLE_API=true + DISABLE_WEBAPP=true CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=polygon_edge - - name: Build and push Docker image for Polygon Edge + - name: Build and push Docker image for Polygon Edge (API) uses: docker/build-push-action@v5 with: context: . file: ./docker/Dockerfile push: true - tags: blockscout/blockscout-polygon-edge:latest, blockscout/blockscout-polygon-edge:${{ env.RELEASE_VERSION }} + tags: blockscout/blockscout-polygon-edge:${{ env.RELEASE_VERSION }}-api platforms: | linux/amd64 linux/arm64/v8 build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_edge \ No newline at end of file diff --git a/.github/workflows/release-polygon-zkevm.yml b/.github/workflows/release-polygon-zkevm.yml new file mode 100644 index 000000000000..c3d1be27d15a --- /dev/null +++ b/.github/workflows/release-polygon-zkevm.yml @@ -0,0 +1,88 @@ +name: Release for Polygon zkEVM + +on: + workflow_dispatch: + release: + types: [published] + +env: + OTP_VERSION: ${{ 
vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Polygon zkEVM (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zkevm:latest, blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=polygon_zkevm + + - name: Build and push Docker image for Polygon zkEVM (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=polygon_zkevm + + - name: Build and push Docker image for Polygon zkEVM (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=polygon_zkevm \ No newline at end of file diff --git a/.github/workflows/release-rootstock.yml b/.github/workflows/release-rootstock.yml new file mode 100644 index 000000000000..b017003ea4ac --- /dev/null +++ b/.github/workflows/release-rootstock.yml @@ -0,0 +1,87 @@ +name: Release for Rootstock + +on: + release: + types: [published] + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Rootstock (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-rsk:latest, blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=rsk + + - name: Build and push Docker image for Rootstock (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=rsk + + - name: Build and push Docker image for Rootstock (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=rsk diff --git a/.github/workflows/release-shibarium.yml b/.github/workflows/release-shibarium.yml index 8ed678ee7cb9..2f147322bd29 100644 --- a/.github/workflows/release-shibarium.yml +++ b/.github/workflows/release-shibarium.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Shibarium + - name: Build and push Docker image for Shibarium (indexer + API) uses: docker/build-push-action@v5 with: context: . @@ -33,13 +33,55 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium + + - name: Build and push Docker image for Shibarium (indexer) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium + + - name: Build and push Docker image for Shibarium (API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium \ No newline at end of file diff --git a/.github/workflows/release-stability.yml b/.github/workflows/release-stability.yml new file mode 100644 index 000000000000..af32ab8c8375 --- /dev/null +++ b/.github/workflows/release-stability.yml @@ -0,0 +1,88 @@ +name: Release for Stability + +on: + workflow_dispatch: + release: + types: [published] + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Stability (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-stability:latest, blockscout/blockscout-stability:${{ env.RELEASE_VERSION }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=stability + + - name: Build and push Docker image for Stability (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-stability:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=stability + + - name: Build and push Docker image for Stability (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-stability:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=stability \ No newline at end of file diff --git a/.github/workflows/release-suave.yml b/.github/workflows/release-suave.yml new file mode 100644 index 000000000000..b521e158caec --- /dev/null +++ b/.github/workflows/release-suave.yml @@ -0,0 +1,88 @@ +name: Release for SUAVE + +on: + workflow_dispatch: + release: + types: [published] + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for SUAVE (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-suave:latest, blockscout/blockscout-suave:${{ env.RELEASE_VERSION }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=suave + + - name: Build and push Docker image for SUAVE (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-suave:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=suave + + - name: Build and push Docker image for SUAVE (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-suave:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=suave \ No newline at end of file diff --git a/.github/workflows/release-zetachain.yml b/.github/workflows/release-zetachain.yml index cd9c5abc14c9..2f021f9c743e 100644 --- a/.github/workflows/release-zetachain.yml +++ b/.github/workflows/release-zetachain.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for Zetachain + - name: Build and push Docker image for Zetachain (indexer + API) uses: docker/build-push-action@v5 with: context: . @@ -33,13 +33,55 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=zetachain + + - name: Build and push Docker image for Zetachain (indexer) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zetachain:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=zetachain + + - name: Build and push Docker image for Zetachain (API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zetachain:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zetachain \ No newline at end of file diff --git a/.github/workflows/release-zksync.yml b/.github/workflows/release-zksync.yml index 15ed069ca941..3216baf4ca92 100644 --- a/.github/workflows/release-zksync.yml +++ b/.github/workflows/release-zksync.yml @@ -23,7 +23,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image for ZkSync + - name: Build and push Docker image for ZkSync (indexer + API) uses: docker/build-push-action@v5 with: context: . 
@@ -34,13 +34,55 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=zksync + + - name: Build and push Docker image for ZkSync (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=zksync + + - name: Build and push Docker image for ZkSync (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a5b98c420728..880ea35cd376 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -22,7 +22,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build & Push Core Docker image + - name: Build & Push Core Docker image (indexer + API) uses: docker/build-push-action@v5 with: context: . @@ -35,75 +35,88 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} - - name: Build and push Docker image for zkEVM + - name: Build & Push Core Docker image (indexer) uses: docker/build-push-action@v5 with: context: . 
file: ./docker/Dockerfile push: true - tags: blockscout/blockscout-zkevm:latest, blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }} + cache-from: type=registry,ref=blockscout/blockscout:buildcache + cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-indexer platforms: | linux/amd64 linux/arm64/v8 build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + DECODE_NOT_A_CONTRACT_CALLS=false + MIXPANEL_URL= + MIXPANEL_TOKEN= + AMPLITUDE_URL= + AMPLITUDE_API_KEY= BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=polygon_zkevm - - name: Build and push Docker image for SUAVE + - name: Build & Push Core Docker image (API) uses: docker/build-push-action@v5 with: context: . 
file: ./docker/Dockerfile push: true - tags: blockscout/blockscout-suave:latest, blockscout/blockscout-suave:${{ env.RELEASE_VERSION }} + cache-from: type=registry,ref=blockscout/blockscout:buildcache + cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-api platforms: | linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= + DISABLE_INDEXER=true + DISABLE_WEBAPP=true API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + DECODE_NOT_A_CONTRACT_CALLS=false + MIXPANEL_URL= + MIXPANEL_TOKEN= + AMPLITUDE_URL= + AMPLITUDE_API_KEY= BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=suave - - name: Send release announcement to Slack workflow - id: slack - uses: slackapi/slack-github-action@v1.24.0 - with: - payload: | - { - "release-version": "${{ env.RELEASE_VERSION }}", - "release-link": "https://github.com/blockscout/blockscout/releases/tag/v${{ env.RELEASE_VERSION }}-beta" - } - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + + # - name: Send release announcement to Slack workflow + # id: slack + # uses: slackapi/slack-github-action@v1.24.0 + # with: + # payload: | + # { + # "release-version": "${{ env.RELEASE_VERSION }}", + # "release-link": "https://github.com/blockscout/blockscout/releases/tag/v${{ env.RELEASE_VERSION }}-beta" + # } + # env: + # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} # merge-master-after-release: # name: Merge 'master' to specific branch after release diff --git a/apps/block_scout_web/lib/block_scout_web/application.ex b/apps/block_scout_web/lib/block_scout_web/application.ex index 6c33c11900a6..78b3c4fdfcb2 --- 
a/apps/block_scout_web/lib/block_scout_web/application.ex +++ b/apps/block_scout_web/lib/block_scout_web/application.ex @@ -12,37 +12,7 @@ defmodule BlockScoutWeb.Application do alias BlockScoutWeb.Utility.EventHandlersMetrics def start(_type, _args) do - import Supervisor - - PhoenixInstrumenter.setup() - Exporter.setup() - - APILogger.message( - "Current global API rate limit #{inspect(Application.get_env(:block_scout_web, :api_rate_limit)[:global_limit])} reqs/sec" - ) - - APILogger.message( - "Current API rate limit by key #{inspect(Application.get_env(:block_scout_web, :api_rate_limit)[:limit_by_key])} reqs/sec" - ) - - APILogger.message( - "Current API rate limit by IP #{inspect(Application.get_env(:block_scout_web, :api_rate_limit)[:limit_by_ip])} reqs/sec" - ) - - # Define workers and child supervisors to be supervised - children = [ - # Start the endpoint when the application starts - {Phoenix.PubSub, name: BlockScoutWeb.PubSub}, - child_spec(Endpoint, []), - {Absinthe.Subscription, Endpoint}, - {MainPageRealtimeEventHandler, name: MainPageRealtimeEventHandler}, - {RealtimeEventHandler, name: RealtimeEventHandler}, - {SmartContractRealtimeEventHandler, name: SmartContractRealtimeEventHandler}, - {BlocksIndexedCounter, name: BlocksIndexedCounter}, - {InternalTransactionsIndexedCounter, name: InternalTransactionsIndexedCounter}, - {EventHandlersMetrics, []} - ] - + children = setup_and_define_children() opts = [strategy: :one_for_one, name: BlockScoutWeb.Supervisor, max_restarts: 1_000] Supervisor.start_link(children, opts) end @@ -53,4 +23,39 @@ defmodule BlockScoutWeb.Application do Endpoint.config_change(changed, removed) :ok end + + defp setup_and_define_children do + if Application.get_env(:block_scout_web, :disable_api?) 
do + [] + else + PhoenixInstrumenter.setup() + Exporter.setup() + + APILogger.message( + "Current global API rate limit #{inspect(Application.get_env(:block_scout_web, :api_rate_limit)[:global_limit])} reqs/sec" + ) + + APILogger.message( + "Current API rate limit by key #{inspect(Application.get_env(:block_scout_web, :api_rate_limit)[:limit_by_key])} reqs/sec" + ) + + APILogger.message( + "Current API rate limit by IP #{inspect(Application.get_env(:block_scout_web, :api_rate_limit)[:limit_by_ip])} reqs/sec" + ) + + # Define workers and child supervisors to be supervised + [ + # Start the endpoint when the application starts + {Phoenix.PubSub, name: BlockScoutWeb.PubSub}, + Supervisor.child_spec(Endpoint, []), + {Absinthe.Subscription, Endpoint}, + {MainPageRealtimeEventHandler, name: MainPageRealtimeEventHandler}, + {RealtimeEventHandler, name: RealtimeEventHandler}, + {SmartContractRealtimeEventHandler, name: SmartContractRealtimeEventHandler}, + {BlocksIndexedCounter, name: BlocksIndexedCounter}, + {InternalTransactionsIndexedCounter, name: InternalTransactionsIndexedCounter}, + {EventHandlersMetrics, []} + ] + end + end end diff --git a/apps/block_scout_web/lib/block_scout_web/router.ex b/apps/block_scout_web/lib/block_scout_web/router.ex index 5e29a520d4bc..4376054f7dae 100644 --- a/apps/block_scout_web/lib/block_scout_web/router.ex +++ b/apps/block_scout_web/lib/block_scout_web/router.ex @@ -94,11 +94,5 @@ defmodule BlockScoutWeb.Router do if Application.compile_env(:block_scout_web, WebRouter)[:enabled] do forward("/", BlockScoutWeb.WebRouter) - else - scope "/", BlockScoutWeb do - pipe_through(:browser) - - forward("/", APIDocsController, :index) - end end end diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 4ef2caece1d3..34480ed6f362 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -865,13 +865,7 @@ defmodule Explorer.Chain do """ @spec 
finished_indexing_internal_transactions?([api?]) :: boolean() def finished_indexing_internal_transactions?(options \\ []) do - internal_transactions_disabled? = - Application.get_env(:indexer, Indexer.Fetcher.InternalTransaction.Supervisor)[:disabled?] or - not Application.get_env(:indexer, Indexer.Supervisor)[:enabled] - - if internal_transactions_disabled? do - true - else + if indexer_running?() and internal_transactions_fetcher_running?() do json_rpc_named_arguments = Application.fetch_env!(:indexer, :json_rpc_named_arguments) variant = Keyword.fetch!(json_rpc_named_arguments, :variant) @@ -880,6 +874,8 @@ defmodule Explorer.Chain do else check_left_blocks_to_index_internal_transactions(options) end + else + true end end @@ -930,7 +926,7 @@ defmodule Explorer.Chain do """ @spec finished_indexing?([api?]) :: boolean() def finished_indexing?(options \\ []) do - if Application.get_env(:indexer, Indexer.Supervisor)[:enabled] do + if indexer_running?() do indexed_ratio = indexed_ratio_blocks() case finished_indexing_from_ratio?(indexed_ratio) do @@ -1489,7 +1485,7 @@ defmodule Explorer.Chain do """ @spec indexed_ratio_blocks() :: Decimal.t() def indexed_ratio_blocks do - if Application.get_env(:indexer, Indexer.Supervisor)[:enabled] do + if indexer_running?() do %{min: min_saved_block_number, max: max_saved_block_number} = BlockNumber.get_all() min_blockchain_block_number = Application.get_env(:indexer, :first_block) @@ -1519,8 +1515,7 @@ defmodule Explorer.Chain do @spec indexed_ratio_internal_transactions() :: Decimal.t() def indexed_ratio_internal_transactions do - if Application.get_env(:indexer, Indexer.Supervisor)[:enabled] && - not Application.get_env(:indexer, Indexer.Fetcher.InternalTransaction.Supervisor)[:disabled?] 
do + if indexer_running?() and internal_transactions_fetcher_running?() do %{max: max_saved_block_number} = BlockNumber.get_all() pbo_count = PendingBlockOperationCache.estimated_count() @@ -2146,6 +2141,15 @@ defmodule Explorer.Chain do select_repo(options).one!(query) end + def indexer_running? do + Application.get_env(:indexer, Indexer.Supervisor)[:enabled] or match?({:ok, _, _}, last_db_block_status()) + end + + def internal_transactions_fetcher_running? do + not Application.get_env(:indexer, Indexer.Fetcher.InternalTransaction.Supervisor)[:disabled?] or + match?({:ok, _, _}, last_db_internal_transaction_block_status()) + end + def last_db_block_status do query = from(block in Block, @@ -2160,6 +2164,20 @@ defmodule Explorer.Chain do |> block_status() end + def last_db_internal_transaction_block_status do + query = + from(it in InternalTransaction, + join: block in assoc(it, :block), + select: {block.number, block.timestamp}, + order_by: [desc: block.number], + limit: 1 + ) + + query + |> Repo.one() + |> block_status() + end + def last_cache_block_status do [ paging_options: %PagingOptions{page_size: 1} diff --git a/config/runtime.exs b/config/runtime.exs index 32b26f0eccff..e56f62b8109e 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -8,6 +8,8 @@ import Config ### BlockScout Web ### ###################### +disable_api? = ConfigHelper.parse_bool_env_var("DISABLE_API") + config :block_scout_web, version: System.get_env("BLOCKSCOUT_VERSION"), release_link: System.get_env("RELEASE_LINK"), @@ -32,7 +34,8 @@ config :block_scout_web, display_token_icons: ConfigHelper.parse_bool_env_var("DISPLAY_TOKEN_ICONS"), hide_block_miner: ConfigHelper.parse_bool_env_var("HIDE_BLOCK_MINER"), show_tenderly_link: ConfigHelper.parse_bool_env_var("SHOW_TENDERLY_LINK"), - sensitive_endpoints_api_key: System.get_env("API_SENSITIVE_ENDPOINTS_KEY") + sensitive_endpoints_api_key: System.get_env("API_SENSITIVE_ENDPOINTS_KEY"), + disable_api?: disable_api? 
config :block_scout_web, :recaptcha, v2_client_key: System.get_env("RE_CAPTCHA_CLIENT_KEY"), @@ -219,7 +222,7 @@ config :explorer, include_uncles_in_average_block_time: ConfigHelper.parse_bool_env_var("UNCLES_IN_AVERAGE_BLOCK_TIME"), healthy_blocks_period: ConfigHelper.parse_time_env_var("HEALTHY_BLOCKS_PERIOD", "5m"), realtime_events_sender: - if(disable_webapp?, + if(disable_api? or disable_webapp?, do: Explorer.Chain.Events.DBSender, else: Explorer.Chain.Events.SimpleSender ), @@ -236,12 +239,7 @@ config :explorer, :proxy, fallback_cached_implementation_data_ttl: :timer.seconds(4), implementation_data_fetching_timeout: :timer.seconds(2) -config :explorer, Explorer.Chain.Events.Listener, - enabled: - if(disable_webapp? && disable_indexer?, - do: false, - else: true - ) +config :explorer, Explorer.Chain.Events.Listener, enabled: disable_indexer? precompiled_config_base_dir = case config_env() do @@ -754,7 +752,7 @@ config :indexer, Indexer.Fetcher.InternalTransaction, batch_size: ConfigHelper.parse_integer_env_var("INDEXER_INTERNAL_TRANSACTIONS_BATCH_SIZE", 10), concurrency: ConfigHelper.parse_integer_env_var("INDEXER_INTERNAL_TRANSACTIONS_CONCURRENCY", 4), indexing_finished_threshold: - ConfigHelper.parse_integer_env_var("INDEXER_INTERNAL_TRANSACTIONS_INDEXING_FINISHED_THRESHOLD", 1000) + ConfigHelper.parse_integer_env_var("API_INTERNAL_TRANSACTIONS_INDEXING_FINISHED_THRESHOLD", 1000) coin_balances_batch_size = ConfigHelper.parse_integer_env_var("INDEXER_COIN_BALANCES_BATCH_SIZE", 100) coin_balances_concurrency = ConfigHelper.parse_integer_env_var("INDEXER_COIN_BALANCES_CONCURRENCY", 4) From 379e81afc223c8f1da3d3b7ca49b31b384f13f3b Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Tue, 14 May 2024 18:31:00 +0300 Subject: [PATCH 019/150] =?UTF-8?q?feat:=20Add=20optional=20retry=20of=20N?= =?UTF-8?q?FT=20metadata=20fetch=20in=20Indexer.Fetcher.Tok=E2=80=A6=20(#1?= =?UTF-8?q?0036)?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add optional retry of NFT metadata fetch in Indexer.Fetcher.TokenInstance.Realtime * Rename env * Process review comments * Rename var --- apps/indexer/lib/indexer/buffered_task.ex | 4 + .../indexer/fetcher/token_instance/helper.ex | 8 +- .../fetcher/token_instance/realtime.ex | 56 +++++++- .../fetcher/token_instance/helper_test.exs | 1 + .../fetcher/token_instance/realtime_test.exs | 123 ++++++++++++++++++ config/runtime.exs | 4 +- 6 files changed, 192 insertions(+), 4 deletions(-) create mode 100644 apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs diff --git a/apps/indexer/lib/indexer/buffered_task.ex b/apps/indexer/lib/indexer/buffered_task.ex index e728e802c621..51b9d995afe7 100644 --- a/apps/indexer/lib/indexer/buffered_task.ex +++ b/apps/indexer/lib/indexer/buffered_task.ex @@ -277,6 +277,10 @@ defmodule Indexer.BufferedTask do {:noreply, drop_task_and_retry(state, ref)} end + def handle_info({:buffer, entries}, state) do + {:noreply, buffer_entries(state, entries)} + end + def handle_call({:buffer, entries}, _from, state) do {:reply, :ok, buffer_entries(state, entries)} end diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex index 83c405af2726..2608d377aca6 100644 --- a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex +++ b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex @@ -286,7 +286,13 @@ defmodule Indexer.Fetcher.TokenInstance.Helper do rescue error in Postgrex.Error -> if retrying? 
do - Logger.warn(["Failed to upsert token instance: #{inspect(error)}"], fetcher: :token_instances) + Logger.warn( + [ + "Failed to upsert token instance: {#{to_string(token_contract_address_hash)}, #{token_id}}, error: #{inspect(error)}" + ], + fetcher: :token_instances + ) + nil else token_id diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex b/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex index 14375b16018a..3c5eabc0b400 100644 --- a/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex +++ b/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex @@ -9,6 +9,7 @@ defmodule Indexer.Fetcher.TokenInstance.Realtime do import Indexer.Fetcher.TokenInstance.Helper alias Explorer.Chain + alias Explorer.Chain.Token.Instance alias Indexer.BufferedTask @behaviour BufferedTask @@ -16,6 +17,8 @@ defmodule Indexer.Fetcher.TokenInstance.Realtime do @default_max_batch_size 1 @default_max_concurrency 10 + @errors_whitelisted_for_retry ["request error: 404", "request error: 500"] + @doc false def child_spec([init_options, gen_server_options]) do merged_init_opts = @@ -33,11 +36,16 @@ defmodule Indexer.Fetcher.TokenInstance.Realtime do @impl BufferedTask def run(token_instances, _) when is_list(token_instances) do + retry? = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Realtime)[:retry_with_cooldown?] + + token_instances_retry_map = token_instance_to_retry_map(retry?, token_instances) + token_instances - |> Enum.filter(fn %{contract_address_hash: hash, token_id: token_id} -> - Chain.token_instance_with_unfetched_metadata?(token_id, hash) + |> Enum.filter(fn %{contract_address_hash: hash, token_id: token_id} = instance -> + instance[:retry?] 
|| Chain.token_instance_with_unfetched_metadata?(token_id, hash) end) |> batch_fetch_instances() + |> retry_some_instances(retry?, token_instances_retry_map) :ok end @@ -73,6 +81,50 @@ defmodule Indexer.Fetcher.TokenInstance.Realtime do BufferedTask.buffer(__MODULE__, data) end + @spec retry_some_instances([map()], boolean(), map()) :: any() + defp retry_some_instances(token_instances, true, token_instances_retry_map) do + token_instances_to_refetch = + Enum.flat_map(token_instances, fn + {:ok, %Instance{metadata: nil, error: error} = instance} + when error in @errors_whitelisted_for_retry -> + if token_instances_retry_map[{instance.token_contract_address_hash.bytes, instance.token_id}] do + [] + else + [ + %{ + contract_address_hash: instance.token_contract_address_hash, + token_id: instance.token_id, + retry?: true + } + ] + end + + _ -> + [] + end) + + if token_instances_to_refetch != [] do + timeout = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Realtime)[:retry_timeout] + Process.send_after(__MODULE__, {:buffer, token_instances_to_refetch}, timeout) + end + end + + defp retry_some_instances(_, _, _), do: nil + + defp token_instance_to_retry_map(false, _token_instances), do: nil + + defp token_instance_to_retry_map(true, token_instances) do + token_instances + |> Enum.flat_map(fn + %{contract_address_hash: hash, token_id: token_id, retry?: true} -> + [{{hash.bytes, token_id}, true}] + + _ -> + [] + end) + |> Enum.into(%{}) + end + defp defaults do [ flush_interval: 100, diff --git a/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs b/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs index 2b250a6d5906..3c13ce2abea3 100644 --- a/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs +++ b/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs @@ -16,6 +16,7 @@ defmodule Indexer.Fetcher.TokenInstance.HelperTest do bypass = Bypass.open() on_exit(fn -> Bypass.down(bypass) end) + {:ok, bypass: bypass} 
end diff --git a/apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs b/apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs new file mode 100644 index 000000000000..542b2954ad6d --- /dev/null +++ b/apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs @@ -0,0 +1,123 @@ +defmodule Indexer.Fetcher.TokenInstance.RealtimeTest do + use EthereumJSONRPC.Case + use Explorer.DataCase + + import Mox + + alias Explorer.Repo + alias Explorer.Chain.Token.Instance + alias Indexer.Fetcher.TokenInstance.Realtime, as: TokenInstanceRealtime + alias Plug.Conn + + setup :verify_on_exit! + setup :set_mox_global + + describe "Check how works retry in realtime" do + setup do + config = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Realtime) + new_config = config |> Keyword.put(:retry_with_cooldown?, true) |> Keyword.put(:retry_timeout, 100) + + Application.put_env(:indexer, Indexer.Fetcher.TokenInstance.Realtime, new_config) + + on_exit(fn -> + Application.put_env(:indexer, Indexer.Fetcher.TokenInstance.Realtime, config) + end) + + :ok + end + + test "retry once after timeout" do + bypass = Bypass.open() + + [] + |> TokenInstanceRealtime.Supervisor.child_spec() + |> ExUnit.Callbacks.start_supervised!() + + json = """ + { + "name": "name" + } + """ + + encoded_url = + "0x" <> + (ABI.TypeEncoder.encode(["http://localhost:#{bypass.port}/api/card/{id}"], %ABI.FunctionSelector{ + function: nil, + types: [ + :string + ] + }) + |> Base.encode16(case: :lower)) + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn [ + %{ + id: 0, + jsonrpc: "2.0", + method: "eth_call", + params: [ + %{ + data: + "0x0e89341c0000000000000000000000000000000000000000000000000000000000000309", + to: "0x5caebd3b32e210e85ce3e9d51638b9c445481567" + }, + "latest" + ] + } + ], + _options -> + {:ok, + [ + %{ + id: 0, + jsonrpc: "2.0", + result: encoded_url + } + ]} + end) + + Bypass.expect_once( + bypass, + "GET", + 
"/api/card/0000000000000000000000000000000000000000000000000000000000000309", + fn conn -> + Conn.resp(conn, 404, "Not found") + end + ) + + Bypass.expect_once( + bypass, + "GET", + "/api/card/0000000000000000000000000000000000000000000000000000000000000309", + fn conn -> + Conn.resp(conn, 200, json) + end + ) + + token = + insert(:token, + contract_address: build(:address, hash: "0x5caebd3b32e210e85ce3e9d51638b9c445481567"), + type: "ERC-1155" + ) + + insert(:token_instance, + token_id: 777, + token_contract_address_hash: token.contract_address_hash, + metadata: nil, + error: nil + ) + + TokenInstanceRealtime.async_fetch([ + %{token_contract_address_hash: token.contract_address_hash, token_ids: [Decimal.new(777)]} + ]) + + :timer.sleep(150) + + [instance] = Repo.all(Instance) + + assert is_nil(instance.error) + assert instance.metadata == %{"name" => "name"} + Bypass.down(bypass) + end + end +end diff --git a/config/runtime.exs b/config/runtime.exs index e56f62b8109e..bd08ca5b8bfb 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -728,7 +728,9 @@ config :indexer, Indexer.Fetcher.TokenInstance.Retry, config :indexer, Indexer.Fetcher.TokenInstance.Realtime, concurrency: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_REALTIME_CONCURRENCY", 10), - batch_size: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_REALTIME_BATCH_SIZE", 1) + batch_size: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_REALTIME_BATCH_SIZE", 1), + retry_with_cooldown?: ConfigHelper.parse_bool_env_var("INDEXER_TOKEN_INSTANCE_REALTIME_RETRY_ENABLED"), + retry_timeout: ConfigHelper.parse_time_env_var("INDEXER_TOKEN_INSTANCE_REALTIME_RETRY_TIMEOUT", "5s") config :indexer, Indexer.Fetcher.TokenInstance.Sanitize, concurrency: ConfigHelper.parse_integer_env_var("INDEXER_TOKEN_INSTANCE_SANITIZE_CONCURRENCY", 10), From 4d4f355b0d1d64a14925bd861368e92658a75afb Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Wed, 15 May 2024 12:30:16 +0300 Subject: [PATCH 
020/150] fix: Eliminate from_address_hash == #{address_hash} clause for transactions query in case of smart-contracts (#9469) * Eliminate from_address_hash equal to address clause for transactions query in case of smart-contracts * Fix tests * Fix web tests * Fix web tests * Process review comments * Update apps/explorer/lib/explorer/etherscan/logs.ex Co-authored-by: Kirill Fedoseev * mix format * Remove or_where import * Update smart-contract controller test --------- Co-authored-by: Kirill Fedoseev --- .../api/v2/smart_contract_controller_test.exs | 3 ++- .../lib/explorer/chain/address/counters.ex | 8 ++++---- .../lib/explorer/chain/transaction.ex | 18 ++++++++++++++++++ apps/explorer/lib/explorer/etherscan/logs.ex | 19 +++++++++++++------ apps/explorer/lib/explorer/graphql.ex | 5 +++-- 5 files changed, 40 insertions(+), 13 deletions(-) diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs index d72079979e91..99f30a208555 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs @@ -671,7 +671,8 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do "is_verified_via_verifier_alliance" => implementation_contract.verified_via_verifier_alliance, "language" => smart_contract_language(implementation_contract), "license_type" => "bsd_3_clause", - "certified" => false + "certified" => false, + "is_blueprint" => false } request = get(conn, "/api/v2/smart-contracts/#{Address.checksum(proxy_address.hash)}") diff --git a/apps/explorer/lib/explorer/chain/address/counters.ex b/apps/explorer/lib/explorer/chain/address/counters.ex index bdd08ecb72c9..be88c3c33d5a 100644 --- a/apps/explorer/lib/explorer/chain/address/counters.ex +++ 
b/apps/explorer/lib/explorer/chain/address/counters.ex @@ -128,10 +128,10 @@ defmodule Explorer.Chain.Address.Counters do end def address_hash_to_transaction_count_query(address_hash) do - from( - transaction in Transaction, - where: transaction.to_address_hash == ^address_hash or transaction.from_address_hash == ^address_hash - ) + dynamic = Transaction.where_transactions_to_from(address_hash) + + Transaction + |> where([transaction], ^dynamic) end @spec address_hash_to_transaction_count(Hash.Address.t()) :: non_neg_integer() diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index 8b5b962e8cf1..18ceb8a0b607 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -1749,4 +1749,22 @@ defmodule Explorer.Chain.Transaction do |> Decimal.min(max_fee_per_gas |> Wei.sub(base_fee_per_gas) |> Wei.to(:wei)) |> Wei.from(:wei) end + + @doc """ + Dynamically adds to/from for `transactions` query based on whether the target address EOA or smart-contract + todo: pay attention to [EIP-5003](https://eips.ethereum.org/EIPS/eip-5003): if it will be included, this logic should be rolled back. 
+ """ + @spec where_transactions_to_from(Hash.Address.t()) :: any() + def where_transactions_to_from(address_hash) do + with {:ok, address} <- Chain.hash_to_address(address_hash), + true <- Chain.contract?(address) do + dynamic([transaction], transaction.to_address_hash == ^address_hash) + else + _ -> + dynamic( + [transaction], + transaction.from_address_hash == ^address_hash or transaction.to_address_hash == ^address_hash + ) + end + end end diff --git a/apps/explorer/lib/explorer/etherscan/logs.ex b/apps/explorer/lib/explorer/etherscan/logs.ex index c3b79a439fc8..3eecb33b5e4b 100644 --- a/apps/explorer/lib/explorer/etherscan/logs.ex +++ b/apps/explorer/lib/explorer/etherscan/logs.ex @@ -5,7 +5,7 @@ defmodule Explorer.Etherscan.Logs do """ - import Ecto.Query, only: [from: 2, limit: 2, where: 3, subquery: 1, order_by: 3, union: 2] + import Ecto.Query, only: [dynamic: 2, from: 2, limit: 2, where: 3, subquery: 1, order_by: 3, union: 2] alias Explorer.{Chain, Repo} alias Explorer.Chain.{Block, DenormalizationHelper, InternalTransaction, Log, Transaction} @@ -145,16 +145,12 @@ defmodule Explorer.Etherscan.Logs do |> union(^query_from_address_hash_wrapped) |> union(^query_created_contract_address_hash_wrapped) - all_transaction_logs_query = + all_transaction_logs_query_base = from(transaction in Transaction, join: log in ^logs_query, on: log.transaction_hash == transaction.hash, where: transaction.block_number >= ^prepared_filter.from_block, where: transaction.block_number <= ^prepared_filter.to_block, - where: - transaction.to_address_hash == ^address_hash or - transaction.from_address_hash == ^address_hash or - transaction.created_contract_address_hash == ^address_hash, select: map(log, ^@log_fields), select_merge: %{ gas_price: transaction.gas_price, @@ -165,6 +161,17 @@ defmodule Explorer.Etherscan.Logs do union: ^internal_transaction_log_query ) + dynamic = + dynamic( + [transaction], + ^Transaction.where_transactions_to_from(address_hash) or + 
transaction.created_contract_address_hash == ^address_hash + ) + + all_transaction_logs_query = + all_transaction_logs_query_base + |> where([transaction], ^dynamic) + query_with_blocks = from(log_transaction_data in subquery(all_transaction_logs_query), join: block in Block, diff --git a/apps/explorer/lib/explorer/graphql.ex b/apps/explorer/lib/explorer/graphql.ex index d577a7052bbc..feb318fde3ec 100644 --- a/apps/explorer/lib/explorer/graphql.ex +++ b/apps/explorer/lib/explorer/graphql.ex @@ -28,9 +28,10 @@ defmodule Explorer.GraphQL do """ @spec address_to_transactions_query(Hash.Address.t(), :desc | :asc) :: Ecto.Query.t() def address_to_transactions_query(address_hash, order) do + dynamic = Transaction.where_transactions_to_from(address_hash) + Transaction - |> where([transaction], transaction.to_address_hash == ^address_hash) - |> or_where([transaction], transaction.from_address_hash == ^address_hash) + |> where([transaction], ^dynamic) |> or_where([transaction], transaction.created_contract_address_hash == ^address_hash) |> order_by([transaction], [{^order, transaction.block_number}, {^order, transaction.index}]) end From 7f6ee61c109f254b3abefc7b2a8b5de8430e04b5 Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Wed, 15 May 2024 17:27:01 +0400 Subject: [PATCH 021/150] fix: Rework revert_reason (#9212) * fix: rework revert_reason * fix: format * fix: tests * fix: chain_type_fields * chore: refactor * fix: decode functions refactor --- .../_function_response.html.eex | 8 +- .../templates/transaction/overview.html.eex | 2 +- .../views/api/v2/smart_contract_view.ex | 4 +- .../views/smart_contract_view.ex | 10 -- .../block_scout_web/views/transaction_view.ex | 28 ++-- .../api/rpc/transaction_controller_test.exs | 63 +++++++- .../views/transaction_view_test.exs | 16 ++- .../lib/ethereum_jsonrpc/nethermind/trace.ex | 46 +++--- .../trace_replay_block_transactions.ex | 6 +- .../lib/ethereum_jsonrpc/transaction.ex | 16 +-- apps/explorer/lib/explorer/chain.ex | 136 
++++++++++-------- .../import/runner/internal_transactions.ex | 1 + .../explorer/chain/internal_transaction.ex | 39 ++--- .../lib/explorer/chain/transaction.ex | 61 ++++++-- apps/explorer/test/explorer/chain_test.exs | 54 ++++++- 15 files changed, 317 insertions(+), 173 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/templates/smart_contract/_function_response.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/smart_contract/_function_response.html.eex index 166e434cad02..90d2da90221c 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/smart_contract/_function_response.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/smart_contract/_function_response.html.eex @@ -4,8 +4,8 @@ <%= case @outputs do %> - <% {:error, %{code: code, message: message, data: data}} -> %> - <% revert_reason = Chain.format_revert_reason_message(data) %> + <% {:error, %{code: code, message: message, data: _data} = error} -> %> + <% revert_reason = Chain.parse_revert_reason_from_error(error) %> <%= case decode_revert_reason(@smart_contract_address, revert_reason) do %> <% {:ok, _identifier, text, mapping} -> %>
<%= raw(values_with_type(text, :error, nil)) %>
@@ -15,7 +15,7 @@ <% end %>
<% {:error, _contract_verified, []} -> %> - <% decoded_revert_reason = decode_hex_revert_reason(revert_reason) %> + <% decoded_revert_reason = BlockScoutWeb.TransactionView.decode_hex_revert_reason_as_utf8(revert_reason) %>
<%= "(#{code}) #{message} (#{if String.valid?(decoded_revert_reason), do: decoded_revert_reason, else: revert_reason})" %>
<% {:error, _contract_verified, candidates} -> %> <% {:ok, _identifier, text, mapping} = Enum.at(candidates, 0) %> @@ -26,7 +26,7 @@ <% end %> <% _ -> %> - <% decoded_revert_reason = decode_hex_revert_reason(revert_reason) %> + <% decoded_revert_reason = BlockScoutWeb.TransactionView.decode_hex_revert_reason_as_utf8(revert_reason) %>
<%= "(#{code}) #{message} (#{if String.valid?(decoded_revert_reason), do: decoded_revert_reason, else: revert_reason})" %>
<% end %> <% {:error, %{code: code, message: message}} -> %> diff --git a/apps/block_scout_web/lib/block_scout_web/templates/transaction/overview.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/transaction/overview.html.eex index 21229d73ade2..a73d4701200e 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/transaction/overview.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/transaction/overview.html.eex @@ -145,7 +145,7 @@ <%= render(BlockScoutWeb.TransactionView, "_decoded_input_body.html", method_id: method_id, text: text, mapping: mapping, error: true) %> <% _ -> %> <% hex = BlockScoutWeb.TransactionView.get_pure_transaction_revert_reason(@transaction) %> - <% utf8 = BlockScoutWeb.TransactionView.decoded_revert_reason(@transaction) %> + <% utf8 = BlockScoutWeb.TransactionView.decode_revert_reason_as_utf8(hex) %>
Raw:<%= raw("\t") %><%= hex %><%= raw("\n") %>UTF-8:<%= raw("\t") %><%= utf8 %>
diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex index ad46ccef531a..d208826869fc 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex @@ -55,8 +55,8 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do def prepare_function_response(outputs, names, contract_address_hash) do case outputs do - {:error, %{code: code, message: message, data: data}} -> - revert_reason = Chain.format_revert_reason_message(data) + {:error, %{code: code, message: message, data: _data} = error} -> + revert_reason = Chain.parse_revert_reason_from_error(error) case SmartContractView.decode_revert_reason(contract_address_hash, revert_reason, @api_true) do {:ok, method_id, text, mapping} -> diff --git a/apps/block_scout_web/lib/block_scout_web/views/smart_contract_view.ex b/apps/block_scout_web/lib/block_scout_web/views/smart_contract_view.ex index 3174a9130637..6ceeeb6561fe 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/smart_contract_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/smart_contract_view.ex @@ -221,16 +221,6 @@ defmodule BlockScoutWeb.SmartContractView do ) end - def decode_hex_revert_reason(hex_revert_reason) do - case Integer.parse(hex_revert_reason, 16) do - {number, ""} -> - :binary.encode_unsigned(number) - - _ -> - hex_revert_reason - end - end - def not_last_element?(length, index), do: length > 1 and index < length - 1 def cut_rpc_url(error) do diff --git a/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex b/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex index 587c57cf062f..713a890d66e1 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex @@ -609,28 +609,26 @@ defmodule 
BlockScoutWeb.TransactionView do end # Function decodes revert reason of the transaction - @spec decoded_revert_reason(Transaction.t() | nil) :: binary() | nil - def decoded_revert_reason(transaction) do - revert_reason = get_pure_transaction_revert_reason(transaction) - + @spec decode_revert_reason_as_utf8(binary() | nil) :: binary() | nil + def decode_revert_reason_as_utf8(revert_reason) do case revert_reason do + nil -> + nil + "0x" <> hex_part -> - process_hex_revert_reason(hex_part) + decode_hex_revert_reason_as_utf8(hex_part) hex_part -> - process_hex_revert_reason(hex_part) + decode_hex_revert_reason_as_utf8(hex_part) end end - # Function converts hex revert reason to the binary - @spec process_hex_revert_reason(nil) :: nil - defp process_hex_revert_reason(nil), do: nil - - @spec process_hex_revert_reason(binary()) :: binary() - defp process_hex_revert_reason(hex_revert_reason) do - case Integer.parse(hex_revert_reason, 16) do - {number, ""} -> - :binary.encode_unsigned(number) + # Function converts hex revert reason to the utf8 binary + @spec decode_hex_revert_reason_as_utf8(binary()) :: binary() + def decode_hex_revert_reason_as_utf8(hex_revert_reason) do + case Base.decode16(hex_revert_reason, case: :mixed) do + {:ok, revert_reason} -> + revert_reason _ -> hex_revert_reason diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs index 8e30a3e68046..bda682e10388 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs @@ -660,11 +660,25 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do insert(:address) + # Error("No credit of that type") + hex_reason = + 
"0x08c379a0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000164e6f20637265646974206f662074686174207479706500000000000000000000" + + # fail to trace_replayTransaction expect( EthereumJSONRPC.Mox, :json_rpc, fn _json, [] -> - {:error, %{code: -32015, message: "VM execution error.", data: "revert: No credit of that type"}} + {:error, :econnrefused} + end + ) + + # fallback to eth_call + expect( + EthereumJSONRPC.Mox, + :json_rpc, + fn _json, [] -> + {:error, %{code: -32015, message: "VM execution error.", data: hex_reason}} end ) @@ -679,7 +693,7 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do |> get("/api", params) |> json_response(200) - assert response["result"]["revertReason"] == "No credit of that type" + assert response["result"]["revertReason"] == hex_reason assert response["status"] == "1" assert response["message"] == "OK" end @@ -707,7 +721,38 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do EthereumJSONRPC.Mox, :json_rpc, fn _json, [] -> - {:error, %{code: -32015, message: "VM execution error.", data: ""}} + {:ok, + [ + %{ + id: 0, + result: %{ + "output" => "0x", + "stateDiff" => nil, + "trace" => [ + %{ + "action" => %{ + "callType" => "call", + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "input" => "0x01", + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "value" => "0x0" + }, + "error" => "Reverted", + "result" => %{ + "gasUsed" => "0x5208", + "output" => "0x" + }, + "subtraces" => 0, + "traceAddress" => [], + "type" => "call" + } + ], + "transactionHash" => "0xac2a7dab94d965893199e7ee01649e2d66f0787a4c558b3118c09e80d4df8269", + "vmTrace" => nil + } + } + ]} end ) @@ -722,7 +767,7 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do |> get("/api", params) |> json_response(200) - assert response["result"]["revertReason"] == "" + assert response["result"]["revertReason"] == "0x" assert 
response["status"] == "1" assert response["message"] == "OK" end @@ -745,6 +790,16 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do insert(:address) + # fail to trace_replayTransaction + expect( + EthereumJSONRPC.Mox, + :json_rpc, + fn _json, [] -> + {:error, :econnrefused} + end + ) + + # fallback to eth_call expect( EthereumJSONRPC.Mox, :json_rpc, diff --git a/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs b/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs index 653020462e8e..7ce84f652ae9 100644 --- a/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs +++ b/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs @@ -293,6 +293,16 @@ defmodule BlockScoutWeb.TransactionViewTest do test "handles transactions with gas_price set to nil" do transaction = insert(:transaction, gas_price: nil, error: "execution reverted") + # fail to trace_replayTransaction + EthereumJSONRPC.Mox + |> expect( + :json_rpc, + fn _json, [] -> + {:error, :econnrefused} + end + ) + + # fallback to eth_call EthereumJSONRPC.Mox |> expect(:json_rpc, fn %{ id: 0, @@ -312,7 +322,11 @@ defmodule BlockScoutWeb.TransactionViewTest do revert_reason = TransactionView.transaction_revert_reason(transaction, nil) - assert revert_reason == {:error, :not_a_contract_call} + assert revert_reason == + {:ok, "08c379a0", "Error(string reason)", + [ + {"reason", "string", "UniswapV2Router: INSUFFICIENT_OUTPUT_AMOUNT"} + ]} end end end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/nethermind/trace.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/nethermind/trace.ex index 047a4d6bbd8f..6e8cf42af866 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/nethermind/trace.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/nethermind/trace.ex @@ -4,6 +4,7 @@ defmodule EthereumJSONRPC.Nethermind.Trace do 
[`trace_replayTransaction`](https://openethereum.github.io/JSONRPC-trace-module#trace_replaytransaction). """ + import EthereumJSONRPC.Transaction, only: [put_if_present: 3] alias EthereumJSONRPC.Nethermind.Trace.{Action, Result} @doc """ @@ -232,7 +233,13 @@ defmodule EthereumJSONRPC.Nethermind.Trace do input: input, value: value } - |> put_call_error_or_result(elixir) + |> put_if_present(elixir, [ + {"error", :error} + ]) + |> put_if_present(elixir |> Map.get("result", %{}), [ + {"gasUsed", :gas_used}, + {"output", :output} + ]) end def elixir_to_params(%{"type" => "create" = type} = elixir) do @@ -257,7 +264,14 @@ defmodule EthereumJSONRPC.Nethermind.Trace do value: value, transaction_index: transaction_index } - |> put_create_error_or_result(elixir) + |> put_if_present(elixir, [ + {"error", :error} + ]) + |> put_if_present(elixir |> Map.get("result", %{}), [ + {"gasUsed", :gas_used}, + {"code", :created_contract_code}, + {"address", :created_contract_address_hash} + ]) end def elixir_to_params(%{"type" => "suicide"} = elixir) do @@ -470,32 +484,4 @@ defmodule EthereumJSONRPC.Nethermind.Trace do end defp entry_to_elixir({"transactionIndex", index} = entry) when is_integer(index), do: entry - - defp put_call_error_or_result(params, %{ - "result" => %{"gasUsed" => gas_used, "output" => output} - }) do - Map.merge(params, %{gas_used: gas_used, output: output}) - end - - defp put_call_error_or_result(params, %{"error" => error}) do - Map.put(params, :error, error) - end - - defp put_create_error_or_result(params, %{ - "result" => %{ - "address" => created_contract_address_hash, - "code" => code, - "gasUsed" => gas_used - } - }) do - Map.merge(params, %{ - created_contract_code: code, - created_contract_address_hash: created_contract_address_hash, - gas_used: gas_used - }) - end - - defp put_create_error_or_result(params, %{"error" => error}) do - Map.put(params, :error, error) - end end diff --git 
a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex index 8fc6adfead5d..53eda23e438e 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex @@ -1,6 +1,6 @@ defmodule EthereumJSONRPC.TraceReplayBlockTransactions do @moduledoc """ - Methods for processing the data from `trace_replayBlockTransactions` JSON RPC method + Methods for processing the data from `trace_replayTransaction` and `trace_replayBlockTransactions` JSON RPC methods """ require Logger import EthereumJSONRPC, only: [id_to_params: 1, integer_to_quantity: 1, json_rpc: 2, request: 1] @@ -144,8 +144,8 @@ defmodule EthereumJSONRPC.TraceReplayBlockTransactions do request(%{id: id, method: "trace_replayBlockTransactions", params: [integer_to_quantity(block_number), ["trace"]]}) end - def trace_replay_transaction_responses_to_first_trace_params(responses, id_to_params, traces_module) - when is_list(responses) and is_map(id_to_params) do + defp trace_replay_transaction_responses_to_first_trace_params(responses, id_to_params, traces_module) + when is_list(responses) and is_map(id_to_params) do with {:ok, traces} <- trace_replay_transaction_responses_to_first_trace(responses, id_to_params) do params = traces diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex index 778c141877f6..4d60d448db9e 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex @@ -290,7 +290,7 @@ defmodule EthereumJSONRPC.Transaction do max_fee_per_gas: max_fee_per_gas } - put_if_present(transaction, result, [ + put_if_present(result, transaction, [ {"creates", :created_contract_address_hash}, {"block_timestamp", :block_timestamp}, {"r", :r}, @@ -336,7 +336,7 @@ 
defmodule EthereumJSONRPC.Transaction do max_fee_per_gas: max_fee_per_gas } - put_if_present(transaction, result, [ + put_if_present(result, transaction, [ {"creates", :created_contract_address_hash}, {"block_timestamp", :block_timestamp}, {"r", :r}, @@ -378,7 +378,7 @@ defmodule EthereumJSONRPC.Transaction do type: type } - put_if_present(transaction, result, [ + put_if_present(result, transaction, [ {"creates", :created_contract_address_hash}, {"block_timestamp", :block_timestamp}, {"r", :r}, @@ -418,7 +418,7 @@ defmodule EthereumJSONRPC.Transaction do transaction_index: index } - put_if_present(transaction, result, [ + put_if_present(result, transaction, [ {"creates", :created_contract_address_hash}, {"block_timestamp", :block_timestamp}, {"r", :r}, @@ -459,7 +459,7 @@ defmodule EthereumJSONRPC.Transaction do type: type } - put_if_present(transaction, result, [ + put_if_present(result, transaction, [ {"creates", :created_contract_address_hash}, {"block_timestamp", :block_timestamp}, {"r", :r}, @@ -471,14 +471,14 @@ defmodule EthereumJSONRPC.Transaction do defp chain_type_fields(params, elixir) do case Application.get_env(:explorer, :chain_type) do :ethereum -> - put_if_present(elixir, params, [ + put_if_present(params, elixir, [ {"blobVersionedHashes", :blob_versioned_hashes}, {"maxFeePerBlobGas", :max_fee_per_blob_gas} ]) :optimism -> # we need to put blobVersionedHashes for Indexer.Fetcher.Optimism.TxnBatch module - put_if_present(elixir, params, [ + put_if_present(params, elixir, [ {"l1TxOrigin", :l1_tx_origin}, {"l1BlockNumber", :l1_block_number}, {"blobVersionedHashes", :blob_versioned_hashes} @@ -670,7 +670,7 @@ defmodule EthereumJSONRPC.Transaction do {nil, nil} end - defp put_if_present(transaction, result, keys) do + def put_if_present(result, transaction, keys) do Enum.reduce(keys, result, fn {from_key, to_key}, acc -> value = transaction[from_key] diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 
34480ed6f362..590017fda803 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -118,8 +118,6 @@ defmodule Explorer.Chain do @revert_msg_prefix_4 "Reverted " # Geth-like node @revert_msg_prefix_5 "execution reverted: " - # keccak256("Error(string)") - @revert_error_method_id "08c379a0" @limit_showing_transactions 10_000 @default_page_size 50 @@ -2956,17 +2954,58 @@ defmodule Explorer.Chain do end end - def fetch_tx_revert_reason( - %Transaction{ - block_number: block_number, - to_address_hash: to_address_hash, - from_address_hash: from_address_hash, - input: data, - gas: gas, - gas_price: gas_price, - value: value - } = transaction - ) do + def fetch_tx_revert_reason(transaction) do + json_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) + + hash_string = to_string(transaction.hash) + + response = + fetch_first_trace( + [ + %{ + block_hash: transaction.block_hash, + block_number: transaction.block_number, + hash_data: hash_string, + transaction_index: transaction.index + } + ], + json_rpc_named_arguments + ) + + revert_reason = + case response do + {:ok, first_trace_params} -> + first_trace_params |> Enum.at(0) |> Map.get(:output, %Data{bytes: <<>>}) |> to_string() + + {:error, reason} -> + Logger.error(fn -> + ["Error while fetching first trace for tx: #{hash_string} error reason: ", reason] + end) + + fetch_tx_revert_reason_using_call(transaction) + + :ignore -> + fetch_tx_revert_reason_using_call(transaction) + end + + if !is_nil(revert_reason) do + transaction + |> Changeset.change(%{revert_reason: revert_reason}) + |> Repo.update() + end + + revert_reason + end + + defp fetch_tx_revert_reason_using_call(%Transaction{ + block_number: block_number, + to_address_hash: to_address_hash, + from_address_hash: from_address_hash, + input: data, + gas: gas, + gas_price: gas_price, + value: value + }) do json_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) req = @@ 
-2981,31 +3020,33 @@ defmodule Explorer.Chain do Wei.hex_format(value) ) - revert_reason = - case EthereumJSONRPC.json_rpc(req, json_rpc_named_arguments) do - {:error, %{data: data}} -> - data + case EthereumJSONRPC.json_rpc(req, json_rpc_named_arguments) do + {:error, error} -> + parse_revert_reason_from_error(error) - {:error, %{message: message}} -> - message + _ -> + nil + end + end - _ -> - "" - end + def parse_revert_reason_from_error(%{data: data}), do: format_revert_data(data) - formatted_revert_reason = - revert_reason |> format_revert_reason_message() |> (&if(String.valid?(&1), do: &1, else: revert_reason)).() + def parse_revert_reason_from_error(%{message: message}), do: format_revert_reason_message(message) - if byte_size(formatted_revert_reason) > 0 do - transaction - |> Changeset.change(%{revert_reason: formatted_revert_reason}) - |> Repo.update() - end + defp format_revert_data(revert_data) do + case revert_data do + "revert" -> + "0x" - formatted_revert_reason + "0x" <> _ -> + revert_data + + _ -> + nil + end end - def format_revert_reason_message(revert_reason) do + defp format_revert_reason_message(revert_reason) do case revert_reason do @revert_msg_prefix_1 <> rest -> rest @@ -3014,41 +3055,16 @@ defmodule Explorer.Chain do rest @revert_msg_prefix_3 <> rest -> - extract_revert_reason_message_wrapper(rest) + rest @revert_msg_prefix_4 <> rest -> - extract_revert_reason_message_wrapper(rest) + rest @revert_msg_prefix_5 <> rest -> - extract_revert_reason_message_wrapper(rest) - - revert_reason_full -> - revert_reason_full - end - end - - defp extract_revert_reason_message_wrapper(revert_reason_message) do - case revert_reason_message do - "0x" <> hex -> - extract_revert_reason_message(hex) - - _ -> - revert_reason_message - end - end - - defp extract_revert_reason_message(hex) do - case hex do - @revert_error_method_id <> msg_with_offset -> - [msg] = - msg_with_offset - |> Base.decode16!(case: :mixed) - |> TypeDecoder.decode_raw([:string]) - - msg + 
rest _ -> - hex + nil end end diff --git a/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex b/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex index d4184f6074a0..5fcd7024283d 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex @@ -661,6 +661,7 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do updated_at: timestamps.updated_at ] + # we don't save reverted trace outputs, but if we did, we could also set :revert_reason here set = default_set |> put_status_in_update_set(first_trace, transaction_from_db) diff --git a/apps/explorer/lib/explorer/chain/internal_transaction.ex b/apps/explorer/lib/explorer/chain/internal_transaction.ex index 8b6c2adb5d8b..0678dfc97f75 100644 --- a/apps/explorer/lib/explorer/chain/internal_transaction.ex +++ b/apps/explorer/lib/explorer/chain/internal_transaction.ex @@ -232,6 +232,7 @@ defmodule Explorer.Chain.InternalTransaction do Failed `:call`s are not allowed to set `gas_used` or `output` because they are part of the successful `result` object in the Nethermind JSONRPC response. They still need `input`, however. + The changeset will be fixed by `validate_call_error_or_result`, therefore the changeset is still valid. iex> changeset = Explorer.Chain.InternalTransaction.changeset( ...> %Explorer.Chain.InternalTransaction{}, @@ -256,11 +257,7 @@ defmodule Explorer.Chain.InternalTransaction do ...> } ...> ) iex> changeset.valid? - false - iex> changeset.errors - [ - output: {"can't be present for failed call", []} - ] + true Likewise, successful `:call`s require `input`, `gas_used` and `output` to be set. 
@@ -293,6 +290,7 @@ defmodule Explorer.Chain.InternalTransaction do For failed `:create`, `created_contract_code`, `created_contract_address_hash`, and `gas_used` are not allowed to be set because they come from `result` object, which shouldn't be returned from Nethermind. + The changeset will be fixed by `validate_create_error_or_result`, therefore the changeset is still valid. iex> changeset = Explorer.Chain.InternalTransaction.changeset( ...> %Explorer.Chain.InternalTransaction{}, @@ -316,13 +314,7 @@ defmodule Explorer.Chain.InternalTransaction do ...> } iex> ) iex> changeset.valid? - false - iex> changeset.errors - [ - gas_used: {"can't be present for failed create", []}, - created_contract_address_hash: {"can't be present for failed create", []}, - created_contract_code: {"can't be present for failed create", []} - ] + true For successful `:create`, `created_contract_code`, `created_contract_address_hash`, and `gas_used` are required. @@ -420,6 +412,7 @@ defmodule Explorer.Chain.InternalTransaction do changeset |> cast(attrs, @call_allowed_fields) |> validate_required(@call_required_fields) + # TODO consider removing |> validate_call_error_or_result() |> check_constraint(:call_type, message: ~S|can't be blank when type is 'call'|, name: :call_has_call_type) |> check_constraint(:input, message: ~S|can't be blank when type is 'call'|, name: :call_has_input) @@ -435,6 +428,7 @@ defmodule Explorer.Chain.InternalTransaction do changeset |> cast(attrs, @create_allowed_fields) |> validate_required(@create_required_fields) + # TODO consider removing |> validate_create_error_or_result() |> check_constraint(:init, message: ~S|can't be blank when type is 'create'|, name: :create_has_init) |> foreign_key_constraint(:transaction_hash) @@ -481,8 +475,14 @@ defmodule Explorer.Chain.InternalTransaction do # Validates that :call `type` changeset either has an `error` or both `gas_used` and `output` defp validate_call_error_or_result(changeset) do case get_field(changeset, 
:error) do - nil -> validate_required(changeset, [:gas_used, :output], message: "can't be blank for successful call") - _ -> validate_disallowed(changeset, [:output], message: "can't be present for failed call") + nil -> + validate_required(changeset, [:gas_used, :output], message: "can't be blank for successful call") + + _ -> + changeset + |> delete_change(:gas_used) + |> delete_change(:output) + |> validate_disallowed([:output], message: "can't be present for failed call") end end @@ -492,8 +492,15 @@ defmodule Explorer.Chain.InternalTransaction do # `:created_contract_address_hash` defp validate_create_error_or_result(changeset) do case get_field(changeset, :error) do - nil -> validate_required(changeset, @create_success_fields, message: "can't be blank for successful create") - _ -> validate_disallowed(changeset, @create_success_fields, message: "can't be present for failed create") + nil -> + validate_required(changeset, @create_success_fields, message: "can't be blank for successful create") + + _ -> + changeset + |> delete_change(:created_contract_code) + |> delete_change(:created_contract_address_hash) + |> delete_change(:gas_used) + |> validate_disallowed(@create_success_fields, message: "can't be present for failed create") end end diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index 18ceb8a0b607..64eb8d07801c 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -141,6 +141,7 @@ defmodule Explorer.Chain.Transaction.Schema do field(:status, Status) field(:v, :decimal) field(:value, Wei) + # TODO change to Data.t(), convert current hex-string values, prune all non-hex ones field(:revert_reason, :string) field(:max_priority_fee_per_gas, Wei) field(:max_fee_per_gas, Wei) @@ -616,22 +617,52 @@ defmodule Explorer.Chain.Transaction do process_hex_revert_reason(hex, transaction, options) end - defp 
process_hex_revert_reason(hex_revert_reason, %__MODULE__{to_address: smart_contract, hash: hash}, options) do - case Integer.parse(hex_revert_reason, 16) do - {number, ""} -> - binary_revert_reason = :binary.encode_unsigned(number) - - {result, _, _} = - decoded_input_data( - %Transaction{ - to_address: smart_contract, - hash: hash, - input: %Data{bytes: binary_revert_reason} - }, - options - ) + @default_error_abi [ + %{ + "inputs" => [ + %{ + "name" => "reason", + "type" => "string" + } + ], + "name" => "Error", + "type" => "error" + }, + %{ + "inputs" => [ + %{ + "name" => "errorCode", + "type" => "uint256" + } + ], + "name" => "Panic", + "type" => "error" + } + ] - result + defp process_hex_revert_reason(hex_revert_reason, %__MODULE__{to_address: smart_contract, hash: hash}, options) do + case Base.decode16(hex_revert_reason, case: :mixed) do + {:ok, binary_revert_reason} -> + case find_and_decode(@default_error_abi, binary_revert_reason, hash) do + {:ok, {selector, values}} -> + {:ok, mapping} = selector_mapping(selector, values, hash) + identifier = Base.encode16(selector.method_id, case: :lower) + text = function_call(selector.function, mapping) + {:ok, identifier, text, mapping} + + _ -> + {result, _, _} = + decoded_input_data( + %Transaction{ + to_address: smart_contract, + hash: hash, + input: %Data{bytes: binary_revert_reason} + }, + options + ) + + result + end _ -> hex_revert_reason diff --git a/apps/explorer/test/explorer/chain_test.exs b/apps/explorer/test/explorer/chain_test.exs index ff6da322f930..efe47a76030f 100644 --- a/apps/explorer/test/explorer/chain_test.exs +++ b/apps/explorer/test/explorer/chain_test.exs @@ -4217,8 +4217,12 @@ defmodule Explorer.ChainTest do describe "transaction_to_revert_reason/1" do test "returns correct revert_reason from DB" do - transaction = insert(:transaction, revert_reason: "No credit of that type") - assert Chain.transaction_to_revert_reason(transaction) == "No credit of that type" + # Error("No credit of that 
type") + hex_reason = + "0x08c379a0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000164e6f20637265646974206f662074686174207479706500000000000000000000" + + transaction = insert(:transaction, revert_reason: hex_reason) + assert Chain.transaction_to_revert_reason(transaction) == hex_reason end test "returns correct revert_reason from the archive node" do @@ -4231,15 +4235,57 @@ defmodule Explorer.ChainTest do ) |> with_block(insert(:block, number: 1)) + # Error("No credit of that type") + hex_reason = + "0x08c379a0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000164e6f20637265646974206f662074686174207479706500000000000000000000" + expect( EthereumJSONRPC.Mox, :json_rpc, fn _json, [] -> - {:error, %{code: -32015, message: "VM execution error.", data: "revert: No credit of that type"}} + {:ok, + [ + %{ + id: 0, + result: %{ + "output" => "0x", + "stateDiff" => nil, + "trace" => [ + %{ + "action" => %{ + "callType" => "call", + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "input" => "0x01", + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "value" => "0x86b3" + }, + "error" => "Reverted", + "result" => %{ + "gasUsed" => "0x5208", + "output" => hex_reason + }, + "subtraces" => 0, + "traceAddress" => [], + "type" => "call" + } + ], + "transactionHash" => "0xdf5574290913659a1ac404ccf2d216c40587f819400a52405b081dda728ac120", + "vmTrace" => nil + } + } + ]} end ) - assert Chain.transaction_to_revert_reason(transaction) == "No credit of that type" + assert Chain.transaction_to_revert_reason(transaction) == hex_reason + + assert Transaction.decoded_revert_reason(transaction, hex_reason) == { + :ok, + "08c379a0", + "Error(string reason)", + [{"reason", "string", "No credit of that type"}] + } end end end From 88394ff6fbbf5ce1cf8d852c8c4b246d3cc999e3 Mon Sep 17 00:00:00 2001 From: 
Viktor Baranov Date: Wed, 15 May 2024 17:12:14 +0300 Subject: [PATCH 022/150] 6.6.0 --- CHANGELOG.md | 91 +++++++++++++++++++++++++++++++ apps/block_scout_web/mix.exs | 2 +- apps/ethereum_jsonrpc/mix.exs | 2 +- apps/explorer/mix.exs | 2 +- apps/indexer/mix.exs | 2 +- docker-compose/docker-compose.yml | 2 +- docker/Makefile | 2 +- mix.exs | 2 +- rel/config.exs | 2 +- 9 files changed, 99 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 443947cc02b2..d919fad06d36 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,96 @@ # Changelog +## 6.6.0 + +### 🚀 Features + +- Add optional retry of NFT metadata fetch in Indexer.Fetcher.Tok… ([#10036](https://github.com/blockscout/blockscout/issues/10036)) +- Blueprint contracts support ([#10058](https://github.com/blockscout/blockscout/issues/10058)) +- Clone with immutable arguments proxy pattern ([#10039](https://github.com/blockscout/blockscout/issues/10039)) +- Improve retry NFT fetcher ([#10027](https://github.com/blockscout/blockscout/issues/10027)) +- MUD API support ([#9869](https://github.com/blockscout/blockscout/issues/9869)) +- Diamond proxy (EIP-2535) support ([#10034](https://github.com/blockscout/blockscout/issues/10034)) +- Add user ops indexer to docker compose configs ([#10010](https://github.com/blockscout/blockscout/issues/10010)) +- Save smart-contract proxy type in the DB ([#10033](https://github.com/blockscout/blockscout/issues/10033)) +- Detect EIP-1967 proxy pattern on unverified smart-contracts ([#9864](https://github.com/blockscout/blockscout/issues/9864)) +- Omit balanceOf requests for tokens that doesn't support it ([#10018](https://github.com/blockscout/blockscout/issues/10018)) +- Precompiled contracts ABI import ([#9899](https://github.com/blockscout/blockscout/issues/9899)) +- Add ENS category to search result; Add ENS to check-redirect ([#9779](https://github.com/blockscout/blockscout/issues/9779)) + +### 🐛 Bug Fixes + +- Rework revert_reason 
([#9212](https://github.com/blockscout/blockscout/issues/9212)) +- Eliminate from_address_hash == #{address_hash} clause for transactions query in case of smart-contracts ([#9469](https://github.com/blockscout/blockscout/issues/9469)) +- Separate indexer setup ([#10032](https://github.com/blockscout/blockscout/issues/10032)) +- Disallow batched queries in GraphQL endpoint ([#10050](https://github.com/blockscout/blockscout/issues/10050)) +- Vyper contracts re-verificaiton ([#10053](https://github.com/blockscout/blockscout/issues/10053)) +- Fix Unknown UID bug at smart-contract verification ([#9986](https://github.com/blockscout/blockscout/issues/9986)) +- Search for long integers ([#9651](https://github.com/blockscout/blockscout/issues/9651)) +- Don't put error to NFT metadata ([#9940](https://github.com/blockscout/blockscout/issues/9940)) +- Fix typo + +- Handle DB unavailability by PolygonZkevm.TransactionBatch fetcher ([#10031](https://github.com/blockscout/blockscout/issues/10031)) +- Fix WebSocketClient reconnect ([#9937](https://github.com/blockscout/blockscout/issues/9937)) +- Fix incorrect image_url parsing from NFT meta ([#9956](https://github.com/blockscout/blockscout/issues/9956)) +- Fix CI workflow name + +### 🚜 Refactor + +- Refactor get_additional_sources/4 -> get_additional_sources/3 ([#10046](https://github.com/blockscout/blockscout/issues/10046)) +- Test database config ([#9662](https://github.com/blockscout/blockscout/issues/9662)) + +### ⚙️ Miscellaneous Tasks + +- Update hackney pool size: add new fetchers accounting ([#9941](https://github.com/blockscout/blockscout/issues/9941)) +- Bump credo from 1.7.5 to 1.7.6 ([#10060](https://github.com/blockscout/blockscout/issues/10060)) +- Bump redix from 1.5.0 to 1.5.1 ([#10059](https://github.com/blockscout/blockscout/issues/10059)) +- Bump ex_doc from 0.32.1 to 0.32.2 ([#10061](https://github.com/blockscout/blockscout/issues/10061)) +- Remove `has_methods` from `/addresses` 
([#10051](https://github.com/blockscout/blockscout/issues/10051)) +- Add support of Blast-specific L1 OP withdrawal events ([#10049](https://github.com/blockscout/blockscout/issues/10049)) +- Update outdated links to ETH JSON RPC Specification in docstrings ([#10041](https://github.com/blockscout/blockscout/issues/10041)) +- Migrate to GET variant of {{metadata_url}}/api/v1/metadata ([#9994](https://github.com/blockscout/blockscout/issues/9994)) +- CI for Arbitrum on-demand Docker images + +- Bump ex_cldr_numbers from 2.32.4 to 2.33.1 ([#9978](https://github.com/blockscout/blockscout/issues/9978)) +- Bump ex_cldr from 2.38.0 to 2.38.1 ([#10009](https://github.com/blockscout/blockscout/issues/10009)) +- Bump ex_cldr_units from 3.16.5 to 3.17.0 ([#9931](https://github.com/blockscout/blockscout/issues/9931)) +- Bump style-loader in /apps/block_scout_web/assets ([#9995](https://github.com/blockscout/blockscout/issues/9995)) +- Bump mini-css-extract-plugin in /apps/block_scout_web/assets ([#9997](https://github.com/blockscout/blockscout/issues/9997)) +- Bump @babel/preset-env in /apps/block_scout_web/assets ([#9999](https://github.com/blockscout/blockscout/issues/9999)) +- Bump @amplitude/analytics-browser in /apps/block_scout_web/assets ([#10001](https://github.com/blockscout/blockscout/issues/10001)) +- Bump css-loader in /apps/block_scout_web/assets ([#10003](https://github.com/blockscout/blockscout/issues/10003)) +- Bump sweetalert2 in /apps/block_scout_web/assets ([#9998](https://github.com/blockscout/blockscout/issues/9998)) +- Bump mixpanel-browser in /apps/block_scout_web/assets ([#10000](https://github.com/blockscout/blockscout/issues/10000)) +- Bump @fortawesome/fontawesome-free ([#10002](https://github.com/blockscout/blockscout/issues/10002)) +- Bump @babel/core in /apps/block_scout_web/assets ([#9996](https://github.com/blockscout/blockscout/issues/9996)) +- Enhance indexer memory metrics ([#9984](https://github.com/blockscout/blockscout/issues/9984)) +- 
Bump redix from 1.4.1 to 1.5.0 ([#9977](https://github.com/blockscout/blockscout/issues/9977)) +- Bump floki from 0.36.1 to 0.36.2 ([#9979](https://github.com/blockscout/blockscout/issues/9979)) +- (old UI) Replace old Twitter icon with new 'X' ([#9641](https://github.com/blockscout/blockscout/issues/9641)) + +### New ENV Variables + +| Variable | Required | Description | Default | Version | Need recompile | +| -------------------------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------ | ------- | -------------- | +| `DISABLE_API` | | If `true`, endpoint is not started. Set this if you want to use an indexer-only setup. Implemented in [#10032](https://github.com/blockscout/blockscout/pull/10032) | `false` | v6.6.0+ | | +| `INDEXER_TOKEN_INSTANCE_RETRY_MAX_REFETCH_INTERVAL` | | Maximum interval between attempts to fetch token instance metadata. [Time format](env-variables.md#time-format). Implemented in [#10027](https://github.com/blockscout/blockscout/pull/10027). | `168h` | v6.6.0+ | +| `INDEXER_TOKEN_INSTANCE_RETRY_EXPONENTIAL_TIMEOUT_BASE` | | Base to calculate exponential timeout. Implemented in [#10027](https://github.com/blockscout/blockscout/pull/10027). | `2` | v6.6.0+ | +| `INDEXER_TOKEN_INSTANCE_RETRY_EXPONENTIAL_TIMEOUT_COEFF` | | Coefficient to calculate exponential timeout. Implemented in [#10027](https://github.com/blockscout/blockscout/pull/10027). | `100` | v6.6.0+ | +| `INDEXER_TOKEN_INSTANCE_REALTIME_RETRY_ENABLED` | | If `true`, `realtime` token instance fetcher will retry once on 404 and 500 error. Implemented in [#10036](https://github.com/blockscout/blockscout/pull/10036). 
| `false` | v6.6.0+ | +| `INDEXER_TOKEN_INSTANCE_REALTIME_RETRY_TIMEOUT` | | Timeout for retry set by `INDEXER_TOKEN_INSTANCE_REALTIME_RETRY_ENABLED`. [Time format](env-variables.md#time-format). Implemented in [#10036](https://github.com/blockscout/blockscout/pull/10036). | `5s` | v6.6.0+ | +| `TEST_DATABASE_URL` | | Variable to define the endpoint of the Postgres Database that is used during testing. Implemented in [#9662](https://github.com/blockscout/blockscout/pull/9662). | (empty) | v6.6.0+ | | +| `TEST_DATABASE_READ_ONLY_API_URL` | | Variable to define the endpoint of the Postgres Database read-only replica that is used during testing. If it is provided, most of the read queries from API v2 and UI would go through this endpoint. Implemented in [#9662](https://github.com/blockscout/blockscout/pull/9662). | (empty) | v6.6.0+ | | +| `MUD_INDEXER_ENABLED` | | If `true`, integration with [MUD](https://mud.dev/services/indexer#schemaless-indexing-with-postgresql-via-docker) is enabled. Implemented in [#9869](https://github.com/blockscout/blockscout/pull/9869) | (empty) | v6.6.0+ | | +| `MUD_DATABASE_URL` | | MUD indexer DB connection URL. 
| value from `DATABASE_URL` | v6.6.0+ | | +| `MUD_POOL_SIZE` | | MUD indexer DB `pool_size` | 50 | v6.6.0+ | | + +### Deprecated ENV Variables + +| Variable | Required | Description | Default | Version | Need recompile | Deprecated in Version | +| ----------------------------------------------------- | -------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | -------- | -------------- | --------------------- | +| `INDEXER_TOKEN_INSTANCE_RETRY_REFETCH_INTERVAL` | | Interval between attempts to fetch token instance metadata. [Time format](env-variables.md#time-format). Implemented in [#7286](https://github.com/blockscout/blockscout/pull/7286). | `24h` | v5.1.4+ | | v6.6.0 | +| `INDEXER_INTERNAL_TRANSACTIONS_INDEXING_FINISHED_THRESHOLD` | | In the case when the 1st tx in the chain already has internal transactions, If the number of blocks in pending\_block\_operations is less than the value in this env var, Blockscout will consider, that indexing of internal transactions finished, otherwise, it will consider, that indexing is still taking place and the indexing banner will appear at the top. Implemented in [#7576](https://github.com/blockscout/blockscout/pull/7576). 
| 1000 | v5.2.0+ | | v6.6.0 | + ## 6.5.0 ### 🚀 Features diff --git a/apps/block_scout_web/mix.exs b/apps/block_scout_web/mix.exs index 2aad10cbfe01..bcf12490bae2 100644 --- a/apps/block_scout_web/mix.exs +++ b/apps/block_scout_web/mix.exs @@ -23,7 +23,7 @@ defmodule BlockScoutWeb.Mixfile do dialyzer: :test ], start_permanent: Mix.env() == :prod, - version: "6.5.0", + version: "6.6.0", xref: [ exclude: [ Explorer.Chain.PolygonZkevm.Reader, diff --git a/apps/ethereum_jsonrpc/mix.exs b/apps/ethereum_jsonrpc/mix.exs index 2c9a7b27f4cc..369594cf35ed 100644 --- a/apps/ethereum_jsonrpc/mix.exs +++ b/apps/ethereum_jsonrpc/mix.exs @@ -23,7 +23,7 @@ defmodule EthereumJsonrpc.MixProject do dialyzer: :test ], start_permanent: Mix.env() == :prod, - version: "6.5.0" + version: "6.6.0" ] end diff --git a/apps/explorer/mix.exs b/apps/explorer/mix.exs index 80445bbd19a3..d51224213fcc 100644 --- a/apps/explorer/mix.exs +++ b/apps/explorer/mix.exs @@ -24,7 +24,7 @@ defmodule Explorer.Mixfile do dialyzer: :test ], start_permanent: Mix.env() == :prod, - version: "6.5.0", + version: "6.6.0", xref: [exclude: [BlockScoutWeb.WebRouter.Helpers, Indexer.Helper]] ] end diff --git a/apps/indexer/mix.exs b/apps/indexer/mix.exs index 3d3ec8f15bf3..8025dc5475ff 100644 --- a/apps/indexer/mix.exs +++ b/apps/indexer/mix.exs @@ -14,7 +14,7 @@ defmodule Indexer.MixProject do elixirc_paths: elixirc_paths(Mix.env()), lockfile: "../../mix.lock", start_permanent: Mix.env() == :prod, - version: "6.5.0", + version: "6.6.0", xref: [ exclude: [ Explorer.Chain.Optimism.Deposit, diff --git a/docker-compose/docker-compose.yml b/docker-compose/docker-compose.yml index d2a3ec55af89..af1bfd6dcfde 100644 --- a/docker-compose/docker-compose.yml +++ b/docker-compose/docker-compose.yml @@ -37,7 +37,7 @@ services: CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED: "" CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL: "" ADMIN_PANEL_ENABLED: "" - RELEASE_VERSION: 6.5.0 + RELEASE_VERSION: 6.6.0 links: - db:database environment: diff --git 
a/docker/Makefile b/docker/Makefile index 946028577ef3..efece401af9c 100644 --- a/docker/Makefile +++ b/docker/Makefile @@ -10,7 +10,7 @@ STATS_CONTAINER_NAME := stats STATS_DB_CONTAINER_NAME := stats-db PROXY_CONTAINER_NAME := proxy PG_CONTAINER_NAME := postgres -RELEASE_VERSION ?= '6.5.0' +RELEASE_VERSION ?= '6.6.0' TAG := $(RELEASE_VERSION)-commit-$(shell git log -1 --pretty=format:"%h") STABLE_TAG := $(RELEASE_VERSION) diff --git a/mix.exs b/mix.exs index ae63d3c7ef1b..3e57d9439c69 100644 --- a/mix.exs +++ b/mix.exs @@ -7,7 +7,7 @@ defmodule BlockScout.Mixfile do [ # app: :block_scout, # aliases: aliases(config_env()), - version: "6.5.0", + version: "6.6.0", apps_path: "apps", deps: deps(), dialyzer: dialyzer(), diff --git a/rel/config.exs b/rel/config.exs index e581a15c60f0..3b48503135f9 100644 --- a/rel/config.exs +++ b/rel/config.exs @@ -71,7 +71,7 @@ end # will be used by default release :blockscout do - set version: "6.5.0-beta" + set version: "6.6.0-beta" set applications: [ :runtime_tools, block_scout_web: :permanent, From 8265c9d6d7dada0003cf69da4a3550de73e7cb5f Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Fri, 17 May 2024 14:36:25 +0400 Subject: [PATCH 023/150] fix: Add healthcheck endpoints for indexer-only setup (#10076) --- apps/block_scout_web/config/config.exs | 3 + .../lib/block_scout_web/application.ex | 27 +++--- .../controllers/api/v1/health_controller.ex | 4 +- .../lib/block_scout_web/endpoint.ex | 88 ++++++++++--------- .../lib/block_scout_web/health_router.ex | 15 ++++ apps/block_scout_web/mix.exs | 17 +++- 6 files changed, 96 insertions(+), 58 deletions(-) create mode 100644 apps/block_scout_web/lib/block_scout_web/health_router.ex diff --git a/apps/block_scout_web/config/config.exs b/apps/block_scout_web/config/config.exs index aa3a4616a6a3..40ff4eb8f488 100644 --- a/apps/block_scout_web/config/config.exs +++ b/apps/block_scout_web/config/config.exs @@ -22,6 +22,9 @@ config 
:block_scout_web, config :block_scout_web, admin_panel_enabled: ConfigHelper.parse_bool_env_var("ADMIN_PANEL_ENABLED") +config :block_scout_web, + disable_api?: ConfigHelper.parse_bool_env_var("DISABLE_API") + config :block_scout_web, BlockScoutWeb.Counters.BlocksIndexedCounter, enabled: true config :block_scout_web, BlockScoutWeb.Counters.InternalTransactionsIndexedCounter, enabled: true diff --git a/apps/block_scout_web/lib/block_scout_web/application.ex b/apps/block_scout_web/lib/block_scout_web/application.ex index 78b3c4fdfcb2..e549bda3c4aa 100644 --- a/apps/block_scout_web/lib/block_scout_web/application.ex +++ b/apps/block_scout_web/lib/block_scout_web/application.ex @@ -5,16 +5,14 @@ defmodule BlockScoutWeb.Application do use Application - alias BlockScoutWeb.API.APILogger - alias BlockScoutWeb.Counters.{BlocksIndexedCounter, InternalTransactionsIndexedCounter} - alias BlockScoutWeb.Prometheus.{Exporter, PhoenixInstrumenter} - alias BlockScoutWeb.{Endpoint, MainPageRealtimeEventHandler, RealtimeEventHandler, SmartContractRealtimeEventHandler} - alias BlockScoutWeb.Utility.EventHandlersMetrics + alias BlockScoutWeb.Endpoint def start(_type, _args) do - children = setup_and_define_children() + base_children = [Supervisor.child_spec(Endpoint, [])] + api_children = setup_and_define_children() + all_children = base_children ++ api_children opts = [strategy: :one_for_one, name: BlockScoutWeb.Supervisor, max_restarts: 1_000] - Supervisor.start_link(children, opts) + Supervisor.start_link(all_children, opts) end # Tell Phoenix to update the endpoint configuration @@ -24,10 +22,16 @@ defmodule BlockScoutWeb.Application do :ok end - defp setup_and_define_children do - if Application.get_env(:block_scout_web, :disable_api?) do - [] - else + if Application.compile_env(:block_scout_web, :disable_api?) 
do + defp setup_and_define_children, do: [] + else + defp setup_and_define_children do + alias BlockScoutWeb.API.APILogger + alias BlockScoutWeb.Counters.{BlocksIndexedCounter, InternalTransactionsIndexedCounter} + alias BlockScoutWeb.Prometheus.{Exporter, PhoenixInstrumenter} + alias BlockScoutWeb.{MainPageRealtimeEventHandler, RealtimeEventHandler, SmartContractRealtimeEventHandler} + alias BlockScoutWeb.Utility.EventHandlersMetrics + PhoenixInstrumenter.setup() Exporter.setup() @@ -47,7 +51,6 @@ defmodule BlockScoutWeb.Application do [ # Start the endpoint when the application starts {Phoenix.PubSub, name: BlockScoutWeb.PubSub}, - Supervisor.child_spec(Endpoint, []), {Absinthe.Subscription, Endpoint}, {MainPageRealtimeEventHandler, name: MainPageRealtimeEventHandler}, {RealtimeEventHandler, name: RealtimeEventHandler}, diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex index f65bfcd3b5d9..991c4936ca10 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex @@ -1,5 +1,7 @@ defmodule BlockScoutWeb.API.V1.HealthController do - use BlockScoutWeb, :controller + use Phoenix.Controller, namespace: BlockScoutWeb + + import Plug.Conn alias Explorer.Chain alias Timex.Duration diff --git a/apps/block_scout_web/lib/block_scout_web/endpoint.ex b/apps/block_scout_web/lib/block_scout_web/endpoint.ex index 734d1e9eee06..7d742482847d 100644 --- a/apps/block_scout_web/lib/block_scout_web/endpoint.ex +++ b/apps/block_scout_web/lib/block_scout_web/endpoint.ex @@ -6,19 +6,22 @@ defmodule BlockScoutWeb.Endpoint do plug(Phoenix.Ecto.SQL.Sandbox, repo: Explorer.Repo) end - socket("/socket", BlockScoutWeb.UserSocket, websocket: [timeout: 45_000]) - socket("/socket/v2", BlockScoutWeb.UserSocketV2, websocket: [timeout: 45_000]) + if 
Application.compile_env(:block_scout_web, :disable_api?) do + plug(BlockScoutWeb.HealthRouter) + else + socket("/socket", BlockScoutWeb.UserSocket, websocket: [timeout: 45_000]) + socket("/socket/v2", BlockScoutWeb.UserSocketV2, websocket: [timeout: 45_000]) - # Serve at "/" the static files from "priv/static" directory. - # - # You should set gzip to true if you are running phoenix.digest - # when deploying your static files in production. - plug( - Plug.Static, - at: "/", - from: :block_scout_web, - gzip: true, - only: ~w( + # Serve at "/" the static files from "priv/static" directory. + # + # You should set gzip to true if you are running phoenix.digest + # when deploying your static files in production. + plug( + Plug.Static, + at: "/", + from: :block_scout_web, + gzip: true, + only: ~w( css fonts images @@ -30,45 +33,46 @@ defmodule BlockScoutWeb.Endpoint do mstile-150x150.png safari-pinned-tab.svg ), - only_matching: ~w(manifest) - ) + only_matching: ~w(manifest) + ) - # Code reloading can be explicitly enabled under the - # :code_reloader configuration of your endpoint. - if code_reloading? do - socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket) - plug(Phoenix.LiveReloader) - plug(Phoenix.CodeReloader) - end + # Code reloading can be explicitly enabled under the + # :code_reloader configuration of your endpoint. + if code_reloading? do + socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket) + plug(Phoenix.LiveReloader) + plug(Phoenix.CodeReloader) + end - plug(Plug.RequestId) + plug(Plug.RequestId) - plug(Plug.MethodOverride) - plug(Plug.Head) + plug(Plug.MethodOverride) + plug(Plug.Head) - # The session will be stored in the cookie and signed, - # this means its contents can be read but not tampered with. - # Set :encryption_salt if you would also like to encrypt it. + # The session will be stored in the cookie and signed, + # this means its contents can be read but not tampered with. 
+ # Set :encryption_salt if you would also like to encrypt it. - plug( - Plug.Session, - store: BlockScoutWeb.Plug.RedisCookie, - key: "_explorer_key", - signing_salt: "iC2ksJHS", - same_site: "Lax", - http_only: false, - domain: Application.compile_env(:block_scout_web, :cookie_domain), - max_age: Application.compile_env(:block_scout_web, :session_cookie_ttl) - ) + plug( + Plug.Session, + store: BlockScoutWeb.Plug.RedisCookie, + key: "_explorer_key", + signing_salt: "iC2ksJHS", + same_site: "Lax", + http_only: false, + domain: Application.compile_env(:block_scout_web, :cookie_domain), + max_age: Application.compile_env(:block_scout_web, :session_cookie_ttl) + ) - use SpandexPhoenix + use SpandexPhoenix - plug(BlockScoutWeb.Prometheus.Exporter) + plug(BlockScoutWeb.Prometheus.Exporter) - # 'x-apollo-tracing' header for https://www.graphqlbin.com to work with our GraphQL endpoint - plug(CORSPlug, headers: ["x-apollo-tracing" | CORSPlug.defaults()[:headers]]) + # 'x-apollo-tracing' header for https://www.graphqlbin.com to work with our GraphQL endpoint + plug(CORSPlug, headers: ["x-apollo-tracing" | CORSPlug.defaults()[:headers]]) - plug(BlockScoutWeb.Router) + plug(BlockScoutWeb.Router) + end def init(_key, config) do if config[:load_from_system_env] do diff --git a/apps/block_scout_web/lib/block_scout_web/health_router.ex b/apps/block_scout_web/lib/block_scout_web/health_router.ex new file mode 100644 index 000000000000..746bae3dbcf6 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/health_router.ex @@ -0,0 +1,15 @@ +defmodule BlockScoutWeb.HealthRouter do + @moduledoc """ + Router for health checks in case of indexer-only setup + """ + + use BlockScoutWeb, :router + + alias BlockScoutWeb.API.V1.HealthController + + scope "/api/v1/health" do + get("/", HealthController, :health) + get("/liveness", HealthController, :liveness) + get("/readiness", HealthController, :readiness) + end +end diff --git a/apps/block_scout_web/mix.exs 
b/apps/block_scout_web/mix.exs index bcf12490bae2..1807a4758e88 100644 --- a/apps/block_scout_web/mix.exs +++ b/apps/block_scout_web/mix.exs @@ -15,7 +15,7 @@ defmodule BlockScoutWeb.Mixfile do ignore_warnings: "../../.dialyzer-ignore" ], elixir: "~> 1.13", - elixirc_paths: elixirc_paths(Mix.env()), + elixirc_paths: elixirc_paths(Mix.env(), Application.get_env(:block_scout_web, :disable_api?)), lockfile: "../../mix.lock", package: package(), preferred_cli_env: [ @@ -48,8 +48,19 @@ defmodule BlockScoutWeb.Mixfile do end # Specifies which paths to compile per environment. - defp elixirc_paths(:test), do: ["test/support", "test/block_scout_web/features/pages"] ++ elixirc_paths() - defp elixirc_paths(_), do: elixirc_paths() + defp elixirc_paths(:test, _), do: ["test/support", "test/block_scout_web/features/pages"] ++ elixirc_paths() + + defp elixirc_paths(_, true), + do: [ + "lib/phoenix", + "lib/block_scout_web.ex", + "lib/block_scout_web/application.ex", + "lib/block_scout_web/endpoint.ex", + "lib/block_scout_web/health_router.ex", + "lib/block_scout_web/controllers/api/v1/health_controller.ex" + ] + + defp elixirc_paths(_, _), do: elixirc_paths() defp elixirc_paths, do: ["lib"] defp extra_applications, From 77203a3f83b96e3265e2b4c20a59839bc7a5e15f Mon Sep 17 00:00:00 2001 From: Rim Rakhimov Date: Mon, 20 May 2024 17:38:46 +0300 Subject: [PATCH 024/150] fix: Update Vyper inner compilers list to support all compilers (#10091) Increases the number of items retrieved for Vyper releases to 100. Allows to get the oldest vyper compilers when using internal verification (i.e., sc_verifier is disabled) When not set, the default number of items retrieved is 30, which is less than total number of currently existing releases (46). This makes the oldest compilers unavailable when using vyper verificaiton method. 
Should fix the failing `/api/v2/smart-contracts/{address_hash}/verification/via/vyper-code success verification` test --- apps/explorer/lib/explorer/smart_contract/compiler_version.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/explorer/lib/explorer/smart_contract/compiler_version.ex b/apps/explorer/lib/explorer/smart_contract/compiler_version.ex index 3a0e898ff5e2..fccfcc3f5901 100644 --- a/apps/explorer/lib/explorer/smart_contract/compiler_version.ex +++ b/apps/explorer/lib/explorer/smart_contract/compiler_version.ex @@ -59,7 +59,7 @@ defmodule Explorer.SmartContract.CompilerVersion do @spec vyper_releases_url :: String.t() def vyper_releases_url do - "https://api.github.com/repos/vyperlang/vyper/releases" + "https://api.github.com/repos/vyperlang/vyper/releases?per_page=100" end defp format_data(json, compiler) do From 22c106a2b0bb0a4e34f655085377eda9e861f0bf Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Mon, 20 May 2024 19:51:25 +0300 Subject: [PATCH 025/150] Fix certified flag in the search API v2 endpoint (#10094) --- .../lib/block_scout_web/views/api/v2/search_view.ex | 1 + apps/explorer/lib/explorer/chain/search.ex | 11 +++++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex index f59ba29822d9..026f4ba68041 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex @@ -64,6 +64,7 @@ defmodule BlockScoutWeb.API.V2.SearchView do "url" => address_path(Endpoint, :show, search_result.address_hash), "is_smart_contract_verified" => search_result.verified, "ens_info" => search_result[:ens_info], + "certified" => if(search_result.certified, do: search_result.certified, else: false), "priority" => search_result.priority } end diff --git a/apps/explorer/lib/explorer/chain/search.ex 
b/apps/explorer/lib/explorer/chain/search.ex index c863ee1e7105..3405753499ca 100644 --- a/apps/explorer/lib/explorer/chain/search.ex +++ b/apps/explorer/lib/explorer/chain/search.ex @@ -375,11 +375,12 @@ defmodule Explorer.Chain.Search do {:ok, address_hash} -> address_search_fields = search_fields() - |> Map.put(:address_hash, dynamic([address, _], address.hash)) + |> Map.put(:address_hash, dynamic([address, _, _], address.hash)) |> Map.put(:type, "address") - |> Map.put(:name, dynamic([_, address_name], address_name.name)) - |> Map.put(:inserted_at, dynamic([_, address_name], address_name.inserted_at)) - |> Map.put(:verified, dynamic([address, _], address.verified)) + |> Map.put(:name, dynamic([_, address_name, _], address_name.name)) + |> Map.put(:inserted_at, dynamic([_, address_name, _], address_name.inserted_at)) + |> Map.put(:verified, dynamic([address, _, _], address.verified)) + |> Map.put(:certified, dynamic([_, _, smart_contract], smart_contract.certified)) from(address in Address, left_join: @@ -391,6 +392,8 @@ defmodule Explorer.Chain.Search do ) ), on: address.hash == address_name.address_hash, + left_join: smart_contract in SmartContract, + on: address.hash == smart_contract.address_hash, where: address.hash == ^address_hash, select: ^address_search_fields ) From 8580be279656ef81785aa74bc8518fec4de57843 Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Mon, 20 May 2024 19:53:49 +0300 Subject: [PATCH 026/150] Update CHANGELOG for 6.6.0 --- CHANGELOG.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d919fad06d36..4ef25dcd5c77 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,9 @@ ### 🐛 Bug Fixes +- Fix certified flag in the search API v2 endpoint ([#10094](https://github.com/blockscout/blockscout/issues/10094)) +- Update Vyper inner compilers list to support all compilers ([#10091](https://github.com/blockscout/blockscout/issues/10091)) +- Add healthcheck endpoints for indexer-only 
setup ([#10076](https://github.com/blockscout/blockscout/issues/10076)) - Rework revert_reason ([#9212](https://github.com/blockscout/blockscout/issues/9212)) - Eliminate from_address_hash == #{address_hash} clause for transactions query in case of smart-contracts ([#9469](https://github.com/blockscout/blockscout/issues/9469)) - Separate indexer setup ([#10032](https://github.com/blockscout/blockscout/issues/10032)) @@ -27,12 +30,9 @@ - Fix Unknown UID bug at smart-contract verification ([#9986](https://github.com/blockscout/blockscout/issues/9986)) - Search for long integers ([#9651](https://github.com/blockscout/blockscout/issues/9651)) - Don't put error to NFT metadata ([#9940](https://github.com/blockscout/blockscout/issues/9940)) -- Fix typo - - Handle DB unavailability by PolygonZkevm.TransactionBatch fetcher ([#10031](https://github.com/blockscout/blockscout/issues/10031)) - Fix WebSocketClient reconnect ([#9937](https://github.com/blockscout/blockscout/issues/9937)) - Fix incorrect image_url parsing from NFT meta ([#9956](https://github.com/blockscout/blockscout/issues/9956)) -- Fix CI workflow name ### 🚜 Refactor From 77f313a89b169d84e536728060b49985530d2044 Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Mon, 20 May 2024 20:43:26 +0300 Subject: [PATCH 027/150] Remove custom release CI for Immutable --- .../workflows/pre-release-shibarium copy.yml | 87 +++++++++++++++++++ .../publish-docker-image-for-immutable.yml | 43 --------- 2 files changed, 87 insertions(+), 43 deletions(-) create mode 100644 .github/workflows/pre-release-shibarium copy.yml delete mode 100644 .github/workflows/publish-docker-image-for-immutable.yml diff --git a/.github/workflows/pre-release-shibarium copy.yml b/.github/workflows/pre-release-shibarium copy.yml new file mode 100644 index 000000000000..36080b3090ce --- /dev/null +++ b/.github/workflows/pre-release-shibarium copy.yml @@ -0,0 +1,87 @@ +name: Pre-release for Shibarium + +on: + workflow_dispatch: + inputs: + number: + 
type: number + required: true + +env: + OTP_VERSION: ${{ vars.OTP_VERSION }} + ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Setup repo + uses: ./.github/actions/setup-repo + with: + docker-username: ${{ secrets.DOCKER_USERNAME }} + docker-password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image for Shibarium (indexer + API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + platforms: | + linux/amd64 + build-args: | + DISABLE_WEBAPP=false + API_V1_READ_METHODS_DISABLED=false + API_V1_WRITE_METHODS_DISABLED=false + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium + + - name: Build and push Docker image for Shibarium (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + platforms: | + linux/amd64 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium + + - name: Build and push Docker image for Shibarium (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + platforms: | + linux/amd64 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=shibarium \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-immutable.yml b/.github/workflows/publish-docker-image-for-immutable.yml deleted file mode 100644 index 3a204bcfea06..000000000000 --- a/.github/workflows/publish-docker-image-for-immutable.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Immutable Publish Docker image - -on: - workflow_dispatch: - push: - branches: - - production-immutable -jobs: - push_to_registry: - name: Push Docker image to Docker Hub - runs-on: ubuntu-latest - env: - RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} - DOCKER_CHAIN_NAME: immutable - steps: - - uses: actions/checkout@v4 - - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha - with: - docker-username: ${{ secrets.DOCKER_USERNAME }} - docker-password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Build and push Docker image - uses: docker/build-push-action@v5 - with: - context: . 
- file: ./docker/Dockerfile - push: true - tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} - platforms: | - linux/amd64 - linux/arm64/v8 - build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=polygon_edge \ No newline at end of file From 2151248e10a701463f62917e655f1d7022346dfc Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Wed, 22 May 2024 12:09:50 +0200 Subject: [PATCH 028/150] feat: implement fetch_first_trace for Geth (#10087) * feat: implement fetch_first_trace for Geth * chore: add missing doc & spec --- .../api/rpc/transaction_controller_test.exs | 214 ++++++++++-------- .../views/transaction_view_test.exs | 94 ++++++-- .../lib/ethereum_jsonrpc/geth.ex | 38 +++- apps/explorer/lib/explorer/chain.ex | 14 +- apps/explorer/test/explorer/chain_test.exs | 91 +++++--- 5 files changed, 291 insertions(+), 160 deletions(-) diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs index bda682e10388..913849e8ca30 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/transaction_controller_test.exs @@ -664,21 +664,69 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do hex_reason = "0x08c379a0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000164e6f20637265646974206f662074686174207479706500000000000000000000" - 
# fail to trace_replayTransaction expect( EthereumJSONRPC.Mox, :json_rpc, - fn _json, [] -> - {:error, :econnrefused} - end - ) - - # fallback to eth_call - expect( - EthereumJSONRPC.Mox, - :json_rpc, - fn _json, [] -> - {:error, %{code: -32015, message: "VM execution error.", data: hex_reason}} + fn + [%{method: "debug_traceTransaction"}], _options -> + {:ok, + [ + %{ + id: 0, + result: %{ + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "gasUsed" => "0x5208", + "input" => "0x01", + "output" => hex_reason, + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "type" => "CALL", + "value" => "0x86b3" + } + } + ]} + + [%{method: "trace_replayTransaction"}], _options -> + {:ok, + [ + %{ + id: 0, + result: %{ + "output" => "0x", + "stateDiff" => nil, + "trace" => [ + %{ + "action" => %{ + "callType" => "call", + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "input" => "0x01", + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "value" => "0x86b3" + }, + "error" => "Reverted", + "result" => %{ + "gasUsed" => "0x5208", + "output" => hex_reason + }, + "subtraces" => 0, + "traceAddress" => [], + "type" => "call" + } + ], + "transactionHash" => "0xdf5574290913659a1ac404ccf2d216c40587f819400a52405b081dda728ac120", + "vmTrace" => nil + } + } + ]} + + %{method: "eth_call"}, _options -> + {:error, + %{ + code: 3, + data: hex_reason, + message: "execution reverted" + }} end ) @@ -720,91 +768,65 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do expect( EthereumJSONRPC.Mox, :json_rpc, - fn _json, [] -> - {:ok, - [ - %{ - id: 0, - result: %{ - "output" => "0x", - "stateDiff" => nil, - "trace" => [ - %{ - "action" => %{ - "callType" => "call", - "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", - "gas" => "0x5208", - "input" => "0x01", - "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", - "value" => "0x0" - }, - "error" => "Reverted", - "result" => %{ - "gasUsed" => "0x5208", - 
"output" => "0x" - }, - "subtraces" => 0, - "traceAddress" => [], - "type" => "call" - } - ], - "transactionHash" => "0xac2a7dab94d965893199e7ee01649e2d66f0787a4c558b3118c09e80d4df8269", - "vmTrace" => nil + fn + [%{method: "debug_traceTransaction"}], _options -> + {:ok, + [ + %{ + id: 0, + result: %{ + "error" => "Reverted", + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "gasUsed" => "0x5208", + "input" => "0x01", + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "type" => "CALL", + "value" => "0x86b3" + } } - } - ]} - end - ) - - params = %{ - "module" => "transaction", - "action" => "gettxinfo", - "txhash" => "#{transaction.hash}" - } - - assert response = - conn - |> get("/api", params) - |> json_response(200) - - assert response["result"]["revertReason"] == "0x" - assert response["status"] == "1" - assert response["message"] == "OK" - end - - test "with a txhash with empty revert reason from DB if eth_call doesn't return an error", %{conn: conn} do - block = insert(:block, number: 100, hash: "0x3e51328bccedee581e8ba35190216a61a5d67fd91ca528f3553142c0c7d18391") - - transaction = - :transaction - |> insert( - error: "Reverted", - status: :error, - block_hash: block.hash, - block_number: block.number, - cumulative_gas_used: 884_322, - gas_used: 106_025, - index: 0, - hash: "0xac2a7dab94d965893199e7ee01649e2d66f0787a4c558b3118c09e80d4df8269" - ) - - insert(:address) - - # fail to trace_replayTransaction - expect( - EthereumJSONRPC.Mox, - :json_rpc, - fn _json, [] -> - {:error, :econnrefused} - end - ) + ]} + + [%{method: "trace_replayTransaction"}], _options -> + {:ok, + [ + %{ + id: 0, + result: %{ + "output" => "0x", + "stateDiff" => nil, + "trace" => [ + %{ + "action" => %{ + "callType" => "call", + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "input" => "0x01", + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "value" => "0x86b3" + }, + "error" => "Reverted", + "result" => %{ + 
"gasUsed" => "0x5208", + "output" => "0x" + }, + "subtraces" => 0, + "traceAddress" => [], + "type" => "call" + } + ], + "transactionHash" => "0xdf5574290913659a1ac404ccf2d216c40587f819400a52405b081dda728ac120", + "vmTrace" => nil + } + } + ]} - # fallback to eth_call - expect( - EthereumJSONRPC.Mox, - :json_rpc, - fn _json, [] -> - {:ok, :ok} + %{method: "eth_call"}, _options -> + {:error, + %{ + code: 3, + message: "execution reverted" + }} end ) @@ -819,7 +841,7 @@ defmodule BlockScoutWeb.API.RPC.TransactionControllerTest do |> get("/api", params) |> json_response(200) - assert response["result"]["revertReason"] == "" + assert response["result"]["revertReason"] in ["", "0x"] assert response["status"] == "1" assert response["message"] == "OK" end diff --git a/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs b/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs index 7ce84f652ae9..a28f2b579e90 100644 --- a/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs +++ b/apps/block_scout_web/test/block_scout_web/views/transaction_view_test.exs @@ -291,35 +291,81 @@ defmodule BlockScoutWeb.TransactionViewTest do describe "transaction_revert_reason/2" do test "handles transactions with gas_price set to nil" do - transaction = insert(:transaction, gas_price: nil, error: "execution reverted") + transaction = + :transaction + |> insert(error: "execution reverted") + |> with_block() + |> Map.put(:gas_price, nil) - # fail to trace_replayTransaction - EthereumJSONRPC.Mox - |> expect( + hex_reason = + "0x08c379a00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002b556e69737761705632526f757465723a20494e53554646494349454e545f4f55545055545f414d4f554e54000000000000000000000000000000000000000000" + + expect( + EthereumJSONRPC.Mox, :json_rpc, - fn _json, [] -> - {:error, :econnrefused} + fn + [%{method: "debug_traceTransaction"}], _options -> + 
{:ok, + [ + %{ + id: 0, + result: %{ + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "gasUsed" => "0x5208", + "input" => "0x01", + "output" => hex_reason, + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "type" => "CALL", + "value" => "0x86b3" + } + } + ]} + + [%{method: "trace_replayTransaction"}], _options -> + {:ok, + [ + %{ + id: 0, + result: %{ + "output" => "0x", + "stateDiff" => nil, + "trace" => [ + %{ + "action" => %{ + "callType" => "call", + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "input" => "0x01", + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "value" => "0x86b3" + }, + "error" => "Reverted", + "result" => %{ + "gasUsed" => "0x5208", + "output" => hex_reason + }, + "subtraces" => 0, + "traceAddress" => [], + "type" => "call" + } + ], + "transactionHash" => "0xdf5574290913659a1ac404ccf2d216c40587f819400a52405b081dda728ac120", + "vmTrace" => nil + } + } + ]} + + %{method: "eth_call"}, _options -> + {:error, + %{ + code: 3, + data: hex_reason, + message: "execution reverted" + }} end ) - # fallback to eth_call - EthereumJSONRPC.Mox - |> expect(:json_rpc, fn %{ - id: 0, - method: "eth_call", - params: [ - %{gasPrice: "0x0"}, - "latest" - ] - }, - _options -> - {:error, - %{ - data: - "0x08c379a00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002b556e69737761705632526f757465723a20494e53554646494349454e545f4f55545055545f414d4f554e54000000000000000000000000000000000000000000" - }} - end) - revert_reason = TransactionView.transaction_revert_reason(transaction, nil) assert revert_reason == diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex index be07754c18a6..a93bce430708 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex @@ -50,6 +50,7 @@ defmodule 
EthereumJSONRPC.Geth do parsed_timeout -> json_rpc_named_arguments + |> Keyword.update(:transport_options, [http_options: []], &Keyword.put_new(&1, :http_options, [])) |> put_in([:transport_options, :http_options, :timeout], parsed_timeout) |> put_in([:transport_options, :http_options, :recv_timeout], parsed_timeout) end @@ -59,7 +60,26 @@ defmodule EthereumJSONRPC.Geth do Fetches the first trace from the trace URL. """ @impl EthereumJSONRPC.Variant - def fetch_first_trace(_transactions_params, _json_rpc_named_arguments), do: :ignore + def fetch_first_trace(transactions_params, json_rpc_named_arguments) when is_list(transactions_params) do + id_to_params = id_to_params(transactions_params) + + json_rpc_named_arguments_corrected_timeout = correct_timeouts(json_rpc_named_arguments) + + with {:ok, responses} <- + id_to_params + |> debug_trace_transaction_requests(true) + |> json_rpc(json_rpc_named_arguments_corrected_timeout), + {:ok, [first_trace]} <- + debug_trace_transaction_responses_to_internal_transactions_params( + responses, + id_to_params, + json_rpc_named_arguments_corrected_timeout + ) do + %{block_hash: block_hash} = transactions_params |> Enum.at(0) + + {:ok, [%{first_trace: first_trace, block_hash: block_hash, json_rpc_named_arguments: json_rpc_named_arguments}]} + end + end @doc """ Fetches the `t:Explorer.Chain.InternalTransaction.changeset/2` params from the Geth trace URL. 
@@ -142,9 +162,9 @@ defmodule EthereumJSONRPC.Geth do PendingTransaction.fetch_pending_transactions_geth(json_rpc_named_arguments) end - def debug_trace_transaction_requests(id_to_params) when is_map(id_to_params) do + def debug_trace_transaction_requests(id_to_params, only_first_trace \\ false) when is_map(id_to_params) do Enum.map(id_to_params, fn {id, %{hash_data: hash_data}} -> - debug_trace_transaction_request(%{id: id, hash_data: hash_data}) + debug_trace_transaction_request(%{id: id, hash_data: hash_data}, only_first_trace) end) end @@ -156,13 +176,13 @@ defmodule EthereumJSONRPC.Geth do @external_resource @tracer_path @tracer File.read!(@tracer_path) - defp debug_trace_transaction_request(%{id: id, hash_data: hash_data}) do + defp debug_trace_transaction_request(%{id: id, hash_data: hash_data}, only_first_trace) do debug_trace_timeout = Application.get_env(:ethereum_jsonrpc, __MODULE__)[:debug_trace_timeout] request(%{ id: id, method: "debug_traceTransaction", - params: [hash_data, %{timeout: debug_trace_timeout} |> Map.merge(tracer_params())] + params: [hash_data, %{timeout: debug_trace_timeout} |> Map.merge(tracer_params(only_first_trace))] }) end @@ -179,7 +199,7 @@ defmodule EthereumJSONRPC.Geth do }) end - defp tracer_params do + defp tracer_params(only_first_trace \\ false) do cond do tracer_type() == "js" -> %{"tracer" => @tracer} @@ -193,7 +213,11 @@ defmodule EthereumJSONRPC.Geth do } true -> - %{"tracer" => "callTracer"} + if only_first_trace do + %{"tracer" => "callTracer", "tracerConfig" => %{"onlyTopCall" => true}} + else + %{"tracer" => "callTracer"} + end end end diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 590017fda803..c698ba938798 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -118,6 +118,7 @@ defmodule Explorer.Chain do @revert_msg_prefix_4 "Reverted " # Geth-like node @revert_msg_prefix_5 "execution reverted: " + @revert_msg_prefix_6_empty 
"execution reverted" @limit_showing_transactions 10_000 @default_page_size 50 @@ -3029,14 +3030,22 @@ defmodule Explorer.Chain do end end + @doc """ + Parses the revert reason from an error returned by JSON RPC node during eth_call. + Returns the formatted revert reason as a hex or utf8 string. + Returns `nil` if the revert reason cannot be parsed or error format is unknown. + """ + @spec parse_revert_reason_from_error(any()) :: String.t() | nil def parse_revert_reason_from_error(%{data: data}), do: format_revert_data(data) def parse_revert_reason_from_error(%{message: message}), do: format_revert_reason_message(message) + def parse_revert_reason_from_error(_), do: nil + defp format_revert_data(revert_data) do case revert_data do "revert" -> - "0x" + "" "0x" <> _ -> revert_data @@ -3063,6 +3072,9 @@ defmodule Explorer.Chain do @revert_msg_prefix_5 <> rest -> rest + @revert_msg_prefix_6_empty -> + "" + _ -> nil end diff --git a/apps/explorer/test/explorer/chain_test.exs b/apps/explorer/test/explorer/chain_test.exs index efe47a76030f..2b71b6ef9ec8 100644 --- a/apps/explorer/test/explorer/chain_test.exs +++ b/apps/explorer/test/explorer/chain_test.exs @@ -4242,39 +4242,66 @@ defmodule Explorer.ChainTest do expect( EthereumJSONRPC.Mox, :json_rpc, - fn _json, [] -> - {:ok, - [ - %{ - id: 0, - result: %{ - "output" => "0x", - "stateDiff" => nil, - "trace" => [ - %{ - "action" => %{ - "callType" => "call", - "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", - "gas" => "0x5208", - "input" => "0x01", - "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", - "value" => "0x86b3" - }, - "error" => "Reverted", - "result" => %{ - "gasUsed" => "0x5208", - "output" => hex_reason - }, - "subtraces" => 0, - "traceAddress" => [], - "type" => "call" - } - ], - "transactionHash" => "0xdf5574290913659a1ac404ccf2d216c40587f819400a52405b081dda728ac120", - "vmTrace" => nil + fn + [%{method: "debug_traceTransaction"}], _options -> + {:ok, + [ + %{ + id: 0, + result: %{ + "from" => 
"0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "gasUsed" => "0x5208", + "input" => "0x01", + "output" => hex_reason, + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "type" => "CALL", + "value" => "0x86b3" + } } - } - ]} + ]} + + [%{method: "trace_replayTransaction"}], _options -> + {:ok, + [ + %{ + id: 0, + result: %{ + "output" => "0x", + "stateDiff" => nil, + "trace" => [ + %{ + "action" => %{ + "callType" => "call", + "from" => "0x6a17ca3bbf83764791f4a9f2b4dbbaebbc8b3e0d", + "gas" => "0x5208", + "input" => "0x01", + "to" => "0x7ed1e469fcb3ee19c0366d829e291451be638e59", + "value" => "0x86b3" + }, + "error" => "Reverted", + "result" => %{ + "gasUsed" => "0x5208", + "output" => hex_reason + }, + "subtraces" => 0, + "traceAddress" => [], + "type" => "call" + } + ], + "transactionHash" => "0xdf5574290913659a1ac404ccf2d216c40587f819400a52405b081dda728ac120", + "vmTrace" => nil + } + } + ]} + + %{method: "eth_call"}, _options -> + {:error, + %{ + code: 3, + data: hex_reason, + message: "execution reverted" + }} end ) From 8493441c9dd609b31216d4dd63d32156ba030f19 Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Wed, 22 May 2024 13:12:18 +0300 Subject: [PATCH 029/150] Update CHANGELOG --- CHANGELOG.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4ef25dcd5c77..626675b15330 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ ### 🚀 Features +- Implement fetch_first_trace for Geth ([#10087](https://github.com/blockscout/blockscout/issues/10087)) - Add optional retry of NFT metadata fetch in Indexer.Fetcher.Tok… ([#10036](https://github.com/blockscout/blockscout/issues/10036)) - Blueprint contracts support ([#10058](https://github.com/blockscout/blockscout/issues/10058)) - Clone with immutable arguments proxy pattern ([#10039](https://github.com/blockscout/blockscout/issues/10039)) @@ -49,8 +50,6 @@ - Add support of Blast-specific L1 OP withdrawal events 
([#10049](https://github.com/blockscout/blockscout/issues/10049)) - Update outdated links to ETH JSON RPC Specification in docstrings ([#10041](https://github.com/blockscout/blockscout/issues/10041)) - Migrate to GET variant of {{metadata_url}}/api/v1/metadata ([#9994](https://github.com/blockscout/blockscout/issues/9994)) -- CI for Arbitrum on-demand Docker images - - Bump ex_cldr_numbers from 2.32.4 to 2.33.1 ([#9978](https://github.com/blockscout/blockscout/issues/9978)) - Bump ex_cldr from 2.38.0 to 2.38.1 ([#10009](https://github.com/blockscout/blockscout/issues/10009)) - Bump ex_cldr_units from 3.16.5 to 3.17.0 ([#9931](https://github.com/blockscout/blockscout/issues/9931)) From c6c9a0ca250d7452d62886dadbbd4194fa701b3c Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Thu, 23 May 2024 00:23:27 +0300 Subject: [PATCH 030/150] Fix GA pre-release && release workflows --- .../workflows/pre-release-shibarium copy.yml | 87 ------------------- .github/workflows/pre-release.yml | 2 +- .github/workflows/release.yml | 2 +- 3 files changed, 2 insertions(+), 89 deletions(-) delete mode 100644 .github/workflows/pre-release-shibarium copy.yml diff --git a/.github/workflows/pre-release-shibarium copy.yml b/.github/workflows/pre-release-shibarium copy.yml deleted file mode 100644 index 36080b3090ce..000000000000 --- a/.github/workflows/pre-release-shibarium copy.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: Pre-release for Shibarium - -on: - workflow_dispatch: - inputs: - number: - type: number - required: true - -env: - OTP_VERSION: ${{ vars.OTP_VERSION }} - ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} - -jobs: - push_to_registry: - name: Push Docker image to Docker Hub - runs-on: ubuntu-latest - env: - RELEASE_VERSION: ${{ vars.RELEASE_VERSION }} - steps: - - uses: actions/checkout@v4 - - name: Setup repo - uses: ./.github/actions/setup-repo - with: - docker-username: ${{ secrets.DOCKER_USERNAME }} - docker-password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Build and push 
Docker image for Shibarium (indexer + API) - uses: docker/build-push-action@v5 - with: - context: . - file: ./docker/Dockerfile - push: true - tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - platforms: | - linux/amd64 - build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=shibarium - - - name: Build and push Docker image for Shibarium (indexer) - uses: docker/build-push-action@v5 - with: - context: . - file: ./docker/Dockerfile - push: true - tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer - platforms: | - linux/amd64 - build-args: | - DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=shibarium - - - name: Build and push Docker image for Shibarium (API) - uses: docker/build-push-action@v5 - with: - context: . 
- file: ./docker/Dockerfile - push: true - tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api - platforms: | - linux/amd64 - build-args: | - DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} - RELEASE_VERSION=${{ env.RELEASE_VERSION }} - CHAIN_TYPE=shibarium \ No newline at end of file diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 52baad99de4d..1d971c883edc 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -88,7 +88,7 @@ jobs: push: true cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max - tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 880ea35cd376..84cc52b18f45 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -85,7 +85,7 @@ jobs: push: true cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max - tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-indexer + tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-api platforms: | linux/amd64 linux/arm64/v8 From 3e551ba2424bda1e15b3e7c9ecc031504c96207a Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Thu, 23 May 2024 13:05:45 +0300 Subject: [PATCH 031/150] Improve response of address API to return multiple implementations for Diamond proxy (#10113) --- .../views/api/v2/address_view.ex | 33 +++++++------------ 
.../block_scout_web/views/api/v2/helper.ex | 10 ++++-- .../account/api/v2/user_controller_test.exs | 4 +-- .../api/v2/address_controller_test.exs | 5 ++- 4 files changed, 24 insertions(+), 28 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex index 32a1296499bd..abd555a1cd37 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex @@ -95,24 +95,24 @@ defmodule BlockScoutWeb.API.V2.AddressView do is_proxy = AddressView.smart_contract_is_proxy?(address_with_smart_contract, @api_true) - {implementation_addresses, implementation_names} = + implementations = with true <- is_proxy, {addresses, names} <- Implementation.get_implementation(address_with_smart_contract.smart_contract, @api_true), false <- addresses && Enum.empty?(addresses) do addresses |> Enum.zip(names) - |> Enum.reduce({[], []}, fn {address, name}, {addresses, names} = acc -> + |> Enum.reduce([], fn {address, name}, acc -> with {:ok, address_hash} <- Chain.string_to_address_hash(address), checksummed_address <- Address.checksum(address_hash) do - {[checksummed_address | addresses], [name | names]} + [%{"address" => checksummed_address, "name" => name} | acc] else _ -> acc end end) else _ -> - {[], []} + [] end balance = address.fetched_coin_balance && address.fetched_coin_balance.value @@ -123,8 +123,7 @@ defmodule BlockScoutWeb.API.V2.AddressView do token = address.token && TokenView.render("token.json", %{token: address.token}) # todo: added for backward compatibility, remove when frontend unbound from these props - {implementation_address, implementation_name} = - single_implementation(implementation_addresses, implementation_names) + {implementation_address, implementation_name} = single_implementation(implementations) Map.merge(base_info, %{ "creator_address_hash" => creator_hash && 
Address.checksum(creator_hash), @@ -133,10 +132,9 @@ defmodule BlockScoutWeb.API.V2.AddressView do "coin_balance" => balance, "exchange_rate" => exchange_rate, # todo: added for backward compatibility, remove when frontend unbound from these props - "implementation_name" => implementation_name, - "implementation_names" => implementation_names, "implementation_address" => implementation_address, - "implementation_addresses" => implementation_addresses, + "implementation_name" => implementation_name, + "implementations" => implementations, "block_number_balance_updated_at" => address.fetched_coin_balance_block_number, "has_decompiled_code" => AddressView.has_decompiled_code?(address), "has_validated_blocks" => Counters.check_if_validated_blocks_at_address(address.hash, @api_true), @@ -148,19 +146,12 @@ defmodule BlockScoutWeb.API.V2.AddressView do }) end - defp single_implementation(implementation_addresses, implementation_names) do - implementation_name = - if implementation_names && !Enum.empty?(implementation_names) do - implementation_names |> Enum.at(0) + defp single_implementation(implementations) do + %{"address" => implementation_address, "name" => implementation_name} = + if implementations && !Enum.empty?(implementations) do + implementations |> Enum.at(0) else - nil - end - - implementation_address = - if implementation_addresses && !Enum.empty?(implementation_addresses) do - implementation_addresses |> Enum.at(0) - else - nil + %{"address" => nil, "name" => nil} end {implementation_address, implementation_name} diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index 7cbf82adb83f..f7ae3593c234 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -56,6 +56,12 @@ defmodule BlockScoutWeb.API.V2.Helper do def address_with_info(%Address{} = address, _address_hash) do 
implementation_names = Implementation.names(address) + formatted_implementation_names = + implementation_names + |> Enum.map(fn name -> + %{"name" => name} + end) + implementation_name = if Enum.empty?(implementation_names) do nil @@ -69,7 +75,7 @@ defmodule BlockScoutWeb.API.V2.Helper do "name" => address_name(address), # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => implementation_name, - "implementation_names" => implementation_names, + "implementations" => formatted_implementation_names, "is_verified" => verified?(address), "ens_domain_name" => address.ens_domain_name, "metadata" => address.metadata @@ -98,7 +104,7 @@ defmodule BlockScoutWeb.API.V2.Helper do "name" => nil, # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => nil, - "implementation_names" => [], + "implementations" => [], "is_verified" => nil, "ens_domain_name" => nil, "metadata" => nil diff --git a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs index 4ff5086ea5ad..4c80ab128dab 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs @@ -153,7 +153,7 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do "hash" => Address.checksum(addr), # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => nil, - "implementation_names" => [], + "implementations" => [], "is_contract" => false, "is_verified" => false, "name" => nil, @@ -209,7 +209,7 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do "hash" => Address.checksum(addr), # todo: added for backward compatibility, remove when frontend unbound from these props 
"implementation_name" => nil, - "implementation_names" => [], + "implementations" => [], "is_contract" => false, "is_verified" => false, "name" => nil, diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs index 1d41ca8d6dae..6bdfc7d5e76e 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs @@ -74,9 +74,8 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do "exchange_rate" => nil, # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => nil, - "implementation_names" => [], "implementation_address" => nil, - "implementation_addresses" => [], + "implementations" => [], "block_number_balance_updated_at" => nil, "has_decompiled_code" => false, "has_validated_blocks" => false, @@ -136,7 +135,7 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do "creator_address_hash" => ^from, "creation_tx_hash" => ^tx_hash, "implementation_address" => ^implementation_address_hash_string, - "implementation_addresses" => [^implementation_address_hash_string] + "implementations" => [%{"address" => ^implementation_address_hash_string, "name" => nil}] } = json_response(request, 200) end From fff614da41ff4dcb65305be6df9d0cf747ee7ba1 Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Thu, 23 May 2024 13:08:49 +0300 Subject: [PATCH 032/150] Update CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 626675b15330..5467cd686b1f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,7 @@ ### 🚜 Refactor +- Improve response of address API to return multiple implementations for Diamond proxy ([#10113](https://github.com/blockscout/blockscout/pull/10113)) - Refactor get_additional_sources/4 -> 
get_additional_sources/3 ([#10046](https://github.com/blockscout/blockscout/issues/10046)) - Test database config ([#9662](https://github.com/blockscout/blockscout/issues/9662)) From d78223929dc8b2a1e8420b6f911bff8c3868603e Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Thu, 23 May 2024 17:09:30 +0300 Subject: [PATCH 033/150] Indexer/API separated images for Redstone --- .github/workflows/release-redstone.yml | 44 ++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/.github/workflows/release-redstone.yml b/.github/workflows/release-redstone.yml index 9207dd195ac8..8f0e22bc059e 100644 --- a/.github/workflows/release-redstone.yml +++ b/.github/workflows/release-redstone.yml @@ -43,4 +43,48 @@ jobs: BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism + MUD_INDEXER_ENABLED=true + + - name: Build and push Docker image for Redstone (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-redstone:${{ env.RELEASE_VERSION }}-indexer + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism + MUD_INDEXER_ENABLED=true + + - name: Build and push Docker image for Redstone (API) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-redstone:${{ env.RELEASE_VERSION }}-api + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + DISABLE_WEBAPP=true + CACHE_EXCHANGE_RATES_PERIOD= + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + ADMIN_PANEL_ENABLED=false + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=optimism MUD_INDEXER_ENABLED=true \ No newline at end of file From bf3f32137d6fdf8aa7c4ff83cd87fc579396ef4c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 May 2024 11:15:53 +0300 Subject: [PATCH 034/150] chore: Bump ecto_sql from 3.11.1 to 3.11.2 updated-dependencies: - dependency-name: ecto_sql dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mix.lock b/mix.lock index 8973c1f953a9..d5db0e1a8b52 100644 --- a/mix.lock +++ b/mix.lock @@ -38,7 +38,7 @@ "digital_token": {:hex, :digital_token, "0.6.0", "13e6de581f0b1f6c686f7c7d12ab11a84a7b22fa79adeb4b50eec1a2d278d258", [:mix], [{:cldr_utils, "~> 2.17", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "2455d626e7c61a128b02a4a8caddb092548c3eb613ac6f6a85e4cbb6caddc4d1"}, "earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"}, "ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, 
repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"}, - "ecto_sql": {:hex, :ecto_sql, "3.11.1", "e9abf28ae27ef3916b43545f9578b4750956ccea444853606472089e7d169470", [:mix], [{:db_connection, "~> 2.5 or ~> 2.4.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ce14063ab3514424276e7e360108ad6c2308f6d88164a076aac8a387e1fea634"}, + "ecto_sql": {:hex, :ecto_sql, "3.11.2", "c7cc7f812af571e50b80294dc2e535821b3b795ce8008d07aa5f336591a185a8", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "73c07f995ac17dbf89d3cfaaf688fcefabcd18b7b004ac63b0dc4ef39499ed6b"}, "elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", 
"2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, "ex_abi": {:hex, :ex_abi, "0.7.2", "9950d8aa764c74b8c89cc279c72ac786675aca315c08bc06b4a387407fe67873", [:mix], [{:ex_keccak, "~> 0.7.5", [hex: :ex_keccak, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1ca991d6eb497959aab4b3aaeb7d0f71b67d4b617d5689113da82d6e4cedc408"}, @@ -109,7 +109,7 @@ "plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"}, "poison": {:hex, :poison, "4.0.1", "bcb755a16fac91cad79bfe9fc3585bb07b9331e50cfe3420a24bcc2d735709ae", [:mix], [], "hexpm", "ba8836feea4b394bb718a161fc59a288fe0109b5006d6bdf97b6badfcf6f0f25"}, "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"}, - "postgrex": {:hex, :postgrex, "0.17.5", "0483d054938a8dc069b21bdd636bf56c487404c241ce6c319c1f43588246b281", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "50b8b11afbb2c4095a3ba675b4f055c416d0f3d7de6633a595fc131a828a67eb"}, + "postgrex": {:hex, :postgrex, "0.18.0", "f34664101eaca11ff24481ed4c378492fed2ff416cd9b06c399e90f321867d7e", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", 
"a042989ba1bc1cca7383ebb9e461398e3f89f868c92ce6671feb7ef132a252d1"}, "prometheus": {:hex, :prometheus, "4.11.0", "b95f8de8530f541bd95951e18e355a840003672e5eda4788c5fa6183406ba29a", [:mix, :rebar3], [{:quantile_estimator, "~> 0.2.1", [hex: :quantile_estimator, repo: "hexpm", optional: false]}], "hexpm", "719862351aabf4df7079b05dc085d2bbcbe3ac0ac3009e956671b1d5ab88247d"}, "prometheus_ecto": {:hex, :prometheus_ecto, "1.4.3", "3dd4da1812b8e0dbee81ea58bb3b62ed7588f2eae0c9e97e434c46807ff82311", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm", "8d66289f77f913b37eda81fd287340c17e61a447549deb28efc254532b2bed82"}, "prometheus_ex": {:git, "https://github.com/lanodan/prometheus.ex", "31f7fbe4b71b79ba27efc2a5085746c4011ceb8f", [branch: "fix/elixir-1.14"]}, From 35c885def5896bb56eacdda274ac859493d09861 Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Mon, 27 May 2024 11:17:24 +0300 Subject: [PATCH 035/150] feat: indexer for cross level messages on Arbitrum (#9312) * Initial version of x-level messages indexer * fixes for cspell and credo * new state of x-level messages * Monitoring of new L1-to-L2 messages on L1 * new batches discovery * fetcher workers in separate modules * proper name * Fix for responses without "id", e.g. 
"Too Many Requests" * update DB with new batches and corresponding data * update DB with confirmed blocks * fixes for cspell and credo * tracking commitments confirmations for L1 to L2 messages * Proper usign of max function * tracking completion of L2 to L1 messages * catchup historical messages to L2 * incorrect version of committed file * catchup historical messages from L2 and completion of L1-to-L2 messages * historical batches catchup * status for historical l2-to-l1 messages * address matching issue * catchup historical executions of L2-to-L1 messages * db query to find unconfirmed blocks gaps * first changes to catchup historical confirmations * finalized catchup of historical confirmations * 4844 blobs support * fix for the issue with multiple confirmations * limit amount of batches to handle at once * Use latest L1 block by fetchers if start block is not configured * merge issue fix * missed file * historical messages discovery * reduce logs severity * first iteration to improve documentation for new functionality * second iteration to improve documentation for new functionality * third iteration to improve documentation for new functionality * fourth iteration to improve documentation for new functionality * fifth iteration to improve documentation for new functionality * final iteration to improve documentation for new functionality * merge issues addressed * code review issues addressed * code review issues addressed * fix merge issue * raising exception in the case of DB inconsistency * fix formatting issue * termination case for RollupMessagesCatchup * code review comments addressed * code review comments addressed * consistency in primary keys * dialyzer fix * code review comments addressed * missed doc comment * code review comments addressed * updated indices creation as per code review comments * fix merge issue * configuration of intervals as time variables * TODO added to reflect improvement ability * database fields refactoring * association 
renaming * feat: APIv2 endpoints for Arbitrum messages and batches (#9963) * Arbitrum related info in Transaction and Block views * Views to get info about batches and messages * usage of committed for batches instead of confirmed * merge issues addressed * changes after merge * formatting issue fix * code review comment addressed * associations and fields in api response renamed * format issue addressed * feat: Arbitrum-specific fields in the block and transaction API endpoints (#10067) * Arbitrum related info in Transaction and Block views * Views to get info about batches and messages * usage of committed for batches instead of confirmed * merge issues addressed * changes after merge * formatting issue fix * block and transaction views extended * code review comment addressed * associations and fields in api response renamed * format issue addressed * fix credo issue * fix tests issues * ethereumjsonrpc test fail investigation * test issues fixes --- .../lib/block_scout_web/api_router.ex | 24 + .../lib/block_scout_web/chain.ex | 12 +- .../controllers/api/v2/arbitrum_controller.ex | 163 +++ .../controllers/api/v2/block_controller.ex | 50 +- .../api/v2/transaction_controller.ex | 47 +- .../views/api/v2/arbitrum_view.ex | 425 +++++++ .../views/api/v2/block_view.ex | 10 + .../block_scout_web/views/api/v2/helper.ex | 40 + .../views/api/v2/transaction_view.ex | 14 + .../views/api/v2/zksync_view.ex | 54 +- apps/block_scout_web/mix.exs | 3 +- .../lib/ethereum_jsonrpc/block.ex | 63 +- .../lib/ethereum_jsonrpc/blocks.ex | 5 + .../lib/ethereum_jsonrpc/http.ex | 48 +- .../lib/ethereum_jsonrpc/receipt.ex | 91 +- .../lib/ethereum_jsonrpc/receipts.ex | 3 + .../lib/ethereum_jsonrpc/transaction.ex | 14 +- .../test/ethereum_jsonrpc/block_test.exs | 60 +- apps/explorer/config/dev.exs | 3 + apps/explorer/config/prod.exs | 4 + apps/explorer/config/test.exs | 1 + apps/explorer/lib/explorer/application.ex | 1 + apps/explorer/lib/explorer/chain.ex | 63 +- 
.../explorer/chain/arbitrum/batch_block.ex | 53 + .../chain/arbitrum/batch_transaction.ex | 52 + .../lib/explorer/chain/arbitrum/l1_batch.ex | 62 + .../explorer/chain/arbitrum/l1_execution.ex | 46 + .../chain/arbitrum/lifecycle_transaction.ex | 54 + .../lib/explorer/chain/arbitrum/message.ex | 57 + .../lib/explorer/chain/arbitrum/reader.ex | 913 +++++++++++++++ apps/explorer/lib/explorer/chain/block.ex | 59 +- .../lib/explorer/chain/cache/helper.ex | 16 + .../import/runner/arbitrum/batch_blocks.ex | 104 ++ .../runner/arbitrum/batch_transactions.ex | 79 ++ .../import/runner/arbitrum/l1_batches.ex | 112 ++ .../import/runner/arbitrum/l1_executions.ex | 102 ++ .../runner/arbitrum/lifecycle_transactions.ex | 107 ++ .../chain/import/runner/arbitrum/messages.ex | 117 ++ .../chain/import/runner/transactions.ex | 88 +- .../chain/import/stage/block_referencing.ex | 16 +- .../lib/explorer/chain/transaction.ex | 81 +- .../lib/explorer/chain_spec/genesis_data.ex | 1 + apps/explorer/lib/explorer/repo.ex | 10 + .../explorer/utility/missing_block_range.ex | 34 +- .../20240201125730_create_arbitrum_tables.exs | 124 ++ ...58_extend_transaction_and_block_tables.exs | 15 + apps/explorer/test/support/factory.ex | 28 + apps/indexer/lib/indexer/block/fetcher.ex | 12 +- .../lib/indexer/fetcher/arbitrum/messaging.ex | 295 +++++ .../arbitrum/rollup_messages_catchup.ex | 365 ++++++ .../arbitrum/tracking_batches_statuses.ex | 459 ++++++++ .../arbitrum/tracking_messages_on_l1.ex | 223 ++++ .../lib/indexer/fetcher/arbitrum/utils/db.ex | 787 +++++++++++++ .../indexer/fetcher/arbitrum/utils/helper.ex | 86 ++ .../indexer/fetcher/arbitrum/utils/logging.ex | 162 +++ .../lib/indexer/fetcher/arbitrum/utils/rpc.ex | 391 +++++++ .../workers/historical_messages_on_l2.ex | 284 +++++ .../arbitrum/workers/l1_finalization.ex | 74 ++ .../fetcher/arbitrum/workers/new_batches.ex | 975 ++++++++++++++++ .../arbitrum/workers/new_confirmations.ex | 1034 +++++++++++++++++ .../arbitrum/workers/new_l1_executions.ex | 
413 +++++++ .../arbitrum/workers/new_messages_to_l2.ex | 346 ++++++ apps/indexer/lib/indexer/fetcher/optimism.ex | 1 + .../lib/indexer/fetcher/polygon_edge.ex | 1 + .../indexer/fetcher/polygon_zkevm/bridge.ex | 52 +- .../lib/indexer/fetcher/zksync/utils/db.ex | 2 + .../lib/indexer/fetcher/zksync/utils/rpc.ex | 10 +- apps/indexer/lib/indexer/helper.ex | 259 ++++- apps/indexer/lib/indexer/supervisor.ex | 12 + .../indexer/transform/arbitrum/messaging.ex | 44 + .../indexer/transform/transaction_actions.ex | 74 +- config/config_helper.exs | 1 + config/runtime.exs | 43 +- config/runtime/dev.exs | 9 + config/runtime/prod.exs | 8 + cspell.json | 13 +- 76 files changed, 9684 insertions(+), 279 deletions(-) create mode 100644 apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex create mode 100644 apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex create mode 100644 apps/explorer/lib/explorer/chain/arbitrum/batch_block.ex create mode 100644 apps/explorer/lib/explorer/chain/arbitrum/batch_transaction.ex create mode 100644 apps/explorer/lib/explorer/chain/arbitrum/l1_batch.ex create mode 100644 apps/explorer/lib/explorer/chain/arbitrum/l1_execution.ex create mode 100644 apps/explorer/lib/explorer/chain/arbitrum/lifecycle_transaction.ex create mode 100644 apps/explorer/lib/explorer/chain/arbitrum/message.ex create mode 100644 apps/explorer/lib/explorer/chain/arbitrum/reader.ex create mode 100644 apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_blocks.ex create mode 100644 apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_transactions.ex create mode 100644 apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_batches.ex create mode 100644 apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_executions.ex create mode 100644 apps/explorer/lib/explorer/chain/import/runner/arbitrum/lifecycle_transactions.ex create mode 100644 apps/explorer/lib/explorer/chain/import/runner/arbitrum/messages.ex create 
mode 100644 apps/explorer/priv/arbitrum/migrations/20240201125730_create_arbitrum_tables.exs create mode 100644 apps/explorer/priv/arbitrum/migrations/20240510184858_extend_transaction_and_block_tables.exs create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/utils/db.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/utils/helper.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/utils/logging.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex create mode 100644 apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex create mode 100644 apps/indexer/lib/indexer/transform/arbitrum/messaging.ex diff --git a/apps/block_scout_web/lib/block_scout_web/api_router.ex b/apps/block_scout_web/lib/block_scout_web/api_router.ex index e7ad4338e2f9..53eae5775bff 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/api_router.ex @@ -196,6 +196,10 @@ defmodule BlockScoutWeb.ApiRouter do get("/zksync-batch/:batch_number", V2.TransactionController, :zksync_batch) end + if Application.compile_env(:explorer, :chain_type) == :arbitrum do + get("/arbitrum-batch/:batch_number", 
V2.TransactionController, :arbitrum_batch) + end + if Application.compile_env(:explorer, :chain_type) == :suave do get("/execution-node/:execution_node_hash_param", V2.TransactionController, :execution_node) end @@ -219,6 +223,10 @@ defmodule BlockScoutWeb.ApiRouter do get("/:block_hash_or_number/transactions", V2.BlockController, :transactions) get("/:block_hash_or_number/internal-transactions", V2.BlockController, :internal_transactions) get("/:block_hash_or_number/withdrawals", V2.BlockController, :withdrawals) + + if Application.compile_env(:explorer, :chain_type) == :arbitrum do + get("/arbitrum-batch/:batch_number", V2.BlockController, :arbitrum_batch) + end end scope "/addresses" do @@ -277,6 +285,12 @@ defmodule BlockScoutWeb.ApiRouter do get("/zksync/batches/confirmed", V2.ZkSyncController, :batches_confirmed) get("/zksync/batches/latest-number", V2.ZkSyncController, :batch_latest_number) end + + if Application.compile_env(:explorer, :chain_type) == :arbitrum do + get("/arbitrum/messages/to-rollup", V2.ArbitrumController, :recent_messages_to_l2) + get("/arbitrum/batches/committed", V2.ArbitrumController, :batches_committed) + get("/arbitrum/batches/latest-number", V2.ArbitrumController, :batch_latest_number) + end end scope "/stats" do @@ -402,6 +416,16 @@ defmodule BlockScoutWeb.ApiRouter do get("/worlds/:world/tables/:table_id/records/:record_id", V2.MudController, :world_table_record) end end + + scope "/arbitrum" do + if Application.compile_env(:explorer, :chain_type) == :arbitrum do + get("/messages/:direction", V2.ArbitrumController, :messages) + get("/messages/:direction/count", V2.ArbitrumController, :messages_count) + get("/batches", V2.ArbitrumController, :batches) + get("/batches/count", V2.ArbitrumController, :batches_count) + get("/batches/:batch_number", V2.ArbitrumController, :batch) + end + end end scope "/v1/graphql" do diff --git a/apps/block_scout_web/lib/block_scout_web/chain.ex b/apps/block_scout_web/lib/block_scout_web/chain.ex index 
8125942f7f98..f17fc6b5d357 100644 --- a/apps/block_scout_web/lib/block_scout_web/chain.ex +++ b/apps/block_scout_web/lib/block_scout_web/chain.ex @@ -433,7 +433,11 @@ defmodule BlockScoutWeb.Chain do end end - # clause for Polygon Edge Deposits and Withdrawals and for account's entities pagination + # clause for pagination of entities: + # - Account's entities + # - Polygon Edge Deposits + # - Polygon Edge Withdrawals + # - Arbitrum cross chain messages def paging_options(%{"id" => id_string}) when is_binary(id_string) do case Integer.parse(id_string) do {id, ""} -> @@ -444,7 +448,11 @@ defmodule BlockScoutWeb.Chain do end end - # clause for Polygon Edge Deposits and Withdrawals and for account's entities pagination + # clause for pagination of entities: + # - Account's entities + # - Polygon Edge Deposits + # - Polygon Edge Withdrawals + # - Arbitrum cross chain messages def paging_options(%{"id" => id}) when is_integer(id) do [paging_options: %{@default_paging_options | key: {id}}] end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex new file mode 100644 index 000000000000..3230371b2da8 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex @@ -0,0 +1,163 @@ +defmodule BlockScoutWeb.API.V2.ArbitrumController do + use BlockScoutWeb, :controller + + import BlockScoutWeb.Chain, + only: [ + next_page_params: 4, + paging_options: 1, + split_list_by_page: 1 + ] + + alias Explorer.PagingOptions + alias Explorer.Chain.Arbitrum.{L1Batch, Message, Reader} + + action_fallback(BlockScoutWeb.API.V2.FallbackController) + + @batch_necessity_by_association %{:commitment_transaction => :optional} + + @doc """ + Function to handle GET requests to `/api/v2/arbitrum/messages/:direction` endpoint. 
+ """ + @spec messages(Plug.Conn.t(), map()) :: Plug.Conn.t() + def messages(conn, %{"direction" => direction} = params) do + options = + params + |> paging_options() + |> Keyword.put(:api?, true) + + {messages, next_page} = + direction + |> Reader.messages(options) + |> split_list_by_page() + + next_page_params = + next_page_params( + next_page, + messages, + params, + fn %Message{message_id: msg_id} -> %{"id" => msg_id} end + ) + + conn + |> put_status(200) + |> render(:arbitrum_messages, %{ + messages: messages, + next_page_params: next_page_params + }) + end + + @doc """ + Function to handle GET requests to `/api/v2/arbitrum/messages/:direction/count` endpoint. + """ + @spec messages_count(Plug.Conn.t(), map()) :: Plug.Conn.t() + def messages_count(conn, %{"direction" => direction} = _params) do + conn + |> put_status(200) + |> render(:arbitrum_messages_count, %{count: Reader.messages_count(direction, api?: true)}) + end + + @doc """ + Function to handle GET requests to `/api/v2/arbitrum/batches/:batch_number` endpoint. + """ + @spec batch(Plug.Conn.t(), map()) :: Plug.Conn.t() + def batch(conn, %{"batch_number" => batch_number} = _params) do + case Reader.batch( + batch_number, + necessity_by_association: @batch_necessity_by_association, + api?: true + ) do + {:ok, batch} -> + conn + |> put_status(200) + |> render(:arbitrum_batch, %{batch: batch}) + + {:error, :not_found} = res -> + res + end + end + + @doc """ + Function to handle GET requests to `/api/v2/arbitrum/batches/count` endpoint. + """ + @spec batches_count(Plug.Conn.t(), map()) :: Plug.Conn.t() + def batches_count(conn, _params) do + conn + |> put_status(200) + |> render(:arbitrum_batches_count, %{count: Reader.batches_count(api?: true)}) + end + + @doc """ + Function to handle GET requests to `/api/v2/arbitrum/batches` endpoint. 
+ """ + @spec batches(Plug.Conn.t(), map()) :: Plug.Conn.t() + def batches(conn, params) do + {batches, next_page} = + params + |> paging_options() + |> Keyword.put(:necessity_by_association, @batch_necessity_by_association) + |> Keyword.put(:api?, true) + |> Reader.batches() + |> split_list_by_page() + + next_page_params = + next_page_params( + next_page, + batches, + params, + fn %L1Batch{number: number} -> %{"number" => number} end + ) + + conn + |> put_status(200) + |> render(:arbitrum_batches, %{ + batches: batches, + next_page_params: next_page_params + }) + end + + @doc """ + Function to handle GET requests to `/api/v2/main-page/arbitrum/batches/committed` endpoint. + """ + @spec batches_committed(Plug.Conn.t(), map()) :: Plug.Conn.t() + def batches_committed(conn, _params) do + batches = + [] + |> Keyword.put(:necessity_by_association, @batch_necessity_by_association) + |> Keyword.put(:api?, true) + |> Keyword.put(:committed?, true) + |> Reader.batches() + + conn + |> put_status(200) + |> render(:arbitrum_batches, %{batches: batches}) + end + + @doc """ + Function to handle GET requests to `/api/v2/main-page/arbitrum/batches/latest-number` endpoint. + """ + @spec batch_latest_number(Plug.Conn.t(), map()) :: Plug.Conn.t() + def batch_latest_number(conn, _params) do + conn + |> put_status(200) + |> render(:arbitrum_batch_latest_number, %{number: batch_latest_number()}) + end + + defp batch_latest_number do + case Reader.batch(:latest, api?: true) do + {:ok, batch} -> batch.number + {:error, :not_found} -> 0 + end + end + + @doc """ + Function to handle GET requests to `/api/v2/main-page/arbitrum/messages/to-rollup` endpoint. 
+ """ + @spec recent_messages_to_l2(Plug.Conn.t(), map()) :: Plug.Conn.t() + def recent_messages_to_l2(conn, _params) do + messages = Reader.relayed_l1_to_l2_messages(paging_options: %PagingOptions{page_size: 6}, api?: true) + + conn + |> put_status(200) + |> render(:arbitrum_messages, %{messages: messages}) + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex index 4bc20eb21fa4..c33c6fcd34db 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex @@ -19,6 +19,7 @@ defmodule BlockScoutWeb.API.V2.BlockController do alias BlockScoutWeb.API.V2.{TransactionView, WithdrawalView} alias Explorer.Chain + alias Explorer.Chain.Arbitrum.Reader, as: ArbitrumReader alias Explorer.Chain.InternalTransaction case Application.compile_env(:explorer, :chain_type) do @@ -39,6 +40,14 @@ defmodule BlockScoutWeb.API.V2.BlockController do :zksync_execute_transaction => :optional } + :arbitrum -> + @chain_type_transaction_necessity_by_association %{} + @chain_type_block_necessity_by_association %{ + :arbitrum_batch => :optional, + :arbitrum_commitment_transaction => :optional, + :arbitrum_confirmation_transaction => :optional + } + _ -> @chain_type_transaction_necessity_by_association %{} @chain_type_block_necessity_by_association %{} @@ -85,20 +94,6 @@ defmodule BlockScoutWeb.API.V2.BlockController do api?: true ] - @block_params [ - necessity_by_association: - %{ - [miner: :names] => :optional, - :uncles => :optional, - :nephews => :optional, - :rewards => :optional, - :transactions => :optional, - :withdrawals => :optional - } - |> Map.merge(@chain_type_block_necessity_by_association), - api?: true - ] - action_fallback(BlockScoutWeb.API.V2.FallbackController) @doc """ @@ -155,6 +150,33 @@ defmodule BlockScoutWeb.API.V2.BlockController 
do }) end + @doc """ + Function to handle GET requests to `/api/v2/blocks/arbitrum-batch/:batch_number` endpoint. + It renders the list of L2 blocks bound to the specified batch. + """ + @spec arbitrum_batch(Plug.Conn.t(), any()) :: Plug.Conn.t() + def arbitrum_batch(conn, %{"batch_number" => batch_number} = params) do + full_options = + params + |> select_block_type() + |> Keyword.merge(paging_options(params)) + |> Keyword.merge(@api_true) + + {blocks, next_page} = + batch_number + |> ArbitrumReader.batch_blocks(full_options) + |> split_list_by_page() + + next_page_params = next_page |> next_page_params(blocks, delete_parameters_from_next_page_params(params)) + + conn + |> put_status(200) + |> render(:blocks, %{ + blocks: blocks |> maybe_preload_ens() |> maybe_preload_metadata(), + next_page_params: next_page_params + }) + end + @doc """ Function to handle GET requests to `/api/v2/blocks/:block_hash_or_number/transactions` endpoint. """ diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex index 51989d4431ad..7421b3e2235e 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex @@ -32,6 +32,7 @@ defmodule BlockScoutWeb.API.V2.TransactionController do alias BlockScoutWeb.MicroserviceInterfaces.TransactionInterpretation, as: TransactionInterpretationService alias BlockScoutWeb.Models.TransactionStateHelper alias Explorer.Chain + alias Explorer.Chain.Arbitrum.Reader, as: ArbitrumReader alias Explorer.Chain.Beacon.Reader, as: BeaconReader alias Explorer.Chain.{Hash, InternalTransaction, Transaction} alias Explorer.Chain.PolygonZkevm.Reader, as: PolygonZkevmReader @@ -114,6 +115,14 @@ defmodule BlockScoutWeb.API.V2.TransactionController do |> Map.put(:zksync_prove_transaction, :optional) |> 
Map.put(:zksync_execute_transaction, :optional) + :arbitrum -> + necessity_by_association_with_actions + |> Map.put(:arbitrum_batch, :optional) + |> Map.put(:arbitrum_commitment_transaction, :optional) + |> Map.put(:arbitrum_confirmation_transaction, :optional) + |> Map.put(:arbitrum_message_to_l2, :optional) + |> Map.put(:arbitrum_message_from_l2, :optional) + :suave -> necessity_by_association_with_actions |> Map.put(:logs, :optional) @@ -194,7 +203,35 @@ defmodule BlockScoutWeb.API.V2.TransactionController do It renders the list of L2 transactions bound to the specified batch. """ @spec zksync_batch(Plug.Conn.t(), map()) :: Plug.Conn.t() - def zksync_batch(conn, %{"batch_number" => batch_number} = params) do + def zksync_batch(conn, params) do + handle_batch_transactions(conn, params, &ZkSyncReader.batch_transactions/2) + end + + @doc """ + Function to handle GET requests to `/api/v2/transactions/arbitrum-batch/:batch_number` endpoint. + It renders the list of L2 transactions bound to the specified batch. + """ + @spec arbitrum_batch(Plug.Conn.t(), map()) :: Plug.Conn.t() + def arbitrum_batch(conn, params) do + handle_batch_transactions(conn, params, &ArbitrumReader.batch_transactions/2) + end + + # Processes and renders transactions for a specified batch into an HTTP response. + # + # This function retrieves a list of transactions for a given batch using a specified function, + # then extracts the transaction hashes. These hashes are used to retrieve the corresponding + # `Explorer.Chain.Transaction` records according to the given pagination options. It formats + # these transactions into an HTTP response. + # + # ## Parameters + # - `conn`: The connection object. + # - `params`: Parameters from the request, including the batch number. + # - `batch_transactions_fun`: A function to fetch transaction descriptions for the given batch. + # + # ## Returns + # - Updated connection object with the transactions data rendered. 
+ @spec handle_batch_transactions(Plug.Conn.t(), map(), function()) :: Plug.Conn.t() + defp handle_batch_transactions(conn, %{"batch_number" => batch_number} = params, batch_transactions_fun) do full_options = [ necessity_by_association: @transaction_necessity_by_association @@ -206,13 +243,13 @@ defmodule BlockScoutWeb.API.V2.TransactionController do # it will require to re-implement all pagination logic existing in Explorer.Chain.Transaction # In order to simplify the code, all transaction are requested from the batch and then # only subset of them is returned from `hashes_to_transactions`. - raw_transactions_list = + transactions_plus_one = batch_number - |> ZkSyncReader.batch_transactions(api?: true) - |> Enum.map(fn tx -> tx.hash end) + |> batch_transactions_fun.(@api_true) + |> Enum.map(fn tx -> tx.tx_hash end) |> Chain.hashes_to_transactions(full_options) - {transactions, next_page} = split_list_by_page(raw_transactions_list) + {transactions, next_page} = split_list_by_page(transactions_plus_one) next_page_params = next_page |> next_page_params(transactions, delete_parameters_from_next_page_params(params)) conn diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex new file mode 100644 index 000000000000..ffc9c745aa84 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex @@ -0,0 +1,425 @@ +defmodule BlockScoutWeb.API.V2.ArbitrumView do + use BlockScoutWeb, :view + + alias BlockScoutWeb.API.V2.Helper, as: APIV2Helper + alias Explorer.Chain.{Block, Hash, Transaction, Wei} + alias Explorer.Chain.Arbitrum.{L1Batch, LifecycleTransaction} + + @doc """ + Function to render GET requests to `/api/v2/arbitrum/messages/:direction` endpoint. 
+ """ + @spec render(binary(), map()) :: map() | non_neg_integer() + def render("arbitrum_messages.json", %{ + messages: messages, + next_page_params: next_page_params + }) do + messages_out = + messages + |> Enum.map(fn msg -> + %{ + "id" => msg.message_id, + "origination_address" => msg.originator_address, + "origination_transaction_hash" => msg.originating_transaction_hash, + "origination_timestamp" => msg.origination_timestamp, + "origination_transaction_block_number" => msg.originating_transaction_block_number, + "completion_transaction_hash" => msg.completion_transaction_hash, + "status" => msg.status + } + end) + + %{ + items: messages_out, + next_page_params: next_page_params + } + end + + @doc """ + Function to render GET requests to `/api/v2/main-page/arbitrum/messages/to-rollup` endpoint. + """ + def render("arbitrum_messages.json", %{messages: messages}) do + messages_out = + messages + |> Enum.map(fn msg -> + %{ + "origination_transaction_hash" => msg.originating_transaction_hash, + "origination_timestamp" => msg.origination_timestamp, + "origination_transaction_block_number" => msg.originating_transaction_block_number, + "completion_transaction_hash" => msg.completion_transaction_hash + } + end) + + %{items: messages_out} + end + + @doc """ + Function to render GET requests to `/api/v2/arbitrum/messages/:direction/count` endpoint. + """ + def render("arbitrum_messages_count.json", %{count: count}) do + count + end + + @doc """ + Function to render GET requests to `/api/v2/arbitrum/batches/:batch_number` endpoint. + """ + def render("arbitrum_batch.json", %{batch: batch}) do + %{ + "number" => batch.number, + "transactions_count" => batch.transactions_count, + "start_block" => batch.start_block, + "end_block" => batch.end_block, + "before_acc" => batch.before_acc, + "after_acc" => batch.after_acc + } + |> add_l1_tx_info(batch) + end + + @doc """ + Function to render GET requests to `/api/v2/arbitrum/batches` endpoint. 
+ """ + def render("arbitrum_batches.json", %{ + batches: batches, + next_page_params: next_page_params + }) do + %{ + items: render_arbitrum_batches(batches), + next_page_params: next_page_params + } + end + + @doc """ + Function to render GET requests to `/api/v2/main-page/arbitrum/batches/committed` endpoint. + """ + def render("arbitrum_batches.json", %{batches: batches}) do + %{items: render_arbitrum_batches(batches)} + end + + @doc """ + Function to render GET requests to `/api/v2/arbitrum/batches/count` endpoint. + """ + def render("arbitrum_batches_count.json", %{count: count}) do + count + end + + @doc """ + Function to render GET requests to `/api/v2/main-page/arbitrum/batches/latest-number` endpoint. + """ + def render("arbitrum_batch_latest_number.json", %{number: number}) do + number + end + + # Transforms a list of L1 batches into a map format for HTTP response. + # + # This function processes a list of Arbitrum L1 batches and converts each batch into + # a map that includes basic batch information and details of the associated + # transaction that committed the batch to L1. + # + # ## Parameters + # - `batches`: A list of `Explorer.Chain.Arbitrum.L1Batch` entries. + # + # ## Returns + # - A list of maps with detailed information about each batch, formatted for use + # in JSON HTTP responses. + @spec render_arbitrum_batches([L1Batch]) :: [map()] + defp render_arbitrum_batches(batches) do + Enum.map(batches, fn batch -> + %{ + "number" => batch.number, + "transactions_count" => batch.transactions_count, + "block_count" => batch.end_block - batch.start_block + 1 + } + |> add_l1_tx_info(batch) + end) + end + + @doc """ + Extends the json output with a sub-map containing information related Arbitrum. 
+ + ## Parameters + - `out_json`: a map defining output json which will be extended + - `transaction`: transaction structure containing Arbitrum related data + + ## Returns + A map extended with data related Arbitrum rollup + """ + @spec extend_transaction_json_response(map(), %{ + :__struct__ => Transaction, + :arbitrum_batch => any(), + :arbitrum_commitment_transaction => any(), + :arbitrum_confirmation_transaction => any(), + :arbitrum_message_to_l2 => any(), + :arbitrum_message_from_l2 => any(), + :gas_used_for_l1 => Decimal.t(), + :gas_used => Decimal.t(), + :gas_price => Wei.t(), + optional(any()) => any() + }) :: map() + def extend_transaction_json_response(out_json, %Transaction{} = transaction) do + arbitrum_info = + %{} + |> extend_with_settlement_info(transaction) + |> extend_if_message(transaction) + |> extend_with_transaction_info(transaction) + + Map.put(out_json, "arbitrum", arbitrum_info) + end + + @doc """ + Extends the json output with a sub-map containing information related Arbitrum. + + ## Parameters + - `out_json`: a map defining output json which will be extended + - `block`: block structure containing Arbitrum related data + + ## Returns + A map extended with data related Arbitrum rollup + """ + @spec extend_block_json_response(map(), %{ + :__struct__ => Block, + :arbitrum_batch => any(), + :arbitrum_commitment_transaction => any(), + :arbitrum_confirmation_transaction => any(), + :nonce => Hash.Nonce.t(), + :send_count => non_neg_integer(), + :send_root => Hash.Full.t(), + :l1_block_number => non_neg_integer(), + optional(any()) => any() + }) :: map() + def extend_block_json_response(out_json, %Block{} = block) do + arbitrum_info = + %{} + |> extend_with_settlement_info(block) + |> extend_with_block_info(block) + + Map.put(out_json, "arbitrum", arbitrum_info) + end + + # Augments an output JSON with settlement-related information such as batch number and L1 transaction details to JSON. 
+ @spec extend_with_settlement_info(map(), %{ + :__struct__ => Block | Transaction, + :arbitrum_batch => any(), + :arbitrum_commitment_transaction => any(), + :arbitrum_confirmation_transaction => any(), + optional(any()) => any() + }) :: map() + defp extend_with_settlement_info(out_json, arbitrum_entity) do + out_json + |> add_l1_txs_info_and_status(%{ + batch_number: get_batch_number(arbitrum_entity), + commitment_transaction: arbitrum_entity.arbitrum_commitment_transaction, + confirmation_transaction: arbitrum_entity.arbitrum_confirmation_transaction + }) + |> Map.put("batch_number", get_batch_number(arbitrum_entity)) + end + + # Retrieves the batch number from an Arbitrum block or transaction if the batch + # data is loaded. + @spec get_batch_number(%{ + :__struct__ => Block | Transaction, + :arbitrum_batch => any(), + optional(any()) => any() + }) :: nil | non_neg_integer() + defp get_batch_number(arbitrum_entity) do + case Map.get(arbitrum_entity, :arbitrum_batch) do + nil -> nil + %Ecto.Association.NotLoaded{} -> nil + value -> value.number + end + end + + # Augments an output JSON with commit transaction details and its status. + @spec add_l1_tx_info(map(), %{ + :__struct__ => L1Batch, + :commitment_transaction => any(), + optional(any()) => any() + }) :: map() + defp add_l1_tx_info(out_json, %L1Batch{} = batch) do + l1_tx = %{commitment_transaction: handle_associated_l1_txs_properly(batch.commitment_transaction)} + + out_json + |> Map.merge(%{ + "commitment_transaction" => %{ + "hash" => APIV2Helper.get_2map_data(l1_tx, :commitment_transaction, :hash), + "block_number" => APIV2Helper.get_2map_data(l1_tx, :commitment_transaction, :block), + "timestamp" => APIV2Helper.get_2map_data(l1_tx, :commitment_transaction, :ts), + "status" => APIV2Helper.get_2map_data(l1_tx, :commitment_transaction, :status) + } + }) + end + + # Augments an output JSON with commit and confirm transaction details and their statuses. 
+ @spec add_l1_txs_info_and_status(map(), %{ + :commitment_transaction => any(), + :confirmation_transaction => any(), + optional(:batch_number) => any() + }) :: map() + defp add_l1_txs_info_and_status(out_json, arbitrum_item) + when is_map(arbitrum_item) and + is_map_key(arbitrum_item, :commitment_transaction) and + is_map_key(arbitrum_item, :confirmation_transaction) do + l1_txs = get_associated_l1_txs(arbitrum_item) + + out_json + |> Map.merge(%{ + "status" => block_or_transaction_status(arbitrum_item), + "commitment_transaction" => %{ + "hash" => APIV2Helper.get_2map_data(l1_txs, :commitment_transaction, :hash), + "timestamp" => APIV2Helper.get_2map_data(l1_txs, :commitment_transaction, :ts), + "status" => APIV2Helper.get_2map_data(l1_txs, :commitment_transaction, :status) + }, + "confirmation_transaction" => %{ + "hash" => APIV2Helper.get_2map_data(l1_txs, :confirmation_transaction, :hash), + "timestamp" => APIV2Helper.get_2map_data(l1_txs, :confirmation_transaction, :ts), + "status" => APIV2Helper.get_2map_data(l1_txs, :confirmation_transaction, :status) + } + }) + end + + # Extract transaction hash and block number, timestamp, finalization status for + # L1 transactions associated with an Arbitrum rollup entity: transaction or block. + # + # ## Parameters + # - `arbitrum_item`: a short description of a transaction, or block. 
+ # + # ## Returns + # A map containing nesting maps describing corresponding L1 transactions + @spec get_associated_l1_txs(%{ + :commitment_transaction => any(), + :confirmation_transaction => any(), + optional(any()) => any() + }) :: %{ + :commitment_transaction => + nil + | %{ + :hash => nil | binary(), + :block_number => nil | non_neg_integer(), + :ts => nil | DateTime.t(), + :status => nil | :finalized | :unfinalized + }, + :confirmation_transaction => + nil + | %{ + :hash => nil | binary(), + :block_number => nil | non_neg_integer(), + :ts => nil | DateTime.t(), + :status => nil | :finalized | :unfinalized + } + } + defp get_associated_l1_txs(arbitrum_item) do + [:commitment_transaction, :confirmation_transaction] + |> Enum.reduce(%{}, fn key, l1_txs -> + Map.put(l1_txs, key, handle_associated_l1_txs_properly(Map.get(arbitrum_item, key))) + end) + end + + # Returns details of an associated L1 transaction or nil if not loaded or not available. + @spec handle_associated_l1_txs_properly(LifecycleTransaction | Ecto.Association.NotLoaded.t() | nil) :: + nil + | %{ + :hash => nil | binary(), + :block => nil | non_neg_integer(), + :ts => nil | DateTime.t(), + :status => nil | :finalized | :unfinalized + } + defp handle_associated_l1_txs_properly(associated_l1_tx) do + case associated_l1_tx do + nil -> nil + %Ecto.Association.NotLoaded{} -> nil + value -> %{hash: value.hash, block: value.block_number, ts: value.timestamp, status: value.status} + end + end + + # Inspects L1 transactions of a rollup block or transaction to determine its status. + # + # ## Parameters + # - `arbitrum_item`: An Arbitrum transaction or block. 
+ # + # ## Returns + # A string with one of predefined statuses + @spec block_or_transaction_status(%{ + :commitment_transaction => any(), + :confirmation_transaction => any(), + optional(:batch_number) => any() + }) :: String.t() + defp block_or_transaction_status(arbitrum_item) do + cond do + APIV2Helper.specified?(arbitrum_item.confirmation_transaction) -> "Confirmed on base" + APIV2Helper.specified?(arbitrum_item.commitment_transaction) -> "Sent to base" + not is_nil(arbitrum_item.batch_number) -> "Sealed on rollup" + true -> "Processed on rollup" + end + end + + # Determines if an Arbitrum transaction contains a cross-chain message and extends + # the incoming map with the `contains_message` field to reflect the direction of + # the message. + # + # ## Parameters + # - `arbitrum_tx`: An Arbitrum transaction. + # + # ## Returns + # - A map extended with a field indicating the direction of the message. + @spec extend_if_message(map(), %{ + :__struct__ => Transaction, + :arbitrum_message_to_l2 => any(), + :arbitrum_message_from_l2 => any(), + optional(any()) => any() + }) :: map() + defp extend_if_message(arbitrum_json, %Transaction{} = arbitrum_tx) do + message_type = + case {APIV2Helper.specified?(arbitrum_tx.arbitrum_message_to_l2), + APIV2Helper.specified?(arbitrum_tx.arbitrum_message_from_l2)} do + {true, false} -> "incoming" + {false, true} -> "outcoming" + _ -> nil + end + + Map.put(arbitrum_json, "contains_message", message_type) + end + + # Extends the output JSON with information from Arbitrum-specific fields of the transaction. 
+ @spec extend_with_transaction_info(map(), %{ + :__struct__ => Transaction, + :gas_used_for_l1 => Decimal.t(), + :gas_used => Decimal.t(), + :gas_price => Wei.t(), + optional(any()) => any() + }) :: map() + defp extend_with_transaction_info(out_json, %Transaction{} = arbitrum_tx) do + gas_used_for_l2 = + arbitrum_tx.gas_used + |> Decimal.sub(arbitrum_tx.gas_used_for_l1) + + poster_fee = + arbitrum_tx.gas_price + |> Wei.to(:wei) + |> Decimal.mult(arbitrum_tx.gas_used_for_l1) + + network_fee = + arbitrum_tx.gas_price + |> Wei.to(:wei) + |> Decimal.mult(gas_used_for_l2) + + out_json + |> Map.put("gas_used_for_l1", arbitrum_tx.gas_used_for_l1) + |> Map.put("gas_used_for_l2", gas_used_for_l2) + |> Map.put("poster_fee", poster_fee) + |> Map.put("network_fee", network_fee) + end + + # Extends the output JSON with information from the Arbitrum-specific fields of the block. + @spec extend_with_block_info(map(), %{ + :__struct__ => Block, + :nonce => Hash.Nonce.t(), + :send_count => non_neg_integer(), + :send_root => Hash.Full.t(), + :l1_block_number => non_neg_integer(), + optional(any()) => any() + }) :: map() + defp extend_with_block_info(out_json, %Block{} = arbitrum_block) do + out_json + |> Map.put("delayed_messages", Hash.to_integer(arbitrum_block.nonce)) + |> Map.put("l1_block_height", arbitrum_block.l1_block_number) + |> Map.put("send_count", arbitrum_block.send_count) + |> Map.put("send_root", arbitrum_block.send_root) + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex index ee712ef9997a..cf86d59ce54d 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex @@ -120,6 +120,16 @@ defmodule BlockScoutWeb.API.V2.BlockView do end end + :arbitrum -> + defp chain_type_fields(result, block, single_block?) do + if single_block? 
do + # credo:disable-for-next-line Credo.Check.Design.AliasUsage + BlockScoutWeb.API.V2.ArbitrumView.extend_block_json_response(result, block) + else + result + end + end + :ethereum -> defp chain_type_fields(result, block, single_block?) do # credo:disable-for-next-line Credo.Check.Design.AliasUsage diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index f7ae3593c234..2afc67df048a 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -160,4 +160,44 @@ defmodule BlockScoutWeb.API.V2.Helper do x_days_back = Date.add(latest, -1 * (num_days - 1)) %{earliest: x_days_back, latest: latest} end + + @doc """ + Checks if an item associated with a DB entity has actual value + + ## Parameters + - `associated_item`: an item associated with a DB entity + + ## Returns + - `false`: if the item is nil or not loaded + - `true`: if the item has actual value + """ + @spec specified?(any()) :: boolean() + def specified?(associated_item) do + case associated_item do + nil -> false + %Ecto.Association.NotLoaded{} -> false + _ -> true + end + end + + @doc """ + Gets the value of an element nested in a map using two keys. + + Clarification: Returns `map[key1][key2]` + + ## Parameters + - `map`: The high-level map. + - `key1`: The key of the element in `map`. + - `key2`: The key of the element in the map accessible by `map[key1]`. + + ## Returns + The value of the element, or `nil` if the map accessible by `key1` does not exist. 
+ """ + @spec get_2map_data(map(), any(), any()) :: any() + def get_2map_data(map, key1, key2) do + case Map.get(map, key1) do + nil -> nil + inner_map -> Map.get(inner_map, key2) + end + end end diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex index 40ce0d5edb36..7ed695897c1c 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex @@ -837,6 +837,20 @@ defmodule BlockScoutWeb.API.V2.TransactionView do end end + :arbitrum -> + defp chain_type_transformations(transactions) do + transactions + end + + defp chain_type_fields(result, transaction, single_tx?, _conn, _watchlist_names) do + if single_tx? do + # credo:disable-for-next-line Credo.Check.Design.AliasUsage + BlockScoutWeb.API.V2.ArbitrumView.extend_transaction_json_response(result, transaction) + else + result + end + end + :optimism -> defp chain_type_transformations(transactions) do transactions diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex index 0d4f3a8d81e5..a5c418aa8d79 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex @@ -4,6 +4,8 @@ defmodule BlockScoutWeb.API.V2.ZkSyncView do alias Explorer.Chain.{Block, Transaction} alias Explorer.Chain.ZkSync.TransactionBatch + alias BlockScoutWeb.API.V2.Helper, as: APIV2Helper + @doc """ Function to render GET requests to `/api/v2/zksync/batches/:batch_number` endpoint. 
""" @@ -146,12 +148,12 @@ defmodule BlockScoutWeb.API.V2.ZkSyncView do out_json |> Map.merge(%{ "status" => batch_status(zksync_item), - "commit_transaction_hash" => get_2map_data(l1_txs, :commit_transaction, :hash), - "commit_transaction_timestamp" => get_2map_data(l1_txs, :commit_transaction, :ts), - "prove_transaction_hash" => get_2map_data(l1_txs, :prove_transaction, :hash), - "prove_transaction_timestamp" => get_2map_data(l1_txs, :prove_transaction, :ts), - "execute_transaction_hash" => get_2map_data(l1_txs, :execute_transaction, :hash), - "execute_transaction_timestamp" => get_2map_data(l1_txs, :execute_transaction, :ts) + "commit_transaction_hash" => APIV2Helper.get_2map_data(l1_txs, :commit_transaction, :hash), + "commit_transaction_timestamp" => APIV2Helper.get_2map_data(l1_txs, :commit_transaction, :ts), + "prove_transaction_hash" => APIV2Helper.get_2map_data(l1_txs, :prove_transaction, :hash), + "prove_transaction_timestamp" => APIV2Helper.get_2map_data(l1_txs, :prove_transaction, :ts), + "execute_transaction_hash" => APIV2Helper.get_2map_data(l1_txs, :execute_transaction, :hash), + "execute_transaction_timestamp" => APIV2Helper.get_2map_data(l1_txs, :execute_transaction, :ts) }) end @@ -183,47 +185,13 @@ defmodule BlockScoutWeb.API.V2.ZkSyncView do # A string with one of predefined statuses defp batch_status(zksync_item) do cond do - specified?(zksync_item.execute_transaction) -> "Executed on L1" - specified?(zksync_item.prove_transaction) -> "Validated on L1" - specified?(zksync_item.commit_transaction) -> "Sent to L1" + APIV2Helper.specified?(zksync_item.execute_transaction) -> "Executed on L1" + APIV2Helper.specified?(zksync_item.prove_transaction) -> "Validated on L1" + APIV2Helper.specified?(zksync_item.commit_transaction) -> "Sent to L1" # Batch entity itself has no batch_number not Map.has_key?(zksync_item, :batch_number) -> "Sealed on L2" not is_nil(zksync_item.batch_number) -> "Sealed on L2" true -> "Processed on L2" end end - - # Checks if an 
item associated with a DB entity has actual value - # - # ## Parameters - # - `associated_item`: an item associated with a DB entity - # - # ## Returns - # - `false`: if the item is nil or not loaded - # - `true`: if the item has actual value - defp specified?(associated_item) do - case associated_item do - nil -> false - %Ecto.Association.NotLoaded{} -> false - _ -> true - end - end - - # Gets the value of an element nested in a map using two keys. - # - # Clarification: Returns `map[key1][key2]` - # - # ## Parameters - # - `map`: The high-level map. - # - `key1`: The key of the element in `map`. - # - `key2`: The key of the element in the map accessible by `map[key1]`. - # - # ## Returns - # The value of the element, or `nil` if the map accessible by `key1` does not exist. - defp get_2map_data(map, key1, key2) do - case Map.get(map, key1) do - nil -> nil - inner_map -> Map.get(inner_map, key2) - end - end end diff --git a/apps/block_scout_web/mix.exs b/apps/block_scout_web/mix.exs index 1807a4758e88..838074096086 100644 --- a/apps/block_scout_web/mix.exs +++ b/apps/block_scout_web/mix.exs @@ -31,7 +31,8 @@ defmodule BlockScoutWeb.Mixfile do Explorer.Chain.Cache.OptimismFinalizationPeriod, Explorer.Chain.Optimism.OutputRoot, Explorer.Chain.Optimism.WithdrawalEvent, - Explorer.Chain.ZkSync.Reader + Explorer.Chain.ZkSync.Reader, + Explorer.Chain.Arbitrum.Reader ] ] ] diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex index ad7eb40fff7d..1c6b870b2864 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/block.ex @@ -29,6 +29,15 @@ defmodule EthereumJSONRPC.Block do ] ) + :arbitrum -> + @chain_type_fields quote( + do: [ + send_count: non_neg_integer(), + send_root: EthereumJSONRPC.hash(), + l1_block_number: non_neg_integer() + ] + ) + _ -> @chain_type_fields quote(do: []) end @@ -172,6 +181,11 @@ defmodule EthereumJSONRPC.Block do 
"blobGasUsed" => 262144,\ "excessBlobGas" => 79429632,\ """ + :arbitrum -> """ + "sendRoot" => "0xc71ee2cf4201f65590aa6c052270dc41e926e628f213e268a58d9a8d8f739f82",\ + "sendCount" => 91,\ + "l1BlockNumber" => 19828534,\ + """ _ -> "" end} ...> "uncles" => [] @@ -209,6 +223,11 @@ defmodule EthereumJSONRPC.Block do blob_gas_used: 262144,\ excess_blob_gas: 79429632,\ """ + :arbitrum -> """ + send_root: "0xc71ee2cf4201f65590aa6c052270dc41e926e628f213e268a58d9a8d8f739f82",\ + send_count: 91,\ + l1_block_number: 19828534,\ + """ _ -> "" end} uncles: [] @@ -272,6 +291,11 @@ defmodule EthereumJSONRPC.Block do blob_gas_used: 0,\ excess_blob_gas: 0,\ """ + :arbitrum -> """ + send_root: nil,\ + send_count: nil,\ + l1_block_number: nil,\ + """ _ -> "" end} uncles: [] @@ -461,9 +485,9 @@ defmodule EthereumJSONRPC.Block do } end - defp chain_type_fields(params, elixir) do - case Application.get_env(:explorer, :chain_type) do - :rsk -> + case Application.compile_env(:explorer, :chain_type) do + :rsk -> + defp chain_type_fields(params, elixir) do params |> Map.merge(%{ minimum_gas_price: Map.get(elixir, "minimumGasPrice"), @@ -472,8 +496,10 @@ defmodule EthereumJSONRPC.Block do bitcoin_merged_mining_merkle_proof: Map.get(elixir, "bitcoinMergedMiningMerkleProof"), hash_for_merged_mining: Map.get(elixir, "hashForMergedMining") }) + end - :ethereum -> + :ethereum -> + defp chain_type_fields(params, elixir) do params |> Map.merge(%{ withdrawals_root: @@ -481,10 +507,20 @@ defmodule EthereumJSONRPC.Block do blob_gas_used: Map.get(elixir, "blobGasUsed", 0), excess_blob_gas: Map.get(elixir, "excessBlobGas", 0) }) + end - _ -> + :arbitrum -> + defp chain_type_fields(params, elixir) do params - end + |> Map.merge(%{ + send_count: Map.get(elixir, "sendCount"), + send_root: Map.get(elixir, "sendRoot"), + l1_block_number: Map.get(elixir, "l1BlockNumber") + }) + end + + _ -> + defp chain_type_fields(params, _), do: params end @doc """ @@ -790,7 +826,9 @@ defmodule EthereumJSONRPC.Block do end 
defp entry_to_elixir({key, quantity}, _block) - when key in ~w(difficulty gasLimit gasUsed minimumGasPrice baseFeePerGas number size cumulativeDifficulty totalDifficulty paidFees minimumGasPrice blobGasUsed excessBlobGas) and + when key in ~w(difficulty gasLimit gasUsed minimumGasPrice baseFeePerGas number size + cumulativeDifficulty totalDifficulty paidFees minimumGasPrice blobGasUsed + excessBlobGas l1BlockNumber sendCount) and not is_nil(quantity) do {key, quantity_to_integer(quantity)} end @@ -804,8 +842,10 @@ defmodule EthereumJSONRPC.Block do # `t:EthereumJSONRPC.address/0` and `t:EthereumJSONRPC.hash/0` pass through as `Explorer.Chain` can verify correct # hash format defp entry_to_elixir({key, _} = entry, _block) - when key in ~w(author extraData hash logsBloom miner mixHash nonce parentHash receiptsRoot sealFields sha3Uncles - signature stateRoot step transactionsRoot uncles withdrawalsRoot bitcoinMergedMiningHeader bitcoinMergedMiningCoinbaseTransaction bitcoinMergedMiningMerkleProof hashForMergedMining), + when key in ~w(author extraData hash logsBloom miner mixHash nonce parentHash receiptsRoot + sealFields sha3Uncles signature stateRoot step transactionsRoot uncles + withdrawalsRoot bitcoinMergedMiningHeader bitcoinMergedMiningCoinbaseTransaction + bitcoinMergedMiningMerkleProof hashForMergedMining sendRoot), do: entry defp entry_to_elixir({"timestamp" = key, timestamp}, _block) do @@ -825,11 +865,6 @@ defmodule EthereumJSONRPC.Block do {key, Withdrawals.to_elixir(withdrawals, block_hash, quantity_to_integer(block_number))} end - # Arbitrum fields - defp entry_to_elixir({"l1BlockNumber", _}, _block) do - {:ignore, :ignore} - end - # bitcoinMergedMiningCoinbaseTransaction bitcoinMergedMiningHeader bitcoinMergedMiningMerkleProof hashForMergedMining - RSK https://github.com/blockscout/blockscout/pull/2934 # committedSeals committee pastCommittedSeals proposerSeal round - Autonity network https://github.com/blockscout/blockscout/pull/3480 # blockGasCost 
extDataGasUsed - sgb/ava https://github.com/blockscout/blockscout/pull/5301 diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex index 8659195ff198..eb2b14dd450b 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/blocks.ex @@ -129,6 +129,11 @@ defmodule EthereumJSONRPC.Blocks do blob_gas_used: 0,\ excess_blob_gas: 0,\ """ + :arbitrum -> """ + send_root: nil,\ + send_count: nil,\ + l1_block_number: nil,\ + """ _ -> "" end} uncles: ["0xe670ec64341771606e55d6b4ca35a1a6b75ee3d5145a99d05921026d15273311"] diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex index e1704d1068a8..4335a29e77e9 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex @@ -163,11 +163,34 @@ defmodule EthereumJSONRPC.HTTP do {:error, resp} end - # restrict response to only those fields supported by the JSON-RPC 2.0 standard, which means that level of keys is - # validated, so we can indicate that with switch to atom keys. - def standardize_response(%{"jsonrpc" => "2.0" = jsonrpc, "id" => id} = unstandardized) do + @doc """ + Standardizes responses to adhere to the JSON-RPC 2.0 standard. + + This function adjusts responses to conform to JSON-RPC 2.0, ensuring the keys are atom-based + and that 'id', 'jsonrpc', 'result', and 'error' fields meet the protocol's requirements. + It also validates the mutual exclusivity of 'result' and 'error' fields within a response. + + ## Parameters + - `unstandardized`: A map representing the response with string keys. + + ## Returns + - A standardized map with atom keys and fields aligned with the JSON-RPC 2.0 standard, including + handling of possible mutual exclusivity errors between 'result' and 'error' fields. 
+ """ + @spec standardize_response(map()) :: %{ + :id => nil | non_neg_integer(), + :jsonrpc => binary(), + optional(:error) => %{:code => integer(), :message => binary(), optional(:data) => any()}, + optional(:result) => any() + } + def standardize_response(%{"jsonrpc" => "2.0" = jsonrpc} = unstandardized) do + # Avoid extracting `id` directly in the function declaration. Some endpoints + # do not adhere to standards and may omit the `id` in responses related to + # error scenarios. Consequently, the function call would fail during input + # argument matching. + # Nethermind return string ids - id = quantity_to_integer(id) + id = quantity_to_integer(unstandardized["id"]) standardized = %{jsonrpc: jsonrpc, id: id} @@ -187,8 +210,21 @@ defmodule EthereumJSONRPC.HTTP do end end - # restrict error to only those fields supported by the JSON-RPC 2.0 standard, which means that level of keys is - # validated, so we can indicate that with switch to atom keys. + @doc """ + Standardizes error responses to adhere to the JSON-RPC 2.0 standard. + + This function converts a map containing error information into a format compliant + with the JSON-RPC 2.0 specification. It ensures the keys are atom-based and checks + for the presence of optional 'data' field, incorporating it if available. + + ## Parameters + - `unstandardized`: A map representing the error with string keys: "code", "message" + and "data" (optional). + + ## Returns + - A standardized map with keys as atoms and fields aligned with the JSON-RPC 2.0 standard. 
+ """ + @spec standardize_error(map()) :: %{:code => integer(), :message => binary(), optional(:data) => any()} def standardize_error(%{"code" => code, "message" => message} = unstandardized) when is_integer(code) and is_binary(message) do standardized = %{code: code, message: message} diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex index e1535a3ff0d5..caa06a212a63 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex @@ -8,6 +8,36 @@ defmodule EthereumJSONRPC.Receipt do alias EthereumJSONRPC.Logs + case Application.compile_env(:explorer, :chain_type) do + :ethereum -> + @chain_type_fields quote( + do: [ + blob_gas_price: non_neg_integer(), + blob_gas_used: non_neg_integer() + ] + ) + + :optimism -> + @chain_type_fields quote( + do: [ + l1_fee: non_neg_integer(), + l1_fee_scalar: non_neg_integer(), + l1_gas_price: non_neg_integer(), + l1_gas_used: non_neg_integer() + ] + ) + + :arbitrum -> + @chain_type_fields quote( + do: [ + gas_used_for_l1: non_neg_integer() + ] + ) + + _ -> + @chain_type_fields quote(do: []) + end + @type elixir :: %{String.t() => String.t() | non_neg_integer} @typedoc """ @@ -38,11 +68,16 @@ defmodule EthereumJSONRPC.Receipt do | nil } - @typedoc """ - * `:ok` - transaction succeeded - * `:error` - transaction failed - """ - @type status :: :ok | :error + @type params :: %{ + unquote_splicing(@chain_type_fields), + optional(:gas_price) => non_neg_integer(), + cumulative_gas_used: non_neg_integer(), + gas_used: non_neg_integer(), + created_contract_address_hash: EthereumJSONRPC.hash(), + status: :ok | :error, + transaction_hash: EthereumJSONRPC.hash(), + transaction_index: non_neg_integer() + } @doc """ Get `t:EthereumJSONRPC.Logs.elixir/0` from `t:elixir/0` @@ -86,6 +121,9 @@ defmodule EthereumJSONRPC.Receipt do l1_gas_price: 0,\ l1_gas_used: 0\ """ + :arbitrum -> """ + gas_used_for_l1: nil\ + """ 
_ -> "" end} } @@ -132,20 +170,15 @@ defmodule EthereumJSONRPC.Receipt do l1_gas_price: 0,\ l1_gas_used: 0\ """ + :arbitrum -> """ + gas_used_for_l1: nil\ + """ _ -> "" end} } """ - @spec elixir_to_params(elixir) :: %{ - optional(:gas_price) => non_neg_integer(), - cumulative_gas_used: non_neg_integer, - gas_used: non_neg_integer, - created_contract_address_hash: String.t() | nil, - status: status(), - transaction_hash: String.t(), - transaction_index: non_neg_integer() - } + @spec elixir_to_params(elixir) :: params def elixir_to_params(elixir) do elixir |> do_elixir_to_params() @@ -184,16 +217,18 @@ defmodule EthereumJSONRPC.Receipt do defp maybe_append_gas_price(params, _), do: params - defp chain_type_fields(params, elixir) do - case Application.get_env(:explorer, :chain_type) do - :ethereum -> + case Application.compile_env(:explorer, :chain_type) do + :ethereum -> + defp chain_type_fields(params, elixir) do params |> Map.merge(%{ blob_gas_price: Map.get(elixir, "blobGasPrice", 0), blob_gas_used: Map.get(elixir, "blobGasUsed", 0) }) + end - :optimism -> + :optimism -> + defp chain_type_fields(params, elixir) do params |> Map.merge(%{ l1_fee: Map.get(elixir, "l1Fee", 0), @@ -201,10 +236,18 @@ defmodule EthereumJSONRPC.Receipt do l1_gas_price: Map.get(elixir, "l1GasPrice", 0), l1_gas_used: Map.get(elixir, "l1GasUsed", 0) }) + end - _ -> + :arbitrum -> + defp chain_type_fields(params, elixir) do params - end + |> Map.merge(%{ + gas_used_for_l1: Map.get(elixir, "gasUsedForL1") + }) + end + + _ -> + defp chain_type_fields(params, _), do: params end @doc """ @@ -320,11 +363,13 @@ defmodule EthereumJSONRPC.Receipt do # hash format # gas is passed in from the `t:EthereumJSONRPC.Transaction.params/0` to allow pre-Byzantium status to be derived defp entry_to_elixir({key, _} = entry) - when key in ~w(blockHash contractAddress from gas logsBloom root to transactionHash revertReason type l1FeeScalar), + when key in ~w(blockHash contractAddress from gas logsBloom root to 
transactionHash + revertReason type l1FeeScalar), do: {:ok, entry} defp entry_to_elixir({key, quantity}) - when key in ~w(blockNumber cumulativeGasUsed gasUsed transactionIndex blobGasUsed blobGasPrice l1Fee l1GasPrice l1GasUsed effectiveGasPrice) do + when key in ~w(blockNumber cumulativeGasUsed gasUsed transactionIndex blobGasUsed + blobGasPrice l1Fee l1GasPrice l1GasUsed effectiveGasPrice gasUsedForL1) do result = if is_nil(quantity) do nil @@ -367,7 +412,7 @@ defmodule EthereumJSONRPC.Receipt do end # Arbitrum fields - defp entry_to_elixir({key, _}) when key in ~w(returnData returnCode feeStats l1BlockNumber gasUsedForL1) do + defp entry_to_elixir({key, _}) when key in ~w(returnData returnCode feeStats l1BlockNumber) do :ignore end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex index 739eb43f7cee..bcd912ac859e 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex @@ -111,6 +111,9 @@ defmodule EthereumJSONRPC.Receipts do l1_gas_price: 0,\ l1_gas_used: 0\ """ + :arbitrum -> """ + gas_used_for_l1: nil\ + """ _ -> "" end} } diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex index 4d60d448db9e..dd784b34b119 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/transaction.ex @@ -48,6 +48,13 @@ defmodule EthereumJSONRPC.Transaction do ] ) + :arbitrum -> + @chain_type_fields quote( + do: [ + request_id: non_neg_integer() + ] + ) + _ -> @chain_type_fields quote(do: []) end @@ -509,6 +516,11 @@ defmodule EthereumJSONRPC.Transaction do }) end + :arbitrum -> + put_if_present(params, elixir, [ + {"requestId", :request_id} + ]) + _ -> params end @@ -631,7 +643,7 @@ defmodule EthereumJSONRPC.Transaction do do: {"input", value} defp entry_to_elixir({key, quantity}) - when key in 
~w(gas gasPrice nonce r s standardV v value type maxPriorityFeePerGas maxFeePerGas maxFeePerBlobGas) and + when key in ~w(gas gasPrice nonce r s standardV v value type maxPriorityFeePerGas maxFeePerGas maxFeePerBlobGas requestId) and quantity != nil do {key, quantity_to_integer(quantity)} end diff --git a/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/block_test.exs b/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/block_test.exs index edb7c5351702..7628b54da2bb 100644 --- a/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/block_test.exs +++ b/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/block_test.exs @@ -6,7 +6,7 @@ defmodule EthereumJSONRPC.BlockTest do alias EthereumJSONRPC.Block describe "elixir_to_params/1" do - test "sets totalDifficulty to nil if it's empty" do + test "sets default values for params (incl. nil)" do result = Block.elixir_to_params(%{ "difficulty" => 17_561_410_778, @@ -55,32 +55,38 @@ defmodule EthereumJSONRPC.BlockTest do transactions_root: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", uncles: [] } - |> (&if(Application.get_env(:explorer, :chain_type) == :rsk, - do: - Map.merge( - &1, - %{ - bitcoin_merged_mining_coinbase_transaction: nil, - bitcoin_merged_mining_header: nil, - bitcoin_merged_mining_merkle_proof: nil, - hash_for_merged_mining: nil, - minimum_gas_price: nil - } - ), - else: &1 - )).() - |> (&if(Application.get_env(:explorer, :chain_type) == :ethereum, - do: - Map.merge( - &1, - %{ - withdrawals_root: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - blob_gas_used: 0, - excess_blob_gas: 0 - } - ), - else: &1 - )).() + |> Map.merge(chain_type_fields()) + end + + case Application.compile_env(:explorer, :chain_type) do + :rsk -> + defp chain_type_fields, + do: %{ + bitcoin_merged_mining_coinbase_transaction: nil, + bitcoin_merged_mining_header: nil, + bitcoin_merged_mining_merkle_proof: nil, + hash_for_merged_mining: nil, + minimum_gas_price: nil + } + + :ethereum -> + defp chain_type_fields, + 
do: %{ + withdrawals_root: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", + blob_gas_used: 0, + excess_blob_gas: 0 + } + + :arbitrum -> + defp chain_type_fields, + do: %{ + send_root: nil, + send_count: nil, + l1_block_number: nil + } + + _ -> + defp chain_type_fields, do: %{} end end diff --git a/apps/explorer/config/dev.exs b/apps/explorer/config/dev.exs index 36ba58629488..4fbc76dbe887 100644 --- a/apps/explorer/config/dev.exs +++ b/apps/explorer/config/dev.exs @@ -31,6 +31,9 @@ config :explorer, Explorer.Repo.Suave, timeout: :timer.seconds(80) config :explorer, Explorer.Repo.Beacon, timeout: :timer.seconds(80) +# Configure Arbitrum database +config :explorer, Explorer.Repo.Arbitrum, timeout: :timer.seconds(80) + config :explorer, Explorer.Repo.BridgedTokens, timeout: :timer.seconds(80) config :explorer, Explorer.Repo.Filecoin, timeout: :timer.seconds(80) diff --git a/apps/explorer/config/prod.exs b/apps/explorer/config/prod.exs index f8337d04ca74..9f69dbd54b36 100644 --- a/apps/explorer/config/prod.exs +++ b/apps/explorer/config/prod.exs @@ -48,6 +48,10 @@ config :explorer, Explorer.Repo.Beacon, prepare: :unnamed, timeout: :timer.seconds(60) +config :explorer, Explorer.Repo.Arbitrum, + prepare: :unnamed, + timeout: :timer.seconds(60) + config :explorer, Explorer.Repo.BridgedTokens, prepare: :unnamed, timeout: :timer.seconds(60) diff --git a/apps/explorer/config/test.exs b/apps/explorer/config/test.exs index b3670fe1e927..b006aef935c4 100644 --- a/apps/explorer/config/test.exs +++ b/apps/explorer/config/test.exs @@ -62,6 +62,7 @@ for repo <- [ Explorer.Repo.RSK, Explorer.Repo.Shibarium, Explorer.Repo.Suave, + Explorer.Repo.Arbitrum, Explorer.Repo.BridgedTokens, Explorer.Repo.Filecoin, Explorer.Repo.Stability, diff --git a/apps/explorer/lib/explorer/application.ex b/apps/explorer/lib/explorer/application.ex index 72c1296a5eab..9fa12bb7297a 100644 --- a/apps/explorer/lib/explorer/application.ex +++ b/apps/explorer/lib/explorer/application.ex 
@@ -155,6 +155,7 @@ defmodule Explorer.Application do Explorer.Repo.RSK, Explorer.Repo.Shibarium, Explorer.Repo.Suave, + Explorer.Repo.Arbitrum, Explorer.Repo.BridgedTokens, Explorer.Repo.Filecoin, Explorer.Repo.Stability diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index c698ba938798..34f8cd5ca392 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -1731,6 +1731,20 @@ defmodule Explorer.Chain do |> Enum.into(%{}) end + @doc """ + Retrieves the total row count for a given table. + + This function estimates the row count using system catalogs. If the estimate + is unavailable, it performs an exact count using an aggregate query. + + ## Parameters + - `module`: The module representing the table schema. + - `options`: An optional keyword list of options, such as selecting a specific repository. + + ## Returns + - The total row count as a non-negative integer. + """ + @spec get_table_rows_total_count(atom(), keyword()) :: non_neg_integer() def get_table_rows_total_count(module, options) do table_name = module.__schema__(:source) @@ -2468,6 +2482,21 @@ defmodule Explorer.Chain do end end + @doc """ + Finds the block number closest to a given timestamp, with a one-minute buffer, optionally + adjusting based on whether the block should be before or after the timestamp. + + ## Parameters + - `given_timestamp`: The timestamp for which the closest block number is being sought. + - `closest`: A direction indicator (`:before` or `:after`) specifying whether the block number + returned should be before or after the given timestamp. + - `from_api`: A boolean flag indicating whether to use the replica database or the primary one + for the query. + + ## Returns + - `{:ok, block_number}` where `block_number` is the block number closest to the specified timestamp. + - `{:error, :not_found}` if no block is found within the specified criteria. 
+ """ @spec timestamp_to_block_number(DateTime.t(), :before | :after, boolean()) :: {:ok, Block.block_number()} | {:error, :not_found} def timestamp_to_block_number(given_timestamp, closest, from_api) do @@ -3311,6 +3340,22 @@ defmodule Explorer.Chain do def limit_showing_transactions, do: @limit_showing_transactions + @doc """ + Dynamically joins and preloads associations in a query based on necessity. + + This function adjusts the provided Ecto query to include joins for associations. It supports + both optional and required joins. Optional joins use the `preload` function to fetch associations + without enforcing their presence. Required joins ensure the association exists. + + ## Parameters + - `query`: The initial Ecto query. + - `associations`: A single association or a tuple with nested association preloads. + - `necessity`: Specifies if the association is `:optional` or `:required`. + + ## Returns + - The modified query with the specified associations joined according to the defined necessity. + """ + @spec join_association(atom() | Ecto.Query.t(), [{atom(), atom()}], :optional | :required) :: Ecto.Query.t() def join_association(query, [{association, nested_preload}], necessity) when is_atom(association) and is_atom(nested_preload) do case necessity do @@ -3328,6 +3373,7 @@ defmodule Explorer.Chain do end end + @spec join_association(atom() | Ecto.Query.t(), atom(), :optional | :required) :: Ecto.Query.t() def join_association(query, association, necessity) do case necessity do :optional -> @@ -3338,10 +3384,23 @@ defmodule Explorer.Chain do end end - @spec join_associations(atom() | Ecto.Query.t(), map) :: Ecto.Query.t() @doc """ - Function to preload entities associated with selected in provided query items + Applies dynamic joins to a query based on provided association necessities. 
+ + This function iterates over a map of associations with their required join types, either + `:optional` or `:required`, and applies the corresponding joins to the given query. + + More info is available on https://hexdocs.pm/ecto/Ecto.Query.html#preload/3 + + ## Parameters + - `query`: The base query to which associations will be joined. + - `necessity_by_association`: A map specifying each association and its necessity + (`:optional` or `:required`). + + ## Returns + - The query with all specified associations joined according to their necessity. """ + @spec join_associations(atom() | Ecto.Query.t(), %{any() => :optional | :required}) :: Ecto.Query.t() def join_associations(query, necessity_by_association) when is_map(necessity_by_association) do Enum.reduce(necessity_by_association, query, fn {association, join}, acc_query -> join_association(acc_query, association, join) diff --git a/apps/explorer/lib/explorer/chain/arbitrum/batch_block.ex b/apps/explorer/lib/explorer/chain/arbitrum/batch_block.ex new file mode 100644 index 000000000000..9ba9a0e806dd --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/batch_block.ex @@ -0,0 +1,53 @@ +defmodule Explorer.Chain.Arbitrum.BatchBlock do + @moduledoc """ + Models a list of blocks related to a batch for Arbitrum. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Arbitrum.BatchBlocks + + Migrations: + - Explorer.Repo.Arbitrum.Migrations.CreateArbitrumTables + """ + + use Explorer.Schema + + alias Explorer.Chain.Arbitrum.{L1Batch, LifecycleTransaction} + + @optional_attrs ~w(confirmation_id)a + + @required_attrs ~w(batch_number block_number)a + + @type t :: %__MODULE__{ + batch_number: non_neg_integer(), + batch: %Ecto.Association.NotLoaded{} | L1Batch.t() | nil, + block_number: non_neg_integer(), + confirmation_id: non_neg_integer() | nil, + confirmation_transaction: %Ecto.Association.NotLoaded{} | LifecycleTransaction.t() | nil + } + + @primary_key {:block_number, :integer, autogenerate: false} + schema "arbitrum_batch_l2_blocks" do + belongs_to(:batch, L1Batch, foreign_key: :batch_number, references: :number, type: :integer) + + belongs_to(:confirmation_transaction, LifecycleTransaction, + foreign_key: :confirmation_id, + references: :id, + type: :integer + ) + + timestamps() + end + + @doc """ + Validates that the `attrs` are valid. + """ + @spec changeset(Ecto.Schema.t(), map()) :: Ecto.Schema.t() + def changeset(%__MODULE__{} = items, attrs \\ %{}) do + items + |> cast(attrs, @required_attrs ++ @optional_attrs) + |> validate_required(@required_attrs) + |> foreign_key_constraint(:batch_number) + |> foreign_key_constraint(:confirmation_id) + |> unique_constraint(:block_number) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/batch_transaction.ex b/apps/explorer/lib/explorer/chain/arbitrum/batch_transaction.ex new file mode 100644 index 000000000000..c4ac8c6213c8 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/batch_transaction.ex @@ -0,0 +1,52 @@ +defmodule Explorer.Chain.Arbitrum.BatchTransaction do + @moduledoc """ + Models a list of transactions related to a batch for Arbitrum. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Arbitrum.BatchTransactions + + Migrations: + - Explorer.Repo.Arbitrum.Migrations.CreateArbitrumTables + """ + + use Explorer.Schema + + alias Explorer.Chain.Arbitrum.L1Batch + alias Explorer.Chain.{Hash, Transaction} + + @required_attrs ~w(batch_number tx_hash)a + + @type t :: %__MODULE__{ + batch_number: non_neg_integer(), + batch: %Ecto.Association.NotLoaded{} | L1Batch.t() | nil, + tx_hash: Hash.t(), + l2_transaction: %Ecto.Association.NotLoaded{} | Transaction.t() | nil + } + + @primary_key false + schema "arbitrum_batch_l2_transactions" do + belongs_to(:batch, L1Batch, foreign_key: :batch_number, references: :number, type: :integer) + + belongs_to(:l2_transaction, Transaction, + foreign_key: :tx_hash, + primary_key: true, + references: :hash, + type: Hash.Full + ) + + timestamps() + end + + @doc """ + Validates that the `attrs` are valid. + """ + @spec changeset(Ecto.Schema.t(), map()) :: Ecto.Schema.t() + def changeset(%__MODULE__{} = transactions, attrs \\ %{}) do + transactions + |> cast(attrs, @required_attrs) + |> validate_required(@required_attrs) + |> foreign_key_constraint(:batch_number) + |> foreign_key_constraint(:block_hash) + |> unique_constraint(:tx_hash) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/l1_batch.ex b/apps/explorer/lib/explorer/chain/arbitrum/l1_batch.ex new file mode 100644 index 000000000000..8ec71726ea61 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/l1_batch.ex @@ -0,0 +1,62 @@ +defmodule Explorer.Chain.Arbitrum.L1Batch do + @moduledoc """ + Models an L1 batch for Arbitrum. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Arbitrum.L1Batches + + Migrations: + - Explorer.Repo.Arbitrum.Migrations.CreateArbitrumTables + """ + + use Explorer.Schema + + alias Explorer.Chain.{ + Block, + Hash + } + + alias Explorer.Chain.Arbitrum.LifecycleTransaction + + @required_attrs ~w(number transactions_count start_block end_block before_acc after_acc commitment_id)a + + @type t :: %__MODULE__{ + number: non_neg_integer(), + transactions_count: non_neg_integer(), + start_block: Block.block_number(), + end_block: Block.block_number(), + before_acc: Hash.t(), + after_acc: Hash.t(), + commitment_id: non_neg_integer(), + commitment_transaction: %Ecto.Association.NotLoaded{} | LifecycleTransaction.t() | nil + } + + @primary_key {:number, :integer, autogenerate: false} + schema "arbitrum_l1_batches" do + field(:transactions_count, :integer) + field(:start_block, :integer) + field(:end_block, :integer) + field(:before_acc, Hash.Full) + field(:after_acc, Hash.Full) + + belongs_to(:commitment_transaction, LifecycleTransaction, + foreign_key: :commitment_id, + references: :id, + type: :integer + ) + + timestamps() + end + + @doc """ + Validates that the `attrs` are valid. + """ + @spec changeset(Ecto.Schema.t(), map()) :: Ecto.Schema.t() + def changeset(%__MODULE__{} = batches, attrs \\ %{}) do + batches + |> cast(attrs, @required_attrs) + |> validate_required(@required_attrs) + |> foreign_key_constraint(:commitment_id) + |> unique_constraint(:number) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/l1_execution.ex b/apps/explorer/lib/explorer/chain/arbitrum/l1_execution.ex new file mode 100644 index 000000000000..32ae344d2d68 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/l1_execution.ex @@ -0,0 +1,46 @@ +defmodule Explorer.Chain.Arbitrum.L1Execution do + @moduledoc """ + Models a list of execution transactions related to a L2 to L1 messages on Arbitrum. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Arbitrum.L1Executions + + Migrations: + - Explorer.Repo.Arbitrum.Migrations.CreateArbitrumTables + """ + + use Explorer.Schema + + alias Explorer.Chain.Arbitrum.LifecycleTransaction + + @required_attrs ~w(message_id execution_id)a + + @type t :: %__MODULE__{ + message_id: non_neg_integer(), + execution_id: non_neg_integer(), + execution_transaction: %Ecto.Association.NotLoaded{} | LifecycleTransaction.t() | nil + } + + @primary_key {:message_id, :integer, autogenerate: false} + schema "arbitrum_l1_executions" do + belongs_to(:execution_transaction, LifecycleTransaction, + foreign_key: :execution_id, + references: :id, + type: :integer + ) + + timestamps() + end + + @doc """ + Validates that the `attrs` are valid. + """ + @spec changeset(Ecto.Schema.t(), map()) :: Ecto.Schema.t() + def changeset(%__MODULE__{} = items, attrs \\ %{}) do + items + |> cast(attrs, @required_attrs) + |> validate_required(@required_attrs) + |> foreign_key_constraint(:execution_id) + |> unique_constraint(:message_id) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/lifecycle_transaction.ex b/apps/explorer/lib/explorer/chain/arbitrum/lifecycle_transaction.ex new file mode 100644 index 000000000000..5cd8dc05462a --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/lifecycle_transaction.ex @@ -0,0 +1,54 @@ +defmodule Explorer.Chain.Arbitrum.LifecycleTransaction do + @moduledoc """ + Models an L1 lifecycle transaction for Arbitrum. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Arbitrum.LifecycleTransactions + + Migrations: + - Explorer.Repo.Arbitrum.Migrations.CreateArbitrumTables + """ + + use Explorer.Schema + + alias Explorer.Chain.{ + Block, + Hash + } + + alias Explorer.Chain.Arbitrum.{BatchBlock, L1Batch} + + @required_attrs ~w(id hash block_number timestamp status)a + + @type t :: %__MODULE__{ + id: non_neg_integer(), + hash: Hash.t(), + block_number: Block.block_number(), + timestamp: DateTime.t(), + status: String.t() + } + + @primary_key {:id, :integer, autogenerate: false} + schema "arbitrum_lifecycle_l1_transactions" do + field(:hash, Hash.Full) + field(:block_number, :integer) + field(:timestamp, :utc_datetime_usec) + field(:status, Ecto.Enum, values: [:unfinalized, :finalized]) + + has_many(:committed_batches, L1Batch, foreign_key: :commitment_id) + has_many(:confirmed_blocks, BatchBlock, foreign_key: :confirmation_id) + + timestamps() + end + + @doc """ + Validates that the `attrs` are valid. + """ + @spec changeset(Ecto.Schema.t(), map()) :: Ecto.Schema.t() + def changeset(%__MODULE__{} = txn, attrs \\ %{}) do + txn + |> cast(attrs, @required_attrs) + |> validate_required(@required_attrs) + |> unique_constraint([:id, :hash]) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/message.ex b/apps/explorer/lib/explorer/chain/arbitrum/message.ex new file mode 100644 index 000000000000..e3899078a61a --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/message.ex @@ -0,0 +1,57 @@ +defmodule Explorer.Chain.Arbitrum.Message do + @moduledoc """ + Models an L1<->L2 messages on Arbitrum. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Arbitrum.Messages + + Migrations: + - Explorer.Repo.Arbitrum.Migrations.CreateArbitrumTables + """ + + use Explorer.Schema + + alias Explorer.Chain.{Block, Hash} + + @optional_attrs ~w(originator_address originating_transaction_hash origination_timestamp originating_transaction_block_number completion_transaction_hash)a + + @required_attrs ~w(direction message_id status)a + + @allowed_attrs @optional_attrs ++ @required_attrs + + @type t :: %__MODULE__{ + direction: String.t(), + message_id: non_neg_integer(), + originator_address: Hash.Address.t() | nil, + originating_transaction_hash: Hash.t() | nil, + origination_timestamp: DateTime.t() | nil, + originating_transaction_block_number: Block.block_number() | nil, + completion_transaction_hash: Hash.t() | nil, + status: String.t() + } + + @primary_key false + schema "arbitrum_crosslevel_messages" do + field(:direction, Ecto.Enum, values: [:to_l2, :from_l2], primary_key: true) + field(:message_id, :integer, primary_key: true) + field(:originator_address, Hash.Address) + field(:originating_transaction_hash, Hash.Full) + field(:origination_timestamp, :utc_datetime_usec) + field(:originating_transaction_block_number, :integer) + field(:completion_transaction_hash, Hash.Full) + field(:status, Ecto.Enum, values: [:initiated, :sent, :confirmed, :relayed]) + + timestamps() + end + + @doc """ + Validates that the `attrs` are valid. 
+ """ + @spec changeset(Ecto.Schema.t(), map()) :: Ecto.Schema.t() + def changeset(%__MODULE__{} = txn, attrs \\ %{}) do + txn + |> cast(attrs, @allowed_attrs) + |> validate_required(@required_attrs) + |> unique_constraint([:direction, :message_id]) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader.ex new file mode 100644 index 000000000000..0ea40f28e44a --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader.ex @@ -0,0 +1,913 @@ +defmodule Explorer.Chain.Arbitrum.Reader do + @moduledoc """ + Contains read functions for Arbitrum modules. + """ + + import Ecto.Query, only: [from: 2, limit: 2, order_by: 2, subquery: 1, where: 2, where: 3] + import Explorer.Chain, only: [select_repo: 1] + + alias Explorer.Chain.Arbitrum.{BatchBlock, BatchTransaction, L1Batch, L1Execution, LifecycleTransaction, Message} + + alias Explorer.{Chain, PagingOptions, Repo} + + alias Explorer.Chain.Block, as: FullBlock + alias Explorer.Chain.{Hash, Transaction} + + @doc """ + Retrieves the number of the latest L1 block where an L1-to-L2 message was discovered. + + ## Returns + - The number of L1 block, or `nil` if no L1-to-L2 messages are found. + """ + @spec l1_block_of_latest_discovered_message_to_l2() :: FullBlock.block_number() | nil + def l1_block_of_latest_discovered_message_to_l2 do + query = + from(msg in Message, + select: msg.originating_transaction_block_number, + where: msg.direction == :to_l2 and not is_nil(msg.originating_transaction_block_number), + order_by: [desc: msg.message_id], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves the number of the earliest L1 block where an L1-to-L2 message was discovered. + + ## Returns + - The number of L1 block, or `nil` if no L1-to-L2 messages are found. 
+ """ + @spec l1_block_of_earliest_discovered_message_to_l2() :: FullBlock.block_number() | nil + def l1_block_of_earliest_discovered_message_to_l2 do + query = + from(msg in Message, + select: msg.originating_transaction_block_number, + where: msg.direction == :to_l2 and not is_nil(msg.originating_transaction_block_number), + order_by: [asc: msg.message_id], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves the number of the earliest rollup block where an L2-to-L1 message was discovered. + + ## Returns + - The number of rollup block, or `nil` if no L2-to-L1 messages are found. + """ + @spec rollup_block_of_earliest_discovered_message_from_l2() :: FullBlock.block_number() | nil + def rollup_block_of_earliest_discovered_message_from_l2 do + query = + from(msg in Message, + select: msg.originating_transaction_block_number, + where: msg.direction == :from_l2 and not is_nil(msg.originating_transaction_block_number), + order_by: [asc: msg.originating_transaction_block_number], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves the number of the earliest rollup block where a completed L1-to-L2 message was discovered. + + ## Returns + - The block number of the rollup block, or `nil` if no completed L1-to-L2 messages are found, + or if the rollup transaction that emitted the corresponding message has not been indexed yet. 
+ """ + @spec rollup_block_of_earliest_discovered_message_to_l2() :: FullBlock.block_number() | nil + def rollup_block_of_earliest_discovered_message_to_l2 do + completion_tx_subquery = + from(msg in Message, + select: msg.completion_transaction_hash, + where: msg.direction == :to_l2 and not is_nil(msg.completion_transaction_hash), + order_by: [asc: msg.message_id], + limit: 1 + ) + + query = + from(tx in Transaction, + select: tx.block_number, + where: tx.hash == subquery(completion_tx_subquery), + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves the number of the latest L1 block where the commitment transaction with a batch was included. + + As per the Arbitrum rollup nature, from the indexer's point of view, a batch does not exist until + the commitment transaction is submitted to L1. Therefore, the situation where a batch exists but + there is no commitment transaction is not possible. + + ## Returns + - The number of the L1 block, or `nil` if no rollup batches are found, or if the association between the batch + and the commitment transaction has been broken due to database inconsistency. + """ + @spec l1_block_of_latest_committed_batch() :: FullBlock.block_number() | nil + def l1_block_of_latest_committed_batch do + query = + from(batch in L1Batch, + order_by: [desc: batch.number], + limit: 1 + ) + + case query + # :required is used since the situation when commit transaction is not found is not possible + |> Chain.join_associations(%{:commitment_transaction => :required}) + |> Repo.one() do + nil -> nil + batch -> batch.commitment_transaction.block_number + end + end + + @doc """ + Retrieves the number of the earliest L1 block where the commitment transaction with a batch was included. + + As per the Arbitrum rollup nature, from the indexer's point of view, a batch does not exist until + the commitment transaction is submitted to L1. Therefore, the situation where a batch exists but + there is no commitment transaction is not possible. 
+ + ## Returns + - The number of the L1 block, or `nil` if no rollup batches are found, or if the association between the batch + and the commitment transaction has been broken due to database inconsistency. + """ + @spec l1_block_of_earliest_committed_batch() :: FullBlock.block_number() | nil + def l1_block_of_earliest_committed_batch do + query = + from(batch in L1Batch, + order_by: [asc: batch.number], + limit: 1 + ) + + case query + # :required is used since the situation when commit transaction is not found is not possible + |> Chain.join_associations(%{:commitment_transaction => :required}) + |> Repo.one() do + nil -> nil + batch -> batch.commitment_transaction.block_number + end + end + + @doc """ + Retrieves the block number of the highest rollup block that has been included in a batch. + + ## Returns + - The number of the highest rollup block included in a batch, or `nil` if no rollup batches are found. + """ + @spec highest_committed_block() :: FullBlock.block_number() | nil + def highest_committed_block do + query = + from(batch in L1Batch, + select: batch.end_block, + order_by: [desc: batch.number], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Reads a list of L1 transactions by their hashes from the `arbitrum_lifecycle_l1_transactions` table. + + ## Parameters + - `l1_tx_hashes`: A list of hashes to retrieve L1 transactions for. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.LifecycleTransaction` corresponding to the hashes from + the input list. The output list may be smaller than the input list. + """ + @spec lifecycle_transactions(maybe_improper_list(Hash.t(), [])) :: [LifecycleTransaction] + def lifecycle_transactions(l1_tx_hashes) when is_list(l1_tx_hashes) do + query = + from( + lt in LifecycleTransaction, + select: {lt.hash, lt.id}, + where: lt.hash in ^l1_tx_hashes + ) + + Repo.all(query, timeout: :infinity) + end + + @doc """ + Reads a list of transactions executing L2-to-L1 messages by their IDs. 
+ + ## Parameters + - `message_ids`: A list of IDs to retrieve executing transactions for. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.L1Execution` corresponding to the message IDs from + the input list. The output list may be smaller than the input list if some IDs do not + correspond to any existing transactions. + """ + @spec l1_executions(maybe_improper_list(non_neg_integer(), [])) :: [L1Execution] + def l1_executions(message_ids) when is_list(message_ids) do + query = + from( + ex in L1Execution, + where: ex.message_id in ^message_ids + ) + + query + # :required is used since execution records in the table are created only when + # the corresponding execution transaction is indexed + |> Chain.join_associations(%{:execution_transaction => :required}) + |> Repo.all(timeout: :infinity) + end + + @doc """ + Determines the next index for the L1 transaction available in the `arbitrum_lifecycle_l1_transactions` table. + + ## Returns + - The next available index. If there are no L1 transactions imported yet, it will return `1`. + """ + @spec next_lifecycle_transaction_id() :: non_neg_integer + def next_lifecycle_transaction_id do + query = + from(lt in LifecycleTransaction, + select: lt.id, + order_by: [desc: lt.id], + limit: 1 + ) + + last_id = + query + |> Repo.one() + |> Kernel.||(0) + + last_id + 1 + end + + @doc """ + Retrieves unfinalized L1 transactions from the `LifecycleTransaction` table that are + involved in changing the statuses of rollup blocks or transactions. + + An L1 transaction is considered unfinalized if it has not yet reached a state where + it is permanently included in the blockchain, meaning it is still susceptible to + potential reorganization or change. Transactions are evaluated against the `finalized_block` + parameter to determine their finalized status. + + ## Parameters + - `finalized_block`: The L1 block number above which transactions are considered finalized. 
+ Transactions in blocks higher than this number are not included in the results. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.LifecycleTransaction` representing unfinalized transactions, + or `[]` if no unfinalized transactions are found. + """ + @spec lifecycle_unfinalized_transactions(FullBlock.block_number()) :: [LifecycleTransaction] + def lifecycle_unfinalized_transactions(finalized_block) + when is_integer(finalized_block) and finalized_block >= 0 do + query = + from( + lt in LifecycleTransaction, + where: lt.block_number <= ^finalized_block and lt.status == :unfinalized + ) + + Repo.all(query, timeout: :infinity) + end + + @doc """ + Gets the rollup block number by the hash of the block. Lookup is performed only + for blocks explicitly included in a batch, i.e., the batch has been identified by + the corresponding fetcher. The function may return `nil` as a successful response + if the batch containing the rollup block has not been indexed yet. + + ## Parameters + - `block_hash`: The hash of a block included in the batch. + + ## Returns + - the number of the rollup block corresponding to the given hash or `nil` if the + block or batch were not indexed yet. + """ + @spec rollup_block_hash_to_num(binary()) :: FullBlock.block_number() | nil + def rollup_block_hash_to_num(block_hash) when is_binary(block_hash) do + query = + from( + fb in FullBlock, + inner_join: rb in BatchBlock, + on: rb.block_number == fb.number, + select: fb.number, + where: fb.hash == ^block_hash + ) + + query + |> Repo.one() + end + + @doc """ + Checks if the numbers from the provided list correspond to the numbers of indexed batches. + + ## Parameters + - `batches_numbers`: The list of batch numbers. + + ## Returns + - A list of batch numbers that are indexed and match the provided list, or `[]` + if none of the batch numbers in the provided list exist in the database. The output list + may be smaller than the input list. 
+ """ + @spec batches_exist(maybe_improper_list(non_neg_integer(), [])) :: [non_neg_integer] + def batches_exist(batches_numbers) when is_list(batches_numbers) do + query = + from( + batch in L1Batch, + select: batch.number, + where: batch.number in ^batches_numbers + ) + + query + |> Repo.all(timeout: :infinity) + end + + @doc """ + Retrieves the batch in which the rollup block, identified by the given block number, was included. + + ## Parameters + - `number`: The number of a rollup block. + + ## Returns + - An instance of `Explorer.Chain.Arbitrum.L1Batch` representing the batch containing + the specified rollup block number, or `nil` if no corresponding batch is found. + """ + @spec get_batch_by_rollup_block_number(FullBlock.block_number()) :: L1Batch | nil + def get_batch_by_rollup_block_number(number) + when is_integer(number) and number >= 0 do + query = + from(batch in L1Batch, + # end_block has higher number than start_block + where: batch.end_block >= ^number and batch.start_block <= ^number + ) + + query + # :required is used since the situation when commit transaction is not found is not possible + |> Chain.join_associations(%{:commitment_transaction => :required}) + |> Repo.one() + end + + @doc """ + Retrieves the L1 block number where the confirmation transaction of the highest confirmed rollup block was included. + + ## Returns + - The L1 block number if a confirmed rollup block is found and the confirmation transaction is indexed; + `nil` if no confirmed rollup blocks are found or if there is a database inconsistency. 
+ """ + @spec l1_block_of_latest_confirmed_block() :: FullBlock.block_number() | nil + def l1_block_of_latest_confirmed_block do + query = + from( + rb in BatchBlock, + where: not is_nil(rb.confirmation_id), + order_by: [desc: rb.block_number], + limit: 1 + ) + + case query + # :required is used since existence of the confirmation id is checked above + |> Chain.join_associations(%{:confirmation_transaction => :required}) + |> Repo.one() do + nil -> + nil + + block -> + case block.confirmation_transaction do + # `nil` and `%Ecto.Association.NotLoaded{}` indicate DB inconsistency + nil -> nil + %Ecto.Association.NotLoaded{} -> nil + confirmation_transaction -> confirmation_transaction.block_number + end + end + end + + @doc """ + Retrieves the number of the highest confirmed rollup block. + + ## Returns + - The number of the highest confirmed rollup block, or `nil` if no confirmed rollup blocks are found. + """ + @spec highest_confirmed_block() :: FullBlock.block_number() | nil + def highest_confirmed_block do + query = + from( + rb in BatchBlock, + where: not is_nil(rb.confirmation_id), + select: rb.block_number, + order_by: [desc: rb.block_number], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves the number of the latest L1 block where a transaction executing an L2-to-L1 message was discovered. + + ## Returns + - The number of the latest L1 block with an executing transaction for an L2-to-L1 message, or `nil` if no such transactions are found. + """ + @spec l1_block_of_latest_execution() :: FullBlock.block_number() | nil + def l1_block_of_latest_execution do + query = + from( + tx in LifecycleTransaction, + inner_join: ex in L1Execution, + on: tx.id == ex.execution_id, + select: tx.block_number, + order_by: [desc: tx.block_number], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves the number of the earliest L1 block where a transaction executing an L2-to-L1 message was discovered. 
+ + ## Returns + - The number of the earliest L1 block with an executing transaction for an L2-to-L1 message, or `nil` if no such transactions are found. + """ + @spec l1_block_of_earliest_execution() :: FullBlock.block_number() | nil + def l1_block_of_earliest_execution do + query = + from( + tx in LifecycleTransaction, + inner_join: ex in L1Execution, + on: tx.id == ex.execution_id, + select: tx.block_number, + order_by: [asc: tx.block_number], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves all unconfirmed rollup blocks within the specified range from `first_block` to `last_block`, + inclusive, where `first_block` is less than or equal to `last_block`. + + Since the function relies on the block data generated by the block fetcher, the returned list + may contain fewer blocks than actually exist if some of the blocks have not been indexed by the fetcher yet. + + ## Parameters + - `first_block`: The rollup block number starting the lookup range. + - `last_block`:The rollup block number ending the lookup range. + + ## Returns + - A list of maps containing the batch number, rollup block number and hash for each + unconfirmed block within the range. Returns `[]` if no unconfirmed blocks are found + within the range, or if the block fetcher has not indexed them. + """ + @spec unconfirmed_rollup_blocks(FullBlock.block_number(), FullBlock.block_number()) :: [BatchBlock] + def unconfirmed_rollup_blocks(first_block, last_block) + when is_integer(first_block) and first_block >= 0 and + is_integer(last_block) and first_block <= last_block do + query = + from( + rb in BatchBlock, + where: rb.block_number >= ^first_block and rb.block_number <= ^last_block and is_nil(rb.confirmation_id), + order_by: [asc: rb.block_number] + ) + + Repo.all(query, timeout: :infinity) + end + + @doc """ + Calculates the number of confirmed rollup blocks in the specified batch. 
+ + ## Parameters + - `batch_number`: The number of the batch for which the count of confirmed blocks is to be calculated. + + ## Returns + - The number of confirmed blocks in the batch with the given number. + """ + @spec count_confirmed_rollup_blocks_in_batch(non_neg_integer()) :: non_neg_integer + def count_confirmed_rollup_blocks_in_batch(batch_number) + when is_integer(batch_number) and batch_number >= 0 do + query = + from( + rb in BatchBlock, + where: rb.batch_number == ^batch_number and not is_nil(rb.confirmation_id) + ) + + Repo.aggregate(query, :count, timeout: :infinity) + end + + @doc """ + Retrieves all L2-to-L1 messages with the specified status that originated in rollup blocks with numbers not higher than `block_number`. + + ## Parameters + - `status`: The status of the messages to retrieve, such as `:initiated`, `:sent`, `:confirmed`, or `:relayed`. + - `block_number`: The number of a rollup block that limits the messages lookup. + + ## Returns + - Instances of `Explorer.Chain.Arbitrum.Message` corresponding to the criteria, or `[]` if no messages + with the given status are found in the rollup blocks up to the specified number. + """ + @spec l2_to_l1_messages(:confirmed | :initiated | :relayed | :sent, FullBlock.block_number()) :: [ + Message + ] + def l2_to_l1_messages(status, block_number) + when status in [:initiated, :sent, :confirmed, :relayed] and + is_integer(block_number) and + block_number >= 0 do + query = + from(msg in Message, + where: + msg.direction == :from_l2 and msg.originating_transaction_block_number <= ^block_number and + msg.status == ^status, + order_by: [desc: msg.message_id] + ) + + Repo.all(query, timeout: :infinity) + end + + @doc """ + Retrieves the numbers of the L1 blocks containing the confirmation transactions + bounding the first interval where missed confirmation transactions could be found. + + The absence of a confirmation transaction is assumed based on the analysis of a + series of confirmed rollup blocks. 
For example, if blocks 0-3 are confirmed by transaction X, + blocks 7-9 by transaction Y, and blocks 12-15 by transaction Z, there are two gaps: + blocks 4-6 and 10-11. According to Arbitrum's nature, this indicates that the confirmation + transactions for blocks 6 and 11 have not yet been indexed. + + In the example above, the function will return the tuple with the numbers of the L1 blocks + where transactions Y and Z were included. + + ## Returns + - A tuple of the L1 block numbers between which missing confirmation transactions are suspected, + or `nil` if no gaps in confirmed blocks are found or if there are no missed confirmation transactions. + """ + @spec l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap() :: + {FullBlock.block_number() | nil, FullBlock.block_number()} | nil + def l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap do + # The first subquery retrieves the numbers of confirmed rollup blocks. + rollup_blocks_query = + from( + rb in BatchBlock, + select: %{ + block_number: rb.block_number, + confirmation_id: rb.confirmation_id + }, + where: not is_nil(rb.confirmation_id) + ) + + # The second subquery builds on the first one, grouping block numbers by their + # confirmation transactions. As a result, it identifies the starting and ending + # rollup blocks for every transaction. + confirmed_ranges_query = + from( + subquery in subquery(rollup_blocks_query), + select: %{ + confirmation_id: subquery.confirmation_id, + min_block_num: min(subquery.block_number), + max_block_num: max(subquery.block_number) + }, + group_by: subquery.confirmation_id + ) + + # The third subquery utilizes the window function LAG to associate each confirmation + # transaction with the starting rollup block of the preceding transaction. 
+ confirmed_combined_ranges_query = + from( + subquery in subquery(confirmed_ranges_query), + select: %{ + confirmation_id: subquery.confirmation_id, + min_block_num: subquery.min_block_num, + max_block_num: subquery.max_block_num, + prev_max_number: fragment("LAG(?, 1) OVER (ORDER BY ?)", subquery.max_block_num, subquery.min_block_num), + prev_confirmation_id: + fragment("LAG(?, 1) OVER (ORDER BY ?)", subquery.confirmation_id, subquery.min_block_num) + } + ) + + # The final query identifies confirmation transactions for which the ending block does + # not precede the starting block of the subsequent confirmation transaction. + main_query = + from( + subquery in subquery(confirmed_combined_ranges_query), + inner_join: tx_cur in LifecycleTransaction, + on: subquery.confirmation_id == tx_cur.id, + left_join: tx_prev in LifecycleTransaction, + on: subquery.prev_confirmation_id == tx_prev.id, + select: {tx_prev.block_number, tx_cur.block_number}, + where: subquery.min_block_num - 1 != subquery.prev_max_number or is_nil(subquery.prev_max_number), + order_by: [desc: subquery.min_block_num], + limit: 1 + ) + + main_query + |> Repo.one() + end + + @doc """ + Retrieves the count of cross-chain messages either sent to or from the rollup. + + ## Parameters + - `direction`: A string that specifies the message direction; can be "from-rollup" or "to-rollup". + - `options`: A keyword list of options that may include whether to use a replica database. + + ## Returns + - The total count of cross-chain messages. + """ + @spec messages_count(binary(), api?: boolean()) :: non_neg_integer() + def messages_count(direction, options) when direction == "from-rollup" and is_list(options) do + do_messages_count(:from_l2, options) + end + + def messages_count(direction, options) when direction == "to-rollup" and is_list(options) do + do_messages_count(:to_l2, options) + end + + # Counts the number of cross-chain messages based on the specified direction. 
+ @spec do_messages_count(:from_l2 | :to_l2, api?: boolean()) :: non_neg_integer() + defp do_messages_count(direction, options) do + Message + |> where([msg], msg.direction == ^direction) + |> select_repo(options).aggregate(:count, timeout: :infinity) + end + + @doc """ + Retrieves cross-chain messages based on the specified direction. + + This function constructs and executes a query to retrieve messages either sent + to or from the rollup layer, applying pagination options. These options dictate + not only the number of items to retrieve but also how many items to skip from + the top. + + ## Parameters + - `direction`: A string that can be "from-rollup" or "to-rollup", translated internally to `:from_l2` or `:to_l2`. + - `options`: A keyword list specifying pagination details and database preferences. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.Message` entries. + """ + @spec messages(binary(), + paging_options: PagingOptions.t(), + api?: boolean() + ) :: [Message] + def messages(direction, options) when direction == "from-rollup" do + do_messages(:from_l2, options) + end + + def messages(direction, options) when direction == "to-rollup" do + do_messages(:to_l2, options) + end + + # Executes the query to fetch cross-chain messages based on the specified direction. + # + # This function constructs and executes a query to retrieve messages either sent + # to or from the rollup layer, applying pagination options. These options dictate + # not only the number of items to retrieve but also how many items to skip from + # the top. + # + # ## Parameters + # - `direction`: Can be either `:from_l2` or `:to_l2`, indicating the direction of the messages. + # - `options`: A keyword list of options specifying pagination details and whether to use a replica database. + # + # ## Returns + # - A list of `Explorer.Chain.Arbitrum.Message` entries matching the specified direction. 
+ @spec do_messages(:from_l2 | :to_l2, + paging_options: PagingOptions.t(), + api?: boolean() + ) :: [Message] + defp do_messages(direction, options) do + base_query = + from(msg in Message, + where: msg.direction == ^direction, + order_by: [desc: msg.message_id] + ) + + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + query = + base_query + |> page_messages(paging_options) + |> limit(^paging_options.page_size) + + select_repo(options).all(query) + end + + defp page_messages(query, %PagingOptions{key: nil}), do: query + + defp page_messages(query, %PagingOptions{key: {id}}) do + from(msg in query, where: msg.message_id < ^id) + end + + @doc """ + Retrieves a list of relayed L1 to L2 messages that have been completed. + + ## Parameters + - `options`: A keyword list of options specifying whether to use a replica database and how pagination should be handled. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.Message` representing relayed messages from L1 to L2 that have been completed. + """ + @spec relayed_l1_to_l2_messages( + paging_options: PagingOptions.t(), + api?: boolean() + ) :: [Message] + def relayed_l1_to_l2_messages(options) do + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + query = + from(msg in Message, + where: msg.direction == :to_l2 and not is_nil(msg.completion_transaction_hash), + order_by: [desc: msg.message_id], + limit: ^paging_options.page_size + ) + + select_repo(options).all(query) + end + + @doc """ + Retrieves the total count of rollup batches indexed up to the current moment. + + This function uses an estimated count from system catalogs if available. + If the estimate is unavailable, it performs an exact count using an aggregate query. + + ## Parameters + - `options`: A keyword list specifying options, including whether to use a replica database. + + ## Returns + - The count of indexed batches. 
+ """ + @spec batches_count(api?: boolean()) :: non_neg_integer() + def batches_count(options) do + Chain.get_table_rows_total_count(L1Batch, options) + end + + @doc """ + Retrieves a specific batch by its number or fetches the latest batch if `:latest` is specified. + + ## Parameters + - `number`: Can be either the specific batch number or `:latest` to retrieve + the most recent batch in the database. + - `options`: A keyword list specifying the necessity for joining associations + and whether to use a replica database. + + ## Returns + - `{:ok, Explorer.Chain.Arbitrum.L1Batch}` if the batch is found. + - `{:error, :not_found}` if no batch with the specified number exists. + """ + def batch(number, options) + + @spec batch(:latest, api?: boolean()) :: {:error, :not_found} | {:ok, L1Batch} + def batch(:latest, options) do + L1Batch + |> order_by(desc: :number) + |> limit(1) + |> select_repo(options).one() + |> case do + nil -> {:error, :not_found} + batch -> {:ok, batch} + end + end + + @spec batch(binary() | non_neg_integer(), + necessity_by_association: %{atom() => :optional | :required}, + api?: boolean() + ) :: {:error, :not_found} | {:ok, L1Batch} + def batch(number, options) do + necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) + + L1Batch + |> where(number: ^number) + |> Chain.join_associations(necessity_by_association) + |> select_repo(options).one() + |> case do + nil -> {:error, :not_found} + batch -> {:ok, batch} + end + end + + @doc """ + Retrieves a list of batches from the database. + + This function constructs and executes a query to retrieve batches based on provided + pagination options. These options dictate not only the number of items to retrieve + but also how many items to skip from the top. If the `committed?` option is set to true, + it returns the ten most recent committed batches; otherwise, it fetches batches as + dictated by other pagination parameters. 
+ + ## Parameters + - `options`: A keyword list of options specifying pagination, necessity for joining associations, + and whether to use a replica database. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.L1Batch` entries, filtered and ordered according to the provided options. + """ + @spec batches( + necessity_by_association: %{atom() => :optional | :required}, + committed?: boolean(), + paging_options: PagingOptions.t(), + api?: boolean() + ) :: [L1Batch] + def batches(options) do + necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) + + base_query = + from(batch in L1Batch, + order_by: [desc: batch.number] + ) + + query = + if Keyword.get(options, :committed?, false) do + base_query + |> Chain.join_associations(necessity_by_association) + |> where([batch], not is_nil(batch.commitment_id) and batch.commitment_id > 0) + |> limit(10) + else + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + base_query + |> Chain.join_associations(necessity_by_association) + |> page_batches(paging_options) + |> limit(^paging_options.page_size) + end + + select_repo(options).all(query) + end + + defp page_batches(query, %PagingOptions{key: nil}), do: query + + defp page_batches(query, %PagingOptions{key: {number}}) do + from(batch in query, where: batch.number < ^number) + end + + @doc """ + Retrieves a list of rollup transactions included in a specific batch. + + ## Parameters + - `batch_number`: The batch number whose transactions were included in L1. + - `options`: A keyword list specifying options, including whether to use a replica database. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.BatchTransaction` entries belonging to the specified batch. 
+ """ + @spec batch_transactions(non_neg_integer() | binary(), api?: boolean()) :: [BatchTransaction] + def batch_transactions(batch_number, options) do + query = from(tx in BatchTransaction, where: tx.batch_number == ^batch_number) + + select_repo(options).all(query) + end + + @doc """ + Retrieves a list of rollup blocks included in a specific batch. + + This function constructs and executes a database query to retrieve a list of rollup blocks, + considering pagination options specified in the `options` parameter. These options dictate + the number of items to retrieve and how many items to skip from the top. + + ## Parameters + - `batch_number`: The batch number whose transactions are included on L1. + - `options`: A keyword list of options specifying pagination, association necessity, and + whether to use a replica database. + + ## Returns + - A list of `Explorer.Chain.Block` entries belonging to the specified batch. + """ + @spec batch_blocks(non_neg_integer() | binary(), + necessity_by_association: %{atom() => :optional | :required}, + api?: boolean(), + paging_options: PagingOptions.t() + ) :: [FullBlock] + def batch_blocks(batch_number, options) do + necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + query = + from( + fb in FullBlock, + inner_join: rb in BatchBlock, + on: fb.number == rb.block_number, + select: fb, + where: fb.consensus == true and rb.batch_number == ^batch_number + ) + + query + |> FullBlock.block_type_filter("Block") + |> page_blocks(paging_options) + |> limit(^paging_options.page_size) + |> order_by(desc: :number) + |> Chain.join_associations(necessity_by_association) + |> select_repo(options).all() + end + + defp page_blocks(query, %PagingOptions{key: nil}), do: query + + defp page_blocks(query, %PagingOptions{key: {block_number}}) do + where(query, [block], block.number < ^block_number) + end +end diff --git 
a/apps/explorer/lib/explorer/chain/block.ex b/apps/explorer/lib/explorer/chain/block.ex index a659789fe153..a8f982d1b342 100644 --- a/apps/explorer/lib/explorer/chain/block.ex +++ b/apps/explorer/lib/explorer/chain/block.ex @@ -1,7 +1,13 @@ defmodule Explorer.Chain.Block.Schema do - @moduledoc false + @moduledoc """ + Models blocks. + + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Blocks + """ alias Explorer.Chain.{Address, Block, Hash, PendingBlockOperation, Transaction, Wei, Withdrawal} + alias Explorer.Chain.Arbitrum.BatchBlock, as: ArbitrumBatchBlock alias Explorer.Chain.Block.{Reward, SecondDegreeRelation} alias Explorer.Chain.ZkSync.BatchBlock, as: ZkSyncBatchBlock @@ -39,6 +45,31 @@ defmodule Explorer.Chain.Block.Schema do 2 ) + :arbitrum -> + elem( + quote do + field(:send_count, :integer) + field(:send_root, Hash.Full) + field(:l1_block_number, :integer) + + has_one(:arbitrum_batch_block, ArbitrumBatchBlock, + foreign_key: :block_number, + references: :number + ) + + has_one(:arbitrum_batch, through: [:arbitrum_batch_block, :batch]) + + has_one(:arbitrum_commitment_transaction, + through: [:arbitrum_batch, :commitment_transaction] + ) + + has_one(:arbitrum_confirmation_transaction, + through: [:arbitrum_batch_block, :confirmation_transaction] + ) + end, + 2 + ) + _ -> [] end) @@ -105,18 +136,20 @@ defmodule Explorer.Chain.Block do alias Explorer.Utility.MissingRangesManipulator @optional_attrs ~w(size refetch_needed total_difficulty difficulty base_fee_per_gas)a - |> (&(case Application.compile_env(:explorer, :chain_type) do - :rsk -> - &1 ++ - ~w(minimum_gas_price bitcoin_merged_mining_header bitcoin_merged_mining_coinbase_transaction bitcoin_merged_mining_merkle_proof hash_for_merged_mining)a - :ethereum -> - &1 ++ - ~w(blob_gas_used excess_blob_gas)a + @chain_type_optional_attrs (case Application.compile_env(:explorer, :chain_type) do + :rsk -> + ~w(minimum_gas_price bitcoin_merged_mining_header 
bitcoin_merged_mining_coinbase_transaction bitcoin_merged_mining_merkle_proof hash_for_merged_mining)a + + :ethereum -> + ~w(blob_gas_used excess_blob_gas)a + + :arbitrum -> + ~w(send_count send_root l1_block_number)a - _ -> - &1 - end)).() + _ -> + ~w()a + end) @required_attrs ~w(consensus gas_limit gas_used hash miner_hash nonce number parent_hash timestamp)a @@ -173,7 +206,7 @@ defmodule Explorer.Chain.Block do def changeset(%__MODULE__{} = block, attrs) do block - |> cast(attrs, @required_attrs ++ @optional_attrs) + |> cast(attrs, @required_attrs ++ @optional_attrs ++ @chain_type_optional_attrs) |> validate_required(@required_attrs) |> foreign_key_constraint(:parent_hash) |> unique_constraint(:hash, name: :blocks_pkey) @@ -181,7 +214,7 @@ defmodule Explorer.Chain.Block do def number_only_changeset(%__MODULE__{} = block, attrs) do block - |> cast(attrs, @required_attrs ++ @optional_attrs) + |> cast(attrs, @required_attrs ++ @optional_attrs ++ @chain_type_optional_attrs) |> validate_required([:number]) |> foreign_key_constraint(:parent_hash) |> unique_constraint(:hash, name: :blocks_pkey) diff --git a/apps/explorer/lib/explorer/chain/cache/helper.ex b/apps/explorer/lib/explorer/chain/cache/helper.ex index 4e9bd92bd05f..f82925fe10f1 100644 --- a/apps/explorer/lib/explorer/chain/cache/helper.ex +++ b/apps/explorer/lib/explorer/chain/cache/helper.ex @@ -4,6 +4,22 @@ defmodule Explorer.Chain.Cache.Helper do """ alias Explorer.Chain + @doc """ + Estimates the row count of a given table using PostgreSQL system catalogs. + + This function executes a query to estimate the number of rows in the specified + table based on the table's reltuples and relpages values from the pg_class catalog. + It provides a fast estimation rather than an exact count. + + ## Parameters + - `table_name`: The name of the table to estimate the row count for. + - `options`: An optional keyword list of options, such as selecting a specific repository. 
+ + ## Returns + - An estimated count of rows in the specified table or `nil` if the estimation is not available. + """ + @spec estimated_count_from(binary(), keyword()) :: non_neg_integer() | nil + @spec estimated_count_from(binary()) :: non_neg_integer() | nil def estimated_count_from(table_name, options \\ []) do %Postgrex.Result{rows: [[count]]} = Chain.select_repo(options).query!( diff --git a/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_blocks.ex b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_blocks.ex new file mode 100644 index 000000000000..2b97c22e75f1 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_blocks.ex @@ -0,0 +1,104 @@ +defmodule Explorer.Chain.Import.Runner.Arbitrum.BatchBlocks do + @moduledoc """ + Bulk imports of Explorer.Chain.Arbitrum.BatchBlock. + """ + + require Ecto.Query + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Arbitrum.BatchBlock + alias Explorer.Chain.Import + alias Explorer.Prometheus.Instrumenter + + import Ecto.Query, only: [from: 2] + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [BatchBlock.t()] + + @impl Import.Runner + def ecto_schema_module, do: BatchBlock + + @impl Import.Runner + def option_key, do: :arbitrum_batch_blocks + + @impl Import.Runner + @spec imported_table_row() :: %{:value_description => binary(), :value_type => binary()} + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + @spec run(Multi.t(), list(), map()) :: Multi.t() + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_arbitrum_batch_blocks, fn repo, _ -> + 
Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :arbitrum_batch_blocks, + :arbitrum_batch_blocks + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [BatchBlock.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do + on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) + + # Enforce Arbitrum.BatchBlock ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, & &1.block_number) + + {:ok, inserted} = + Import.insert_changes_list( + repo, + ordered_changes_list, + for: BatchBlock, + returning: true, + timeout: timeout, + timestamps: timestamps, + conflict_target: :block_number, + on_conflict: on_conflict + ) + + {:ok, inserted} + end + + defp default_on_conflict do + from( + tb in BatchBlock, + update: [ + set: [ + # don't update `block_number` as it is a primary key and used for the conflict target + batch_number: fragment("EXCLUDED.batch_number"), + confirmation_id: fragment("EXCLUDED.confirmation_id"), + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", tb.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", tb.updated_at) + ] + ], + where: + fragment( + "(EXCLUDED.batch_number, EXCLUDED.confirmation_id) IS DISTINCT FROM (?, ?)", + tb.batch_number, + tb.confirmation_id + ) + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_transactions.ex b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_transactions.ex new file mode 100644 index 000000000000..f4cda6473b6b --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_transactions.ex @@ -0,0 +1,79 @@ +defmodule 
Explorer.Chain.Import.Runner.Arbitrum.BatchTransactions do + @moduledoc """ + Bulk imports of Explorer.Chain.Arbitrum.BatchTransaction. + """ + + require Ecto.Query + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Arbitrum.BatchTransaction + alias Explorer.Chain.Import + alias Explorer.Prometheus.Instrumenter + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [BatchTransaction.t()] + + @impl Import.Runner + def ecto_schema_module, do: BatchTransaction + + @impl Import.Runner + def option_key, do: :arbitrum_batch_transactions + + @impl Import.Runner + @spec imported_table_row() :: %{:value_description => binary(), :value_type => binary()} + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + @spec run(Multi.t(), list(), map()) :: Multi.t() + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_arbitrum_batch_transactions, fn repo, _ -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :arbitrum_batch_transactions, + :arbitrum_batch_transactions + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [BatchTransaction.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = _options) when is_list(changes_list) do + # Enforce Arbitrum.BatchTransaction ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, & &1.tx_hash) + + {:ok, inserted} = + Import.insert_changes_list( + 
repo, + ordered_changes_list, + for: BatchTransaction, + returning: true, + timeout: timeout, + timestamps: timestamps, + conflict_target: :tx_hash, + on_conflict: :nothing + ) + + {:ok, inserted} + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_batches.ex b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_batches.ex new file mode 100644 index 000000000000..8003f94522f1 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_batches.ex @@ -0,0 +1,112 @@ +defmodule Explorer.Chain.Import.Runner.Arbitrum.L1Batches do + @moduledoc """ + Bulk imports of Explorer.Chain.Arbitrum.L1Batch. + """ + + require Ecto.Query + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Arbitrum.L1Batch + alias Explorer.Chain.Import + alias Explorer.Prometheus.Instrumenter + + import Ecto.Query, only: [from: 2] + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [L1Batch.t()] + + @impl Import.Runner + def ecto_schema_module, do: L1Batch + + @impl Import.Runner + def option_key, do: :arbitrum_l1_batches + + @impl Import.Runner + @spec imported_table_row() :: %{:value_description => binary(), :value_type => binary()} + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + @spec run(Multi.t(), list(), map()) :: Multi.t() + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_arbitrum_l1_batches, fn repo, _ -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :arbitrum_l1_batches, + :arbitrum_l1_batches + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + 
@spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [L1Batch.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do + on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) + + # Enforce Arbitrum.L1Batch ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, & &1.number) + + {:ok, inserted} = + Import.insert_changes_list( + repo, + ordered_changes_list, + for: L1Batch, + returning: true, + timeout: timeout, + timestamps: timestamps, + conflict_target: :number, + on_conflict: on_conflict + ) + + {:ok, inserted} + end + + defp default_on_conflict do + from( + tb in L1Batch, + update: [ + set: [ + # don't update `number` as it is a primary key and used for the conflict target + transactions_count: fragment("EXCLUDED.transactions_count"), + start_block: fragment("EXCLUDED.start_block"), + end_block: fragment("EXCLUDED.end_block"), + before_acc: fragment("EXCLUDED.before_acc"), + after_acc: fragment("EXCLUDED.after_acc"), + commitment_id: fragment("EXCLUDED.commitment_id"), + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", tb.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", tb.updated_at) + ] + ], + where: + fragment( + "(EXCLUDED.transactions_count, EXCLUDED.start_block, EXCLUDED.end_block, EXCLUDED.before_acc, EXCLUDED.after_acc, EXCLUDED.commitment_id) IS DISTINCT FROM (?, ?, ?, ?, ?, ?)", + tb.transactions_count, + tb.start_block, + tb.end_block, + tb.before_acc, + tb.after_acc, + tb.commitment_id + ) + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_executions.ex b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_executions.ex new file mode 100644 index 000000000000..e597ba55f0e2 --- /dev/null +++ 
b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/l1_executions.ex @@ -0,0 +1,102 @@ +defmodule Explorer.Chain.Import.Runner.Arbitrum.L1Executions do + @moduledoc """ + Bulk imports of Explorer.Chain.Arbitrum.L1Execution. + """ + + require Ecto.Query + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Arbitrum.L1Execution + alias Explorer.Chain.Import + alias Explorer.Prometheus.Instrumenter + + import Ecto.Query, only: [from: 2] + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [L1Execution.t()] + + @impl Import.Runner + def ecto_schema_module, do: L1Execution + + @impl Import.Runner + def option_key, do: :arbitrum_l1_executions + + @impl Import.Runner + @spec imported_table_row() :: %{:value_description => binary(), :value_type => binary()} + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + @spec run(Multi.t(), list(), map()) :: Multi.t() + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_arbitrum_l1_executions, fn repo, _ -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :arbitrum_l1_executions, + :arbitrum_l1_executions + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [L1Execution.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do + on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) + + # Enforce 
Arbitrum.L1Execution ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, & &1.message_id) + + {:ok, inserted} = + Import.insert_changes_list( + repo, + ordered_changes_list, + for: L1Execution, + returning: true, + timeout: timeout, + timestamps: timestamps, + conflict_target: :message_id, + on_conflict: on_conflict + ) + + {:ok, inserted} + end + + defp default_on_conflict do + from( + tb in L1Execution, + update: [ + set: [ + # don't update `message_id` as it is a primary key and used for the conflict target + execution_id: fragment("EXCLUDED.execution_id"), + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", tb.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", tb.updated_at) + ] + ], + where: + fragment( + "(EXCLUDED.execution_id) IS DISTINCT FROM (?)", + tb.execution_id + ) + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/arbitrum/lifecycle_transactions.ex b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/lifecycle_transactions.ex new file mode 100644 index 000000000000..f5a2c07a3249 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/lifecycle_transactions.ex @@ -0,0 +1,107 @@ +defmodule Explorer.Chain.Import.Runner.Arbitrum.LifecycleTransactions do + @moduledoc """ + Bulk imports of Explorer.Chain.Arbitrum.LifecycleTransaction. 
+ """ + + require Ecto.Query + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Arbitrum.LifecycleTransaction + alias Explorer.Chain.Import + alias Explorer.Prometheus.Instrumenter + + import Ecto.Query, only: [from: 2] + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [LifecycleTransaction.t()] + + @impl Import.Runner + def ecto_schema_module, do: LifecycleTransaction + + @impl Import.Runner + def option_key, do: :arbitrum_lifecycle_transactions + + @impl Import.Runner + @spec imported_table_row() :: %{:value_description => binary(), :value_type => binary()} + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + @spec run(Multi.t(), list(), map()) :: Multi.t() + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_arbitrum_lifecycle_transactions, fn repo, _ -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :arbitrum_lifecycle_transactions, + :arbitrum_lifecycle_transactions + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [LifecycleTransaction.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do + on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) + + # Enforce Arbitrum.LifecycleTransaction ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, & &1.id) + + {:ok, inserted} = + 
Import.insert_changes_list( + repo, + ordered_changes_list, + for: LifecycleTransaction, + returning: true, + timeout: timeout, + timestamps: timestamps, + conflict_target: :hash, + on_conflict: on_conflict + ) + + {:ok, inserted} + end + + defp default_on_conflict do + from( + tx in LifecycleTransaction, + update: [ + set: [ + # don't update `id` as it is a primary key + # don't update `hash` as it is a unique index and used for the conflict target + timestamp: fragment("EXCLUDED.timestamp"), + block_number: fragment("EXCLUDED.block_number"), + status: fragment("GREATEST(?, EXCLUDED.status)", tx.status), + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", tx.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", tx.updated_at) + ] + ], + where: + fragment( + "(EXCLUDED.timestamp, EXCLUDED.block_number, EXCLUDED.status) IS DISTINCT FROM (?, ?, ?)", + tx.timestamp, + tx.block_number, + tx.status + ) + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/arbitrum/messages.ex b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/messages.ex new file mode 100644 index 000000000000..9aef34064ffa --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/messages.ex @@ -0,0 +1,117 @@ +defmodule Explorer.Chain.Import.Runner.Arbitrum.Messages do + @moduledoc """ + Bulk imports of Explorer.Chain.Arbitrum.Message. 
+ """ + + require Ecto.Query + + import Ecto.Query, only: [from: 2] + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Arbitrum.Message, as: CrosslevelMessage + alias Explorer.Chain.Import + alias Explorer.Prometheus.Instrumenter + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [CrosslevelMessage.t()] + + @impl Import.Runner + def ecto_schema_module, do: CrosslevelMessage + + @impl Import.Runner + def option_key, do: :arbitrum_messages + + @impl Import.Runner + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_arbitrum_messages, fn repo, _ -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :arbitrum_messages, + :arbitrum_messages + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [CrosslevelMessage.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do + on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) + + # Enforce Message ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, &{&1.direction, &1.message_id}) + + {:ok, inserted} = + Import.insert_changes_list( + repo, + ordered_changes_list, + conflict_target: [:direction, :message_id], + on_conflict: on_conflict, + for: CrosslevelMessage, + returning: true, + timeout: timeout, + timestamps: 
timestamps + ) + + {:ok, inserted} + end + + defp default_on_conflict do + from( + op in CrosslevelMessage, + update: [ + set: [ + # Don't update `direction` as it is part of the composite primary key and used for the conflict target + # Don't update `message_id` as it is part of the composite primary key and used for the conflict target + originator_address: fragment("COALESCE(EXCLUDED.originator_address, ?)", op.originator_address), + originating_transaction_hash: + fragment("COALESCE(EXCLUDED.originating_transaction_hash, ?)", op.originating_transaction_hash), + origination_timestamp: fragment("COALESCE(EXCLUDED.origination_timestamp, ?)", op.origination_timestamp), + originating_transaction_block_number: + fragment( + "COALESCE(EXCLUDED.originating_transaction_block_number, ?)", + op.originating_transaction_block_number + ), + completion_transaction_hash: + fragment("COALESCE(EXCLUDED.completion_transaction_hash, ?)", op.completion_transaction_hash), + status: fragment("GREATEST(?, EXCLUDED.status)", op.status), + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", op.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", op.updated_at) + ] + ], + where: + fragment( + "(EXCLUDED.originator_address, EXCLUDED.originating_transaction_hash, EXCLUDED.origination_timestamp, EXCLUDED.originating_transaction_block_number, EXCLUDED.completion_transaction_hash, EXCLUDED.status) IS DISTINCT FROM (?, ?, ?, ?, ?, ?)", + op.originator_address, + op.originating_transaction_hash, + op.origination_timestamp, + op.originating_transaction_block_number, + op.completion_transaction_hash, + op.status + ) + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/transactions.ex b/apps/explorer/lib/explorer/chain/import/runner/transactions.ex index 121ad51e590f..1b1772afd9eb 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/transactions.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/transactions.ex @@ -107,9 +107,9 @@ defmodule 
Explorer.Chain.Import.Runner.Transactions do ) end - defp default_on_conflict do - case Application.get_env(:explorer, :chain_type) do - :suave -> + case Application.compile_env(:explorer, :chain_type) do + :suave -> + defp default_on_conflict do from( transaction in Transaction, update: [ @@ -204,8 +204,10 @@ defmodule Explorer.Chain.Import.Runner.Transactions do transaction.wrapped_hash ) ) + end - :optimism -> + :optimism -> + defp default_on_conflict do from( transaction in Transaction, update: [ @@ -284,8 +286,82 @@ defmodule Explorer.Chain.Import.Runner.Transactions do transaction.l1_block_number ) ) + end - _ -> + :arbitrum -> + defp default_on_conflict do + from( + transaction in Transaction, + update: [ + set: [ + block_hash: fragment("EXCLUDED.block_hash"), + old_block_hash: transaction.block_hash, + block_number: fragment("EXCLUDED.block_number"), + block_consensus: fragment("EXCLUDED.block_consensus"), + block_timestamp: fragment("EXCLUDED.block_timestamp"), + created_contract_address_hash: fragment("EXCLUDED.created_contract_address_hash"), + created_contract_code_indexed_at: fragment("EXCLUDED.created_contract_code_indexed_at"), + cumulative_gas_used: fragment("EXCLUDED.cumulative_gas_used"), + error: fragment("EXCLUDED.error"), + from_address_hash: fragment("EXCLUDED.from_address_hash"), + gas: fragment("EXCLUDED.gas"), + gas_price: fragment("EXCLUDED.gas_price"), + gas_used: fragment("EXCLUDED.gas_used"), + index: fragment("EXCLUDED.index"), + input: fragment("EXCLUDED.input"), + nonce: fragment("EXCLUDED.nonce"), + r: fragment("EXCLUDED.r"), + s: fragment("EXCLUDED.s"), + status: fragment("EXCLUDED.status"), + to_address_hash: fragment("EXCLUDED.to_address_hash"), + v: fragment("EXCLUDED.v"), + value: fragment("EXCLUDED.value"), + earliest_processing_start: fragment("EXCLUDED.earliest_processing_start"), + revert_reason: fragment("EXCLUDED.revert_reason"), + max_priority_fee_per_gas: fragment("EXCLUDED.max_priority_fee_per_gas"), + max_fee_per_gas: 
fragment("EXCLUDED.max_fee_per_gas"), + type: fragment("EXCLUDED.type"), + gas_used_for_l1: fragment("EXCLUDED.gas_used_for_l1"), + # Don't update `hash` as it is part of the primary key and used for the conflict target + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", transaction.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", transaction.updated_at) + ] + ], + where: + fragment( + "(EXCLUDED.block_hash, EXCLUDED.block_number, EXCLUDED.block_consensus, EXCLUDED.block_timestamp, EXCLUDED.created_contract_address_hash, EXCLUDED.created_contract_code_indexed_at, EXCLUDED.cumulative_gas_used, EXCLUDED.from_address_hash, EXCLUDED.gas, EXCLUDED.gas_price, EXCLUDED.gas_used, EXCLUDED.index, EXCLUDED.input, EXCLUDED.nonce, EXCLUDED.r, EXCLUDED.s, EXCLUDED.status, EXCLUDED.to_address_hash, EXCLUDED.v, EXCLUDED.value, EXCLUDED.earliest_processing_start, EXCLUDED.revert_reason, EXCLUDED.max_priority_fee_per_gas, EXCLUDED.max_fee_per_gas, EXCLUDED.type, EXCLUDED.gas_used_for_l1) IS DISTINCT FROM (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + transaction.block_hash, + transaction.block_number, + transaction.block_consensus, + transaction.block_timestamp, + transaction.created_contract_address_hash, + transaction.created_contract_code_indexed_at, + transaction.cumulative_gas_used, + transaction.from_address_hash, + transaction.gas, + transaction.gas_price, + transaction.gas_used, + transaction.index, + transaction.input, + transaction.nonce, + transaction.r, + transaction.s, + transaction.status, + transaction.to_address_hash, + transaction.v, + transaction.value, + transaction.earliest_processing_start, + transaction.revert_reason, + transaction.max_priority_fee_per_gas, + transaction.max_fee_per_gas, + transaction.type, + transaction.gas_used_for_l1 + ) + ) + end + + _ -> + defp default_on_conflict do from( transaction in Transaction, update: [ @@ -352,7 +428,7 @@ defmodule 
Explorer.Chain.Import.Runner.Transactions do transaction.type ) ) - end + end end defp discard_blocks_for_recollated_transactions(repo, changes_list, %{ diff --git a/apps/explorer/lib/explorer/chain/import/stage/block_referencing.ex b/apps/explorer/lib/explorer/chain/import/stage/block_referencing.ex index dd0cf9e1899b..7ee67fe8565f 100644 --- a/apps/explorer/lib/explorer/chain/import/stage/block_referencing.ex +++ b/apps/explorer/lib/explorer/chain/import/stage/block_referencing.ex @@ -59,6 +59,15 @@ defmodule Explorer.Chain.Import.Stage.BlockReferencing do Runner.Beacon.BlobTransactions ] + @arbitrum_runners [ + Runner.Arbitrum.Messages, + Runner.Arbitrum.LifecycleTransactions, + Runner.Arbitrum.L1Executions, + Runner.Arbitrum.L1Batches, + Runner.Arbitrum.BatchBlocks, + Runner.Arbitrum.BatchTransactions + ] + @impl Stage def runners do case Application.get_env(:explorer, :chain_type) do @@ -80,6 +89,9 @@ defmodule Explorer.Chain.Import.Stage.BlockReferencing do :zksync -> @default_runners ++ @zksync_runners + :arbitrum -> + @default_runners ++ @arbitrum_runners + _ -> @default_runners end @@ -88,7 +100,9 @@ defmodule Explorer.Chain.Import.Stage.BlockReferencing do @impl Stage def all_runners do @default_runners ++ - @optimism_runners ++ @polygon_edge_runners ++ @polygon_zkevm_runners ++ @shibarium_runners ++ @zksync_runners + @ethereum_runners ++ + @optimism_runners ++ + @polygon_edge_runners ++ @polygon_zkevm_runners ++ @shibarium_runners ++ @zksync_runners ++ @arbitrum_runners end @impl Stage diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index 64eb8d07801c..d03c39db60c2 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -1,5 +1,10 @@ defmodule Explorer.Chain.Transaction.Schema do - @moduledoc false + @moduledoc """ + Models transactions. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Transactions + """ alias Explorer.Chain.{ Address, @@ -14,6 +19,9 @@ defmodule Explorer.Chain.Transaction.Schema do Wei } + alias Explorer.Chain.Arbitrum.BatchBlock, as: ArbitrumBatchBlock + alias Explorer.Chain.Arbitrum.BatchTransaction, as: ArbitrumBatchTransaction + alias Explorer.Chain.Arbitrum.Message, as: ArbitrumMessage alias Explorer.Chain.PolygonZkevm.BatchTransaction, as: ZkevmBatchTransaction alias Explorer.Chain.Transaction.{Fork, Status} alias Explorer.Chain.ZkSync.BatchTransaction, as: ZkSyncBatchTransaction @@ -114,6 +122,44 @@ defmodule Explorer.Chain.Transaction.Schema do 2 ) + :arbitrum -> + elem( + quote do + field(:gas_used_for_l1, :decimal) + + has_one(:arbitrum_batch_transaction, ArbitrumBatchTransaction, + foreign_key: :tx_hash, + references: :hash + ) + + has_one(:arbitrum_batch, through: [:arbitrum_batch_transaction, :batch]) + + has_one(:arbitrum_commitment_transaction, + through: [:arbitrum_batch, :commitment_transaction] + ) + + has_one(:arbitrum_batch_block, ArbitrumBatchBlock, + foreign_key: :block_number, + references: :block_number + ) + + has_one(:arbitrum_confirmation_transaction, + through: [:arbitrum_batch_block, :confirmation_transaction] + ) + + has_one(:arbitrum_message_to_l2, ArbitrumMessage, + foreign_key: :completion_transaction_hash, + references: :hash + ) + + has_one(:arbitrum_message_from_l2, ArbitrumMessage, + foreign_key: :originating_transaction_hash, + references: :hash + ) + end, + 2 + ) + _ -> [] end) @@ -234,16 +280,27 @@ defmodule Explorer.Chain.Transaction do alias Explorer.SmartContract.SigProviderInterface - @optional_attrs ~w(max_priority_fee_per_gas max_fee_per_gas block_hash block_number block_consensus block_timestamp created_contract_address_hash cumulative_gas_used earliest_processing_start - error gas_price gas_used index created_contract_code_indexed_at status + @optional_attrs 
~w(max_priority_fee_per_gas max_fee_per_gas block_hash block_number + block_consensus block_timestamp created_contract_address_hash + cumulative_gas_used earliest_processing_start error gas_price + gas_used index created_contract_code_indexed_at status to_address_hash revert_reason type has_error_in_internal_txs r s v)a - @optimism_optional_attrs ~w(l1_fee l1_fee_scalar l1_gas_price l1_gas_used l1_tx_origin l1_block_number)a - @suave_optional_attrs ~w(execution_node_hash wrapped_type wrapped_nonce wrapped_to_address_hash wrapped_gas wrapped_gas_price wrapped_max_priority_fee_per_gas wrapped_max_fee_per_gas wrapped_value wrapped_input wrapped_v wrapped_r wrapped_s wrapped_hash)a + @chain_type_optional_attrs (case Application.compile_env(:explorer, :chain_type) do + :optimism -> + ~w(l1_fee l1_fee_scalar l1_gas_price l1_gas_used l1_tx_origin l1_block_number)a - @required_attrs ~w(from_address_hash gas hash input nonce value)a + :suave -> + ~w(execution_node_hash wrapped_type wrapped_nonce wrapped_to_address_hash wrapped_gas wrapped_gas_price wrapped_max_priority_fee_per_gas wrapped_max_fee_per_gas wrapped_value wrapped_input wrapped_v wrapped_r wrapped_s wrapped_hash)a + + :arbitrum -> + ~w(gas_used_for_l1)a - @empty_attrs ~w()a + _ -> + ~w()a + end) + + @required_attrs ~w(from_address_hash gas hash input nonce value)a @typedoc """ X coordinate module n in @@ -562,7 +619,7 @@ defmodule Explorer.Chain.Transaction do attrs_to_cast = @required_attrs ++ @optional_attrs ++ - custom_optional_attrs() + @chain_type_optional_attrs transaction |> cast(attrs, attrs_to_cast) @@ -577,14 +634,6 @@ defmodule Explorer.Chain.Transaction do |> unique_constraint(:hash) end - defp custom_optional_attrs do - case Application.get_env(:explorer, :chain_type) do - :suave -> @suave_optional_attrs - :optimism -> @optimism_optional_attrs - _ -> @empty_attrs - end - end - @spec block_timestamp(t()) :: DateTime.t() def block_timestamp(%{block_number: nil, inserted_at: time}), do: time def 
block_timestamp(%{block_timestamp: time}) when not is_nil(time), do: time diff --git a/apps/explorer/lib/explorer/chain_spec/genesis_data.ex b/apps/explorer/lib/explorer/chain_spec/genesis_data.ex index 71be06092857..abab4aeccb44 100644 --- a/apps/explorer/lib/explorer/chain_spec/genesis_data.ex +++ b/apps/explorer/lib/explorer/chain_spec/genesis_data.ex @@ -87,6 +87,7 @@ defmodule Explorer.ChainSpec.GenesisData do def fetch_genesis_data do chain_spec_path = get_path(:chain_spec_path) precompiled_config_path = get_path(:precompiled_config_path) + Logger.info(fn -> "Fetching precompiled config path: #{inspect(precompiled_config_path)}." end) if is_nil(chain_spec_path) and is_nil(precompiled_config_path) do Logger.warn(fn -> "Genesis data is not fetched. Neither chain spec path or precompiles config path are set." end) diff --git a/apps/explorer/lib/explorer/repo.ex b/apps/explorer/lib/explorer/repo.ex index 38b19e86392b..8105224b4f92 100644 --- a/apps/explorer/lib/explorer/repo.ex +++ b/apps/explorer/lib/explorer/repo.ex @@ -217,6 +217,16 @@ defmodule Explorer.Repo do end end + defmodule Arbitrum do + use Ecto.Repo, + otp_app: :explorer, + adapter: Ecto.Adapters.Postgres + + def init(_, opts) do + ConfigHelper.init_repo_module(__MODULE__, opts) + end + end + defmodule BridgedTokens do use Ecto.Repo, otp_app: :explorer, diff --git a/apps/explorer/lib/explorer/utility/missing_block_range.ex b/apps/explorer/lib/explorer/utility/missing_block_range.ex index 54fce3762687..ac8c8a39e5a5 100644 --- a/apps/explorer/lib/explorer/utility/missing_block_range.ex +++ b/apps/explorer/lib/explorer/utility/missing_block_range.ex @@ -4,7 +4,7 @@ defmodule Explorer.Utility.MissingBlockRange do """ use Explorer.Schema - alias Explorer.Chain.BlockNumberHelper + alias Explorer.Chain.{Block, BlockNumberHelper} alias Explorer.Repo @default_returning_batch_size 10 @@ -129,6 +129,38 @@ defmodule Explorer.Utility.MissingBlockRange do |> Enum.map(&save_range/1) end + @doc """ + Finds the first 
range in the table where the set, consisting of numbers from `lower_number` to `higher_number`, intersects. + + ## Parameters + - `lower_number`: The lower bound of the range to check. + - `higher_number`: The upper bound of the range to check. + + ## Returns + - Returns `nil` if no intersecting ranges are found, or an `Explorer.Utility.MissingBlockRange` instance of the first intersecting range otherwise. + """ + @spec intersects_with_range(Block.block_number(), Block.block_number()) :: nil | Explorer.Utility.MissingBlockRange + def intersects_with_range(lower_number, higher_number) + when is_integer(lower_number) and lower_number >= 0 and + is_integer(higher_number) and lower_number <= higher_number do + query = + from( + r in __MODULE__, + # Note: from_number is higher than to_number, so in fact the range is to_number..from_number + # The first case: lower_number..to_number..higher_number + # The second case: lower_number..from_number..higher_number + # The third case: to_number..lower_number..higher_number..from_number + where: + (^lower_number <= r.to_number and ^higher_number >= r.to_number) or + (^lower_number <= r.from_number and ^higher_number >= r.from_number) or + (^lower_number >= r.to_number and ^higher_number <= r.from_number), + limit: 1 + ) + + query + |> Repo.one() + end + defp insert_range(params) do params |> changeset() diff --git a/apps/explorer/priv/arbitrum/migrations/20240201125730_create_arbitrum_tables.exs b/apps/explorer/priv/arbitrum/migrations/20240201125730_create_arbitrum_tables.exs new file mode 100644 index 000000000000..3181ad01932b --- /dev/null +++ b/apps/explorer/priv/arbitrum/migrations/20240201125730_create_arbitrum_tables.exs @@ -0,0 +1,124 @@ +defmodule Explorer.Repo.Arbitrum.Migrations.CreateArbitrumTables do + use Ecto.Migration + + def change do + execute( + "CREATE TYPE arbitrum_messages_op_type AS ENUM ('to_l2', 'from_l2')", + "DROP TYPE arbitrum_messages_op_type" + ) + + execute( + "CREATE TYPE arbitrum_messages_status 
AS ENUM ('initiated', 'sent', 'confirmed', 'relayed')", + "DROP TYPE arbitrum_messages_status" + ) + + execute( + "CREATE TYPE l1_tx_status AS ENUM ('unfinalized', 'finalized')", + "DROP TYPE l1_tx_status" + ) + + create table(:arbitrum_crosslevel_messages, primary_key: false) do + add(:direction, :arbitrum_messages_op_type, null: false, primary_key: true) + add(:message_id, :integer, null: false, primary_key: true) + add(:originator_address, :bytea, null: true) + add(:originating_transaction_hash, :bytea, null: true) + add(:origination_timestamp, :"timestamp without time zone", null: true) + add(:originating_transaction_block_number, :bigint, null: true) + add(:completion_transaction_hash, :bytea, null: true) + add(:status, :arbitrum_messages_status, null: false) + timestamps(null: false, type: :utc_datetime_usec) + end + + create(index(:arbitrum_crosslevel_messages, [:direction, :originating_transaction_block_number, :status])) + create(index(:arbitrum_crosslevel_messages, [:direction, :completion_transaction_hash])) + + create table(:arbitrum_lifecycle_l1_transactions, primary_key: false) do + add(:id, :integer, null: false, primary_key: true) + add(:hash, :bytea, null: false) + add(:block_number, :integer, null: false) + add(:timestamp, :"timestamp without time zone", null: false) + add(:status, :l1_tx_status, null: false) + timestamps(null: false, type: :utc_datetime_usec) + end + + create(unique_index(:arbitrum_lifecycle_l1_transactions, :hash)) + create(index(:arbitrum_lifecycle_l1_transactions, [:block_number, :status])) + + create table(:arbitrum_l1_executions, primary_key: false) do + add(:message_id, :integer, null: false, primary_key: true) + + add( + :execution_id, + references(:arbitrum_lifecycle_l1_transactions, on_delete: :restrict, on_update: :update_all, type: :integer), + null: false + ) + + timestamps(null: false, type: :utc_datetime_usec) + end + + create table(:arbitrum_l1_batches, primary_key: false) do + add(:number, :integer, null: false, 
primary_key: true) + add(:transactions_count, :integer, null: false) + add(:start_block, :integer, null: false) + add(:end_block, :integer, null: false) + add(:before_acc, :bytea, null: false) + add(:after_acc, :bytea, null: false) + + add( + :commitment_id, + references(:arbitrum_lifecycle_l1_transactions, on_delete: :restrict, on_update: :update_all, type: :integer), + null: false + ) + + timestamps(null: false, type: :utc_datetime_usec) + end + + create table(:arbitrum_batch_l2_blocks, primary_key: false) do + add( + :batch_number, + references(:arbitrum_l1_batches, + column: :number, + on_delete: :delete_all, + on_update: :update_all, + type: :integer + ), + null: false + ) + + add( + :confirmation_id, + references(:arbitrum_lifecycle_l1_transactions, on_delete: :restrict, on_update: :update_all, type: :integer), + null: true + ) + + # Although it is possible to recover the block number from the block hash, + # it is more efficient to store it directly + # There could be no DB inconsistencies with `blocks` table caused be re-orgs + # because the blocks will appear in the table `arbitrum_batch_l2_blocks` + # only when they are included in the batch. 
+ add(:block_number, :integer, null: false, primary_key: true) + timestamps(null: false, type: :utc_datetime_usec) + end + + create(index(:arbitrum_batch_l2_blocks, :batch_number)) + create(index(:arbitrum_batch_l2_blocks, :confirmation_id)) + + create table(:arbitrum_batch_l2_transactions, primary_key: false) do + add( + :batch_number, + references(:arbitrum_l1_batches, + column: :number, + on_delete: :delete_all, + on_update: :update_all, + type: :integer + ), + null: false + ) + + add(:tx_hash, :bytea, null: false, primary_key: true) + timestamps(null: false, type: :utc_datetime_usec) + end + + create(index(:arbitrum_batch_l2_transactions, :batch_number)) + end +end diff --git a/apps/explorer/priv/arbitrum/migrations/20240510184858_extend_transaction_and_block_tables.exs b/apps/explorer/priv/arbitrum/migrations/20240510184858_extend_transaction_and_block_tables.exs new file mode 100644 index 000000000000..3bc802b127e1 --- /dev/null +++ b/apps/explorer/priv/arbitrum/migrations/20240510184858_extend_transaction_and_block_tables.exs @@ -0,0 +1,15 @@ +defmodule Explorer.Repo.Arbitrum.Migrations.ExtendTransactionAndBlockTables do + use Ecto.Migration + + def change do + alter table(:blocks) do + add(:send_count, :integer) + add(:send_root, :bytea) + add(:l1_block_number, :integer) + end + + alter table(:transactions) do + add(:gas_used_for_l1, :numeric, precision: 100) + end + end +end diff --git a/apps/explorer/test/support/factory.ex b/apps/explorer/test/support/factory.ex index c186da66ec7a..659b40f2529a 100644 --- a/apps/explorer/test/support/factory.ex +++ b/apps/explorer/test/support/factory.ex @@ -522,6 +522,21 @@ defmodule Explorer.Factory do timestamp: DateTime.utc_now(), refetch_needed: false } + |> Map.merge(block_factory_chain_type_fields()) + end + + case Application.compile_env(:explorer, :chain_type) do + :arbitrum -> + defp block_factory_chain_type_fields() do + %{ + send_count: Enum.random(1..100_000), + send_root: block_hash(), + l1_block_number: 
Enum.random(1..100_000) + } + end + + _ -> + defp block_factory_chain_type_fields(), do: %{} end def contract_method_factory() do @@ -871,6 +886,19 @@ defmodule Explorer.Factory do value: Enum.random(1..100_000), block_timestamp: DateTime.utc_now() } + |> Map.merge(transaction_factory_chain_type_fields()) + end + + case Application.compile_env(:explorer, :chain_type) do + :arbitrum -> + defp transaction_factory_chain_type_fields() do + %{ + gas_used_for_l1: Enum.random(1..100_000) + } + end + + _ -> + defp transaction_factory_chain_type_fields(), do: %{} end def transaction_to_verified_contract_factory do diff --git a/apps/indexer/lib/indexer/block/fetcher.ex b/apps/indexer/lib/indexer/block/fetcher.ex index 0c2ff92aa3e6..44fa80684839 100644 --- a/apps/indexer/lib/indexer/block/fetcher.ex +++ b/apps/indexer/lib/indexer/block/fetcher.ex @@ -48,6 +48,7 @@ defmodule Indexer.Block.Fetcher do alias Indexer.Transform.PolygonEdge.{DepositExecutes, Withdrawals} + alias Indexer.Transform.Arbitrum.Messaging, as: ArbitrumMessaging alias Indexer.Transform.Shibarium.Bridge, as: ShibariumBridge alias Indexer.Transform.Blocks, as: TransformBlocks @@ -171,6 +172,7 @@ defmodule Indexer.Block.Fetcher do do: PolygonZkevmBridge.parse(blocks, logs), else: [] ), + arbitrum_xlevel_messages = ArbitrumMessaging.parse(transactions_with_receipts, logs), %FetchedBeneficiaries{params_set: beneficiary_params_set, errors: beneficiaries_errors} = fetch_beneficiaries(blocks, transactions_with_receipts, json_rpc_named_arguments), addresses = @@ -226,7 +228,8 @@ defmodule Indexer.Block.Fetcher do polygon_edge_withdrawals: polygon_edge_withdrawals, polygon_edge_deposit_executes: polygon_edge_deposit_executes, polygon_zkevm_bridge_operations: polygon_zkevm_bridge_operations, - shibarium_bridge_operations: shibarium_bridge_operations + shibarium_bridge_operations: shibarium_bridge_operations, + arbitrum_messages: arbitrum_xlevel_messages }, {:ok, inserted} <- __MODULE__.import( @@ -260,7 +263,8 @@ 
defmodule Indexer.Block.Fetcher do polygon_edge_withdrawals: polygon_edge_withdrawals, polygon_edge_deposit_executes: polygon_edge_deposit_executes, polygon_zkevm_bridge_operations: polygon_zkevm_bridge_operations, - shibarium_bridge_operations: shibarium_bridge_operations + shibarium_bridge_operations: shibarium_bridge_operations, + arbitrum_messages: arbitrum_xlevel_messages }) do case Application.get_env(:explorer, :chain_type) do :ethereum -> @@ -286,6 +290,10 @@ defmodule Indexer.Block.Fetcher do basic_import_options |> Map.put_new(:shibarium_bridge_operations, %{params: shibarium_bridge_operations}) + :arbitrum -> + basic_import_options + |> Map.put_new(:arbitrum_messages, %{params: arbitrum_xlevel_messages}) + _ -> basic_import_options end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex new file mode 100644 index 000000000000..e840f04fee16 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex @@ -0,0 +1,295 @@ +defmodule Indexer.Fetcher.Arbitrum.Messaging do + @moduledoc """ + Provides functionality for filtering and handling messaging between Layer 1 (L1) and Layer 2 (L2) in the Arbitrum protocol. + + This module is responsible for identifying and processing messages that are transmitted + between L1 and L2. It includes functions to filter incoming logs and transactions to + find those that represent messages moving between the layers, and to handle the data of + these messages appropriately. 
+ """ + + import EthereumJSONRPC, only: [quantity_to_integer: 1] + + import Explorer.Helper, only: [decode_data: 2] + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1, log_debug: 1] + + alias Indexer.Fetcher.Arbitrum.Utils.Db + + require Logger + + @l2_to_l1_event_unindexed_params [ + :address, + {:uint, 256}, + {:uint, 256}, + {:uint, 256}, + {:uint, 256}, + :bytes + ] + + @type arbitrum_message :: %{ + direction: :to_l2 | :from_l2, + message_id: non_neg_integer(), + originator_address: binary(), + originating_transaction_hash: binary(), + origination_timestamp: DateTime.t(), + originating_transaction_block_number: non_neg_integer(), + completion_transaction_hash: binary(), + status: :initiated | :sent | :confirmed | :relayed + } + + @typep min_transaction :: %{ + :hash => binary(), + :type => non_neg_integer(), + optional(:request_id) => non_neg_integer(), + optional(any()) => any() + } + + @typep min_log :: %{ + :data => binary(), + :index => non_neg_integer(), + :first_topic => binary(), + :second_topic => binary(), + :third_topic => binary(), + :fourth_topic => binary(), + :address_hash => binary(), + :transaction_hash => binary(), + :block_hash => binary(), + :block_number => non_neg_integer(), + optional(any()) => any() + } + + @doc """ + Filters a list of rollup transactions to identify L1-to-L2 messages and composes a map for each with the related message information. + + This function filters through a list of rollup transactions, selecting those + with a non-nil `request_id`, indicating they are L1-to-L2 message completions. + These filtered transactions are then processed to construct a detailed message + structure for each. + + ## Parameters + - `transactions`: A list of rollup transaction entries. + - `report`: An optional boolean flag (default `true`) that, when `true`, logs + the number of processed L1-to-L2 messages if any are found. + + ## Returns + - A list of L1-to-L2 messages with detailed information and current status. 
Every + map in the list compatible with the database import operation. All messages in + this context are considered `:relayed` as they represent completed actions from + L1 to L2. + """ + @spec filter_l1_to_l2_messages(maybe_improper_list(min_transaction, [])) :: [arbitrum_message] + @spec filter_l1_to_l2_messages(maybe_improper_list(min_transaction, []), boolean()) :: [arbitrum_message] + def filter_l1_to_l2_messages(transactions, report \\ true) + when is_list(transactions) and is_boolean(report) do + messages = + transactions + |> Enum.filter(fn tx -> + tx[:request_id] != nil + end) + |> handle_filtered_l1_to_l2_messages() + + if report && not (messages == []) do + log_info("#{length(messages)} completions of L1-to-L2 messages will be imported") + end + + messages + end + + @doc """ + Filters logs for L2-to-L1 messages and composes a map for each with the related message information. + + This function filters a list of logs to identify those representing L2-to-L1 messages. + It checks each log against the ArbSys contract address and the `L2ToL1Tx` event + signature to determine if it corresponds to an L2-to-L1 message. + + ## Parameters + - `logs`: A list of log entries. + + ## Returns + - A list of L2-to-L1 messages with detailed information and current status. Each map + in the list is compatible with the database import operation. + """ + @spec filter_l2_to_l1_messages(maybe_improper_list(min_log, [])) :: [arbitrum_message] + def filter_l2_to_l1_messages(logs) when is_list(logs) do + arbsys_contract = Application.get_env(:indexer, __MODULE__)[:arbsys_contract] + + filtered_logs = + logs + |> Enum.filter(fn event -> + event.address_hash == arbsys_contract and event.first_topic == Db.l2_to_l1_event() + end) + + handle_filtered_l2_to_l1_messages(filtered_logs) + end + + @doc """ + Processes a list of filtered rollup transactions representing L1-to-L2 messages, constructing a detailed message structure for each. 
+ + ## Parameters + - `filtered_txs`: A list of rollup transaction entries, each representing an L1-to-L2 + message transaction. + + ## Returns + - A list of L1-to-L2 messages with detailed information and current status. Every map + in the list compatible with the database import operation. All messages in this context + are considered `:relayed` as they represent completed actions from L1 to L2. + """ + @spec handle_filtered_l1_to_l2_messages(maybe_improper_list(min_transaction, [])) :: [arbitrum_message] + def handle_filtered_l1_to_l2_messages([]) do + [] + end + + def handle_filtered_l1_to_l2_messages(filtered_txs) when is_list(filtered_txs) do + filtered_txs + |> Enum.map(fn tx -> + log_debug("L1 to L2 message #{tx.hash} found with the type #{tx.type}") + + %{direction: :to_l2, message_id: tx.request_id, completion_transaction_hash: tx.hash, status: :relayed} + |> complete_to_params() + end) + end + + @doc """ + Processes a list of filtered logs representing L2-to-L1 messages, enriching and categorizing them based on their current state and optionally updating their execution status. + + This function takes filtered log events, typically representing L2-to-L1 messages, and + processes each to construct a comprehensive message structure. It also determines the + status of each message by comparing its block number against the highest committed and + confirmed block numbers. If a `caller` module is provided, it further updates the + messages' execution status. + + ## Parameters + - `filtered_logs`: A list of log entries, each representing an L2-to-L1 message event. + - `caller`: An optional module that uses as a flag to determine if the discovered + should be checked for execution. + + ## Returns + - A list of L2-to-L1 messages with detailed information and current status, ready for + database import. 
+ """ + @spec handle_filtered_l2_to_l1_messages([min_log]) :: [arbitrum_message] + @spec handle_filtered_l2_to_l1_messages([min_log], module()) :: [arbitrum_message] + def handle_filtered_l2_to_l1_messages(filtered_logs, caller \\ nil) + + def handle_filtered_l2_to_l1_messages([], _) do + [] + end + + def handle_filtered_l2_to_l1_messages(filtered_logs, caller) when is_list(filtered_logs) do + # Get values before the loop parsing the events to reduce number of DB requests + highest_committed_block = Db.highest_committed_block(-1) + highest_confirmed_block = Db.highest_confirmed_block(-1) + + messages_map = + filtered_logs + |> Enum.reduce(%{}, fn event, messages_acc -> + log_debug("L2 to L1 message #{event.transaction_hash} found") + + {message_id, caller, blocknum, timestamp} = l2_to_l1_event_parse(event) + + message = + %{ + direction: :from_l2, + message_id: message_id, + originator_address: caller, + originating_transaction_hash: event.transaction_hash, + origination_timestamp: timestamp, + originating_transaction_block_number: blocknum, + status: status_l2_to_l1_message(blocknum, highest_committed_block, highest_confirmed_block) + } + |> complete_to_params() + + Map.put( + messages_acc, + message_id, + message + ) + end) + + log_info("Origins of #{length(Map.values(messages_map))} L2-to-L1 messages will be imported") + + # The check if messages are executed is required only for the case when l2-to-l1 + # messages are found by block catchup fetcher + updated_messages_map = + case caller do + nil -> + messages_map + + _ -> + messages_map + |> find_and_update_executed_messages() + end + + updated_messages_map + |> Map.values() + end + + # Converts an incomplete message structure into a complete parameters map for database updates. 
+ defp complete_to_params(incomplete) do + [ + :direction, + :message_id, + :originator_address, + :originating_transaction_hash, + :origination_timestamp, + :originating_transaction_block_number, + :completion_transaction_hash, + :status + ] + |> Enum.reduce(%{}, fn key, out -> + Map.put(out, key, Map.get(incomplete, key)) + end) + end + + # Parses an L2-to-L1 event, extracting relevant information from the event's data. + defp l2_to_l1_event_parse(event) do + [ + caller, + arb_block_num, + _eth_block_num, + timestamp, + _callvalue, + _data + ] = decode_data(event.data, @l2_to_l1_event_unindexed_params) + + position = quantity_to_integer(event.fourth_topic) + + {position, caller, arb_block_num, Timex.from_unix(timestamp)} + end + + # Determines the status of an L2-to-L1 message based on its block number and the highest + # committed and confirmed block numbers. + defp status_l2_to_l1_message(msg_block, highest_committed_block, highest_confirmed_block) do + cond do + highest_confirmed_block >= msg_block -> :confirmed + highest_committed_block >= msg_block -> :sent + true -> :initiated + end + end + + # Finds and updates the status of L2-to-L1 messages that have been executed on L1. + # This function iterates over the given messages, identifies those with corresponding L1 executions, + # and updates their `completion_transaction_hash` and `status` accordingly. + # + # ## Parameters + # - `messages`: A map where each key is a message ID, and each value is the message's details. + # + # ## Returns + # - The updated map of messages with the `completion_transaction_hash` and `status` fields updated + # for messages that have been executed. 
+ defp find_and_update_executed_messages(messages) do + messages + |> Map.keys() + |> Db.l1_executions() + |> Enum.reduce(messages, fn execution, messages_acc -> + message = + messages_acc + |> Map.get(execution.message_id) + |> Map.put(:completion_transaction_hash, execution.execution_transaction.hash.bytes) + |> Map.put(:status, :relayed) + + Map.put(messages_acc, execution.message_id, message) + end) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex new file mode 100644 index 000000000000..0f6f8b09cdeb --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex @@ -0,0 +1,365 @@ +defmodule Indexer.Fetcher.Arbitrum.RollupMessagesCatchup do + @moduledoc """ + Manages the catch-up process for historical rollup messages between Layer 1 (L1) and Layer 2 (L2) within the Arbitrum network. + + This module aims to discover historical messages that were not captured by the block + fetcher or the catch-up block fetcher. This situation arises during the upgrade of an + existing instance of BlockScout (BS) that already has indexed blocks but lacks + a crosschain messages discovery mechanism. Therefore, it becomes necessary to traverse + the already indexed blocks to extract crosschain messages contained within them. + + The fetcher's operation cycle consists of five phases, initiated by sending specific + messages: + - `:wait_for_new_block`: Waits for the block fetcher to index new blocks before + proceeding with message discovery. + - `:init_worker`: Sets up the initial parameters for the message discovery process, + identifying the ending blocks for the search. + - `:historical_msg_from_l2` and `:historical_msg_to_l2`: Manage the discovery and + processing of messages sent from L2 to L1 and from L1 to L2, respectively. + - `:plan_next_iteration`: Schedules the next iteration of the catch-up process. 
+ + Workflow diagram of the fetcher state changes: + + wait_for_new_block + | + V + init_worker + | + V + |-> historical_msg_from_l2 -> historical_msg_to_l2 -> plan_next_iteration ->| + |---------------------------------------------------------------------------| + + `historical_msg_from_l2` discovers L2-to-L1 messages by analyzing logs from already + indexed rollup transactions. Logs representing the `L2ToL1Tx` event are utilized + to construct messages. The current rollup state, including information about + committed batches and confirmed blocks, is used to assign the appropriate status + to the messages before importing them into the database. + + `historical_msg_to_l2` discovers L1-to-L2 messages by requesting rollup + transactions through RPC. Transactions containing a `requestId` in their body are + utilized to construct messages. These messages are marked as `:relayed`, indicating + that they have been successfully received on L2 and are considered completed, and + are then imported into the database. This approach is adopted because it parallels + the action of re-indexing existing transactions to include Arbitrum-specific fields, + which are absent in the currently indexed transactions. However, permanently adding + these fields to the database model for the sake of historical message catch-up is + impractical. Therefore, to avoid the extensive process of re-indexing and to + minimize changes to the database schema, fetching the required data directly from + an external node via RPC is preferred for historical message discovery. 
+ """ + + use GenServer + use Indexer.Fetcher + + import Indexer.Fetcher.Arbitrum.Utils.Helper, only: [increase_duration: 2] + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1] + + alias Indexer.Fetcher.Arbitrum.Utils.Db + alias Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 + + require Logger + + @wait_for_new_block_delay 15 + @release_cpu_delay 1 + + def child_spec(start_link_arguments) do + spec = %{ + id: __MODULE__, + start: {__MODULE__, :start_link, start_link_arguments}, + restart: :transient, + type: :worker + } + + Supervisor.child_spec(spec, []) + end + + def start_link(args, gen_server_options \\ []) do + GenServer.start_link(__MODULE__, args, Keyword.put_new(gen_server_options, :name, __MODULE__)) + end + + @impl GenServer + def init(args) do + Logger.metadata(fetcher: :arbitrum_bridge_l2_catchup) + + config_common = Application.get_all_env(:indexer)[Indexer.Fetcher.Arbitrum] + rollup_chunk_size = config_common[:rollup_chunk_size] + + config_tracker = Application.get_all_env(:indexer)[__MODULE__] + recheck_interval = config_tracker[:recheck_interval] + messages_to_l2_blocks_depth = config_tracker[:messages_to_l2_blocks_depth] + messages_from_l2_blocks_depth = config_tracker[:messages_to_l1_blocks_depth] + + Process.send(self(), :wait_for_new_block, []) + + {:ok, + %{ + config: %{ + rollup_rpc: %{ + json_rpc_named_arguments: args[:json_rpc_named_arguments], + chunk_size: rollup_chunk_size + }, + json_l2_rpc_named_arguments: args[:json_rpc_named_arguments], + recheck_interval: recheck_interval, + messages_to_l2_blocks_depth: messages_to_l2_blocks_depth, + messages_from_l2_blocks_depth: messages_from_l2_blocks_depth + }, + data: %{} + }} + end + + @impl GenServer + def handle_info({ref, _result}, state) do + Process.demonitor(ref, [:flush]) + {:noreply, state} + end + + # Waits for the next new block to be picked up by the block fetcher before initiating + # the worker for message discovery. 
+ #
+ # This function checks if a new block has been indexed by the block fetcher since
+ # the start of the historical messages fetcher. It queries the database to find
+ # the closest block timestamped after this period. If a new block is found, it
+ # initiates the worker process for message discovery by sending the `:init_worker`
+ # message. If no new block is available, it reschedules itself to check again after
+ # a specified delay.
+ #
+ # The number of the new block indexed by the block fetcher will be used by the worker
+ # initializer to establish the end of the range where new messages should be discovered.
+ #
+ # ## Parameters
+ # - `:wait_for_new_block`: The message that triggers the waiting process.
+ # - `state`: The current state of the fetcher.
+ #
+ # ## Returns
+ # - `{:noreply, new_state}` where the new indexed block number is stored, or retain
+ # the current state while awaiting new blocks.
+ @impl GenServer
+ def handle_info(:wait_for_new_block, %{data: _} = state) do
+ {time_of_start, interim_data} =
+ if is_nil(Map.get(state.data, :time_of_start)) do
+ now = DateTime.utc_now()
+ updated_data = Map.put(state.data, :time_of_start, now)
+ {now, updated_data}
+ else
+ {state.data.time_of_start, state.data}
+ end
+
+ new_data =
+ case Db.closest_block_after_timestamp(time_of_start) do
+ {:ok, block} ->
+ Process.send(self(), :init_worker, [])
+
+ interim_data
+ |> Map.put(:new_block, block)
+ |> Map.delete(:time_of_start)
+
+ {:error, _} ->
+ log_warning("No progress of the block fetcher found")
+ Process.send_after(self(), :wait_for_new_block, :timer.seconds(@wait_for_new_block_delay))
+ interim_data
+ end
+
+ {:noreply, %{state | data: new_data}}
+ end
+
+ # Sets the initial parameters for discovering historical messages. This function
+ # calculates the end blocks for both L1-to-L2 and L2-to-L1 message discovery
+ # processes based on the earliest messages already indexed.
If no messages are + # available, the block number before the latest indexed block will be used. + # These end blocks are used to initiate the discovery process in subsequent iterations. + # + # After identifying the initial values, the function immediately transitions to + # the L2-to-L1 message discovery process by sending the `:historical_msg_from_l2` + # message. + # + # ## Parameters + # - `:init_worker`: The message that triggers the handler. + # - `state`: The current state of the fetcher. + # + # ## Returns + # - `{:noreply, new_state}` where the end blocks for both L1-to-L2 and L2-to-L1 + # message discovery are established. + @impl GenServer + def handle_info(:init_worker, %{data: _} = state) do + historical_msg_from_l2_end_block = Db.rollup_block_to_discover_missed_messages_from_l2(state.data.new_block - 1) + historical_msg_to_l2_end_block = Db.rollup_block_to_discover_missed_messages_to_l2(state.data.new_block - 1) + + Process.send(self(), :historical_msg_from_l2, []) + + new_data = + Map.merge(state.data, %{ + duration: 0, + progressed: false, + historical_msg_from_l2_end_block: historical_msg_from_l2_end_block, + historical_msg_to_l2_end_block: historical_msg_to_l2_end_block + }) + + {:noreply, %{state | data: new_data}} + end + + # Processes the next iteration of historical L2-to-L1 message discovery. + # + # This function uses the results from the previous iteration to set the end block + # for the current message discovery iteration. It identifies the start block and + # requests rollup logs within the specified range to explore `L2ToL1Tx` events. + # Discovered events are used to compose messages to be stored in the database. + # Before being stored in the database, each message is assigned the appropriate + # status based on the current state of the rollup. 
+ # + # After importing the messages, the function immediately switches to the process + # of L1-to-L2 message discovery for the next range of blocks by sending + # the `:historical_msg_to_l2` message. + # + # ## Parameters + # - `:historical_msg_from_l2`: The message triggering the handler. + # - `state`: The current state of the fetcher containing necessary data like + # the end block identified after the previous iteration of historical + # message discovery from L2. + # + # ## Returns + # - `{:noreply, new_state}` where the end block for the next L2-to-L1 message + # discovery iteration is updated based on the results of the current iteration. + @impl GenServer + def handle_info( + :historical_msg_from_l2, + %{ + data: %{duration: _, historical_msg_from_l2_end_block: _, progressed: _} + } = state + ) do + end_block = state.data.historical_msg_from_l2_end_block + + {handle_duration, {:ok, start_block}} = + :timer.tc(&HistoricalMessagesOnL2.discover_historical_messages_from_l2/2, [end_block, state]) + + Process.send(self(), :historical_msg_to_l2, []) + + progressed = state.data.progressed || (not is_nil(start_block) && start_block - 1 < end_block) + + new_data = + Map.merge(state.data, %{ + duration: increase_duration(state.data, handle_duration), + progressed: progressed, + historical_msg_from_l2_end_block: if(is_nil(start_block), do: nil, else: start_block - 1) + }) + + {:noreply, %{state | data: new_data}} + end + + # Processes the next iteration of historical L1-to-L2 message discovery. + # + # This function uses the results from the previous iteration to set the end block for + # the current message discovery iteration. It identifies the start block and requests + # rollup blocks within the specified range through RPC to explore transactions + # containing a `requestId` in their body. This RPC request is necessary because the + # `requestId` field is not present in the transaction model of already indexed + # transactions in the database. 
The discovered transactions are then used to construct + # messages, which are subsequently stored in the database. These imported messages are + # marked as `:relayed`, signifying that they represent completed actions from L1 to L2. + # + # After importing the messages, the function immediately switches to the process + # of choosing a delay prior to the next iteration of historical messages discovery + # by sending the `:plan_next_iteration` message. + # + # ## Parameters + # - `:historical_msg_to_l2`: The message triggering the handler. + # - `state`: The current state of the fetcher containing necessary data, like the end + # block identified after the previous iteration of historical message discovery. + # + # ## Returns + # - `{:noreply, new_state}` where the end block for the next L1-to-L2 message discovery + # iteration is updated based on the results of the current iteration. + @impl GenServer + def handle_info( + :historical_msg_to_l2, + %{ + data: %{duration: _, historical_msg_to_l2_end_block: _, progressed: _} + } = state + ) do + end_block = state.data.historical_msg_to_l2_end_block + + {handle_duration, {:ok, start_block}} = + :timer.tc(&HistoricalMessagesOnL2.discover_historical_messages_to_l2/2, [end_block, state]) + + Process.send(self(), :plan_next_iteration, []) + + progressed = state.data.progressed || (not is_nil(start_block) && start_block - 1 < end_block) + + new_data = + Map.merge(state.data, %{ + duration: increase_duration(state.data, handle_duration), + progressed: progressed, + historical_msg_to_l2_end_block: if(is_nil(start_block), do: nil, else: start_block - 1) + }) + + {:noreply, %{state | data: new_data}} + end + + # Decides whether to stop or continue the fetcher based on the current state of message discovery. + # + # If both `historical_msg_from_l2_end_block` and `historical_msg_to_l2_end_block` are 0 or less, + # indicating that there are no more historical messages to fetch, the task is stopped with a normal + # termination. 
+ # + # ## Parameters + # - `:plan_next_iteration`: The message that triggers this function. + # - `state`: The current state of the fetcher. + # + # ## Returns + # - `{:stop, :normal, state}`: Ends the fetcher's operation cleanly. + @impl GenServer + def handle_info( + :plan_next_iteration, + %{ + data: %{ + historical_msg_from_l2_end_block: from_l2_end_block, + historical_msg_to_l2_end_block: to_l2_end_block + } + } = state + ) + when from_l2_end_block <= 0 and to_l2_end_block <= 0 do + {:stop, :normal, state} + end + + # Plans the next iteration for the historical messages discovery based on the state's `progressed` flag. + # + # If no progress was made (`progressed` is false), schedules the next check based + # on the `recheck_interval`, adjusted by the time already spent. If progress was + # made, it imposes a shorter delay to quickly check again, helping to reduce CPU + # usage during idle periods. + # + # The chosen delay is used to schedule the next iteration of historical messages discovery + # by sending `:historical_msg_from_l2`. + # + # ## Parameters + # - `:plan_next_iteration`: The message that triggers this function. + # - `state`: The current state of the fetcher containing both the fetcher configuration + # and data needed to determine the next steps. + # + # ## Returns + # - `{:noreply, state}` where `state` contains the reset `duration` of the iteration and + # the flag if the messages discovery process `progressed`. 
+ @impl GenServer + def handle_info( + :plan_next_iteration, + %{config: %{recheck_interval: _}, data: %{duration: _, progressed: _}} = state + ) do + next_timeout = + if state.data.progressed do + # For the case when all historical messages are not received yet + # make a small delay to release CPU a bit + :timer.seconds(@release_cpu_delay) + else + max(state.config.recheck_interval - div(state.data.duration, 1000), 0) + end + + Process.send_after(self(), :historical_msg_from_l2, next_timeout) + + new_data = + state.data + |> Map.put(:duration, 0) + |> Map.put(:progressed, false) + + {:noreply, %{state | data: new_data}} + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex new file mode 100644 index 000000000000..1974ed14dcae --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex @@ -0,0 +1,459 @@ +defmodule Indexer.Fetcher.Arbitrum.TrackingBatchesStatuses do + @moduledoc """ + Manages the tracking and updating of the statuses of rollup batches, confirmations, and cross-chain message executions for an Arbitrum rollup. + + This module orchestrates the workflow for discovering new and historical + batches of rollup transactions, confirmations of rollup blocks, and + executions of L2-to-L1 messages. It ensures the accurate tracking and + updating of the rollup process stages. + + The fetcher's operation cycle begins with the `:init_worker` message, which + establishes the initial state with the necessary configuration. + + The process then progresses through a sequence of steps, each triggered by + specific messages: + - `:check_new_batches`: Discovers new batches of rollup transactions and + updates their statuses. + - `:check_new_confirmations`: Identifies new confirmations of rollup blocks + to update their statuses. + - `:check_new_executions`: Finds new executions of L2-to-L1 messages to + update their statuses. 
+ - `:check_historical_batches`: Processes historical batches of rollup + transactions. + - `:check_historical_confirmations`: Handles historical confirmations of + rollup blocks. + - `:check_historical_executions`: Manages historical executions of L2-to-L1 + messages. + - `:check_lifecycle_txs_finalization`: Finalizes the status of lifecycle + transactions, confirming the blocks and messages involved. + + Discovery of rollup transaction batches is executed by requesting logs on L1 + that correspond to the `SequencerBatchDelivered` event emitted by the + Arbitrum `SequencerInbox` contract. + + Discovery of rollup block confirmations is executed by requesting logs on L1 + that correspond to the `SendRootUpdated` event emitted by the Arbitrum + `Outbox` contract. + + Discovery of the L2-to-L1 message executions occurs by requesting logs on L1 + that correspond to the `OutBoxTransactionExecuted` event emitted by the + Arbitrum `Outbox` contract. + + When processing batches or confirmations, the L2-to-L1 messages included in + the corresponding rollup blocks are updated to reflect their status changes. 
+ """ + + use GenServer + use Indexer.Fetcher + + alias Indexer.Fetcher.Arbitrum.Workers.{L1Finalization, NewBatches, NewConfirmations, NewL1Executions} + + import Indexer.Fetcher.Arbitrum.Utils.Helper, only: [increase_duration: 2] + + alias Indexer.Helper, as: IndexerHelper + alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + + require Logger + + def child_spec(start_link_arguments) do + spec = %{ + id: __MODULE__, + start: {__MODULE__, :start_link, start_link_arguments}, + restart: :transient, + type: :worker + } + + Supervisor.child_spec(spec, []) + end + + def start_link(args, gen_server_options \\ []) do + GenServer.start_link(__MODULE__, args, Keyword.put_new(gen_server_options, :name, __MODULE__)) + end + + @impl GenServer + def init(args) do + Logger.metadata(fetcher: :arbitrum_batches_tracker) + + config_common = Application.get_all_env(:indexer)[Indexer.Fetcher.Arbitrum] + l1_rpc = config_common[:l1_rpc] + l1_rpc_block_range = config_common[:l1_rpc_block_range] + l1_rollup_address = config_common[:l1_rollup_address] + l1_rollup_init_block = config_common[:l1_rollup_init_block] + l1_start_block = config_common[:l1_start_block] + l1_rpc_chunk_size = config_common[:l1_rpc_chunk_size] + rollup_chunk_size = config_common[:rollup_chunk_size] + + config_tracker = Application.get_all_env(:indexer)[__MODULE__] + recheck_interval = config_tracker[:recheck_interval] + messages_to_blocks_shift = config_tracker[:messages_to_blocks_shift] + track_l1_tx_finalization = config_tracker[:track_l1_tx_finalization] + finalized_confirmations = config_tracker[:finalized_confirmations] + confirmation_batches_depth = config_tracker[:confirmation_batches_depth] + new_batches_limit = config_tracker[:new_batches_limit] + + Process.send(self(), :init_worker, []) + + {:ok, + %{ + config: %{ + l1_rpc: %{ + json_rpc_named_arguments: IndexerHelper.json_rpc_named_arguments(l1_rpc), + logs_block_range: l1_rpc_block_range, + chunk_size: l1_rpc_chunk_size, + track_finalization: 
track_l1_tx_finalization, + finalized_confirmations: finalized_confirmations + }, + rollup_rpc: %{ + json_rpc_named_arguments: args[:json_rpc_named_arguments], + chunk_size: rollup_chunk_size + }, + recheck_interval: recheck_interval, + l1_rollup_address: l1_rollup_address, + l1_start_block: l1_start_block, + l1_rollup_init_block: l1_rollup_init_block, + new_batches_limit: new_batches_limit, + messages_to_blocks_shift: messages_to_blocks_shift, + confirmation_batches_depth: confirmation_batches_depth + }, + data: %{} + }} + end + + @impl GenServer + def handle_info({ref, _result}, state) do + Process.demonitor(ref, [:flush]) + {:noreply, state} + end + + # Initializes the worker for discovering batches of rollup transactions, confirmations of rollup blocks, and executions of L2-to-L1 messages. + # + # This function sets up the initial state for the fetcher, identifying the + # starting blocks for new and historical discoveries of batches, confirmations, + # and executions. It also retrieves addresses for the Arbitrum Outbox and + # SequencerInbox contracts. + # + # After initializing these parameters, it immediately sends `:check_new_batches` + # to commence the fetcher loop. + # + # ## Parameters + # - `:init_worker`: The message triggering the initialization. + # - `state`: The current state of the process, containing initial configuration + # data. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is updated with Arbitrum contract + # addresses and starting blocks for new and historical discoveries. 
+ @impl GenServer + def handle_info( + :init_worker, + %{ + config: %{ + l1_rpc: %{json_rpc_named_arguments: json_l1_rpc_named_arguments}, + l1_rollup_address: l1_rollup_address + } + } = state + ) do + %{outbox: outbox_address, sequencer_inbox: sequencer_inbox_address} = + Rpc.get_contracts_for_rollup( + l1_rollup_address, + :inbox_outbox, + json_l1_rpc_named_arguments + ) + + l1_start_block = Rpc.get_l1_start_block(state.config.l1_start_block, json_l1_rpc_named_arguments) + + # TODO: it is necessary to develop a way to discover missed batches to cover the case + # when the batch #1, #2 and #4 are in DB, but #3 is not + # One of the approaches is to look deeper than the latest committed batch and + # check whether batches were already handled or not. + new_batches_start_block = Db.l1_block_to_discover_latest_committed_batch(l1_start_block) + historical_batches_end_block = Db.l1_block_to_discover_earliest_committed_batch(l1_start_block - 1) + + new_confirmations_start_block = Db.l1_block_of_latest_confirmed_block(l1_start_block) + + # TODO: it is necessary to develop a way to discover missed executions. + # One of the approaches is to look deeper than the latest execution and + # check whether executions were already handled or not. 
+ new_executions_start_block = Db.l1_block_to_discover_latest_execution(l1_start_block) + historical_executions_end_block = Db.l1_block_to_discover_earliest_execution(l1_start_block - 1) + + Process.send(self(), :check_new_batches, []) + + new_state = + state + |> Map.put( + :config, + Map.merge(state.config, %{ + l1_start_block: l1_start_block, + l1_outbox_address: outbox_address, + l1_sequencer_inbox_address: sequencer_inbox_address + }) + ) + |> Map.put( + :data, + Map.merge(state.data, %{ + new_batches_start_block: new_batches_start_block, + historical_batches_end_block: historical_batches_end_block, + new_confirmations_start_block: new_confirmations_start_block, + historical_confirmations_end_block: nil, + historical_confirmations_start_block: nil, + new_executions_start_block: new_executions_start_block, + historical_executions_end_block: historical_executions_end_block + }) + ) + + {:noreply, new_state} + end + + # Initiates the process of discovering and handling new batches of rollup transactions. + # + # This function fetches logs within the calculated L1 block range to identify new + # batches of rollup transactions. The discovered batches and their corresponding + # rollup blocks and transactions are processed and linked. The L2-to-L1 messages + # included in these rollup blocks are also updated to reflect their commitment. + # + # After processing, it immediately transitions to checking new confirmations of + # rollup blocks by sending the `:check_new_confirmations` message. + # + # ## Parameters + # - `:check_new_batches`: The message that triggers the function. + # - `state`: The current state of the fetcher, containing configuration and data + # needed for the discovery of new batches. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is updated with the new start block for + # the next iteration of new batch discovery. 
+ @impl GenServer + def handle_info(:check_new_batches, state) do + {handle_duration, {:ok, end_block}} = :timer.tc(&NewBatches.discover_new_batches/1, [state]) + + Process.send(self(), :check_new_confirmations, []) + + new_data = + Map.merge(state.data, %{ + duration: increase_duration(state.data, handle_duration), + new_batches_start_block: end_block + 1 + }) + + {:noreply, %{state | data: new_data}} + end + + # Initiates the discovery and processing of new confirmations for rollup blocks. + # + # This function fetches logs within the calculated L1 block range to identify + # new confirmations for rollup blocks. The discovered confirmations are + # processed to update the status of rollup blocks and L2-to-L1 messages + # accordingly. + # + # After processing, it immediately transitions to discovering new executions + # of L2-to-L1 messages by sending the `:check_new_executions` message. + # + # ## Parameters + # - `:check_new_confirmations`: The message that triggers the function. + # - `state`: The current state of the fetcher, containing configuration and + # data needed for the discovery of new rollup block confirmations. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is updated with the new start + # block for the next iteration of new confirmation discovery. 
+ @impl GenServer + def handle_info(:check_new_confirmations, state) do + {handle_duration, {retcode, end_block}} = :timer.tc(&NewConfirmations.discover_new_rollup_confirmation/1, [state]) + + Process.send(self(), :check_new_executions, []) + + updated_fields = + case retcode do + :ok -> %{} + _ -> %{historical_confirmations_end_block: nil, historical_confirmations_start_block: nil} + end + |> Map.merge(%{ + # credo:disable-for-previous-line Credo.Check.Refactor.PipeChainStart + duration: increase_duration(state.data, handle_duration), + new_confirmations_start_block: end_block + 1 + }) + + new_data = Map.merge(state.data, updated_fields) + + {:noreply, %{state | data: new_data}} + end + + # Initiates the process of discovering and handling new executions for L2-to-L1 messages. + # + # This function identifies new executions of L2-to-L1 messages by fetching logs + # for the calculated L1 block range. It updates the status of these messages and + # links them with the corresponding lifecycle transactions. + # + # After processing, it immediately transitions to checking historical batches of + # rollup transaction by sending the `:check_historical_batches` message. + # + # ## Parameters + # - `:check_new_executions`: The message that triggers the function. + # - `state`: The current state of the fetcher, containing configuration and data + # needed for the discovery of new message executions. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is updated with the new start + # block for the next iteration of new message executions discovery. 
+ @impl GenServer
+ def handle_info(:check_new_executions, state) do
+ {handle_duration, {:ok, end_block}} = :timer.tc(&NewL1Executions.discover_new_l1_messages_executions/1, [state])
+
+ Process.send(self(), :check_historical_batches, [])
+
+ new_data =
+ Map.merge(state.data, %{
+ duration: increase_duration(state.data, handle_duration),
+ new_executions_start_block: end_block + 1
+ })
+
+ {:noreply, %{state | data: new_data}}
+ end
+
+ # Initiates the process of discovering and handling historical batches of rollup transactions.
+ #
+ # This function fetches logs within the calculated L1 block range to identify the
+ # historical batches of rollup transactions. After discovery the linkage between
+ # batches and the corresponding rollup blocks and transactions is built. The
+ # status of the L2-to-L1 messages included in the corresponding rollup blocks is
+ # also updated.
+ #
+ # After processing, it immediately transitions to checking historical
+ # confirmations of rollup blocks by sending the `:check_historical_confirmations`
+ # message.
+ #
+ # ## Parameters
+ # - `:check_historical_batches`: The message that triggers the function.
+ # - `state`: The current state of the fetcher, containing configuration and data
+ # needed for the discovery of historical batches.
+ #
+ # ## Returns
+ # - `{:noreply, new_state}` where `new_state` is updated with the new end block
+ # for the next iteration of historical batch discovery.
+ @impl GenServer
+ def handle_info(:check_historical_batches, state) do
+ {handle_duration, {:ok, start_block}} = :timer.tc(&NewBatches.discover_historical_batches/1, [state])
+
+ Process.send(self(), :check_historical_confirmations, [])
+
+ new_data =
+ Map.merge(state.data, %{
+ duration: increase_duration(state.data, handle_duration),
+ historical_batches_end_block: start_block - 1
+ })
+
+ {:noreply, %{state | data: new_data}}
+ end
+
+ # Initiates the process of discovering and handling historical confirmations of rollup blocks.
+ # + # This function fetches logs within the calculated range to identify the + # historical confirmations of rollup blocks. The discovered confirmations are + # processed to update the status of rollup blocks and L2-to-L1 messages + # accordingly. + # + # After processing, it immediately transitions to checking historical executions + # of L2-to-L1 messages by sending the `:check_historical_executions` message. + # + # ## Parameters + # - `:check_historical_confirmations`: The message that triggers the function. + # - `state`: The current state of the fetcher, containing configuration and data + # needed for the discovery of historical confirmations. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is updated with the new start and + # end blocks for the next iteration of historical confirmations discovery. + @impl GenServer + def handle_info(:check_historical_confirmations, state) do + {handle_duration, {retcode, {start_block, end_block}}} = + :timer.tc(&NewConfirmations.discover_historical_rollup_confirmation/1, [state]) + + Process.send(self(), :check_historical_executions, []) + + updated_fields = + case retcode do + :ok -> %{historical_confirmations_end_block: start_block - 1, historical_confirmations_start_block: end_block} + _ -> %{historical_confirmations_end_block: nil, historical_confirmations_start_block: nil} + end + |> Map.merge(%{ + # credo:disable-for-previous-line Credo.Check.Refactor.PipeChainStart + duration: increase_duration(state.data, handle_duration) + }) + + new_data = Map.merge(state.data, updated_fields) + + {:noreply, %{state | data: new_data}} + end + + # Initiates the discovery and handling of historical L2-to-L1 message executions. + # + # This function discovers historical executions of L2-to-L1 messages by retrieving + # logs within a specified L1 block range. It updates their status accordingly and + # builds the link between the messages and the lifecycle transactions where they + # are executed. 
+ # + # After processing, it immediately transitions to finalizing lifecycle transactions + # by sending the `:check_lifecycle_txs_finalization` message. + # + # ## Parameters + # - `:check_historical_executions`: The message that triggers the function. + # - `state`: The current state of the fetcher, containing configuration and data + # needed for the discovery of historical executions. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is updated with the new end block for + # the next iteration of historical executions. + @impl GenServer + def handle_info(:check_historical_executions, state) do + {handle_duration, {:ok, start_block}} = + :timer.tc(&NewL1Executions.discover_historical_l1_messages_executions/1, [state]) + + Process.send(self(), :check_lifecycle_txs_finalization, []) + + new_data = + Map.merge(state.data, %{ + duration: increase_duration(state.data, handle_duration), + historical_executions_end_block: start_block - 1 + }) + + {:noreply, %{state | data: new_data}} + end + + # Handles the periodic finalization check of lifecycle transactions. + # + # This function updates the finalization status of lifecycle transactions based on + # the current state of the L1 blockchain. It discovers all transactions that are not + # yet finalized up to the `safe` L1 block and changes their status to `:finalized`. + # + # After processing, as the final handler in the loop, it schedules the + # `:check_new_batches` message to initiate the next iteration. The scheduling of this + # message is delayed to account for the time spent on the previous handlers' execution. + # + # ## Parameters + # - `:check_lifecycle_txs_finalization`: The message that triggers the function. + # - `state`: The current state of the fetcher, containing the configuration needed for + # the lifecycle transactions status update. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is the updated state with the reset duration. 
+ @impl GenServer + def handle_info(:check_lifecycle_txs_finalization, state) do + {handle_duration, _} = + if state.config.l1_rpc.track_finalization do + :timer.tc(&L1Finalization.monitor_lifecycle_txs/1, [state]) + else + {0, nil} + end + + next_timeout = max(state.config.recheck_interval - div(increase_duration(state.data, handle_duration), 1000), 0) + + Process.send_after(self(), :check_new_batches, next_timeout) + + new_data = + Map.merge(state.data, %{ + duration: 0 + }) + + {:noreply, %{state | data: new_data}} + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex new file mode 100644 index 000000000000..59c43d48a528 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex @@ -0,0 +1,223 @@ +defmodule Indexer.Fetcher.Arbitrum.TrackingMessagesOnL1 do + @moduledoc """ + Manages the tracking and processing of new and historical cross-chain messages initiated on L1 for an Arbitrum rollup. + + This module is responsible for continuously monitoring and importing new messages + initiated from Layer 1 (L1) to Arbitrum's Layer 2 (L2), as well as discovering + and processing historical messages that were sent previously but have not yet + been processed. + + The fetcher's operation is divided into 3 phases, each initiated by sending + specific messages: + - `:init_worker`: Initializes the worker with the required configuration for message + tracking. + - `:check_new_msgs_to_rollup`: Processes new L1-to-L2 messages appearing on L1 as + the blockchain progresses. + - `:check_historical_msgs_to_rollup`: Retrieves historical L1-to-L2 messages that + were missed if the message synchronization process did not start from the + Arbitrum rollup's inception. 
+ + While the `:init_worker` message is sent only once during the fetcher startup, + the subsequent sending of `:check_new_msgs_to_rollup` and + `:check_historical_msgs_to_rollup` forms the operation cycle of the fetcher. + + Discovery of L1-to-L2 messages is executed by requesting logs on L1 that correspond + to the `MessageDelivered` event emitted by the Arbitrum bridge contract. + Cross-chain messages are composed of information from the logs' data as well as from + the corresponding transaction details. To get the transaction details, RPC calls + `eth_getTransactionByHash` are made in chunks. + """ + + use GenServer + use Indexer.Fetcher + + import Indexer.Fetcher.Arbitrum.Utils.Helper, only: [increase_duration: 2] + + alias Indexer.Fetcher.Arbitrum.Workers.NewMessagesToL2 + + alias Indexer.Helper, as: IndexerHelper + alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + + require Logger + + def child_spec(start_link_arguments) do + spec = %{ + id: __MODULE__, + start: {__MODULE__, :start_link, start_link_arguments}, + restart: :transient, + type: :worker + } + + Supervisor.child_spec(spec, []) + end + + def start_link(args, gen_server_options \\ []) do + GenServer.start_link(__MODULE__, args, Keyword.put_new(gen_server_options, :name, __MODULE__)) + end + + @impl GenServer + def init(args) do + Logger.metadata(fetcher: :arbitrum_bridge_l1) + + config_common = Application.get_all_env(:indexer)[Indexer.Fetcher.Arbitrum] + l1_rpc = config_common[:l1_rpc] + l1_rpc_block_range = config_common[:l1_rpc_block_range] + l1_rollup_address = config_common[:l1_rollup_address] + l1_rollup_init_block = config_common[:l1_rollup_init_block] + l1_start_block = config_common[:l1_start_block] + l1_rpc_chunk_size = config_common[:l1_rpc_chunk_size] + + config_tracker = Application.get_all_env(:indexer)[__MODULE__] + recheck_interval = config_tracker[:recheck_interval] + + Process.send(self(), :init_worker, []) + + {:ok, + %{ + config: %{ + json_l2_rpc_named_arguments: 
args[:json_rpc_named_arguments], + json_l1_rpc_named_arguments: IndexerHelper.json_rpc_named_arguments(l1_rpc), + recheck_interval: recheck_interval, + l1_rpc_chunk_size: l1_rpc_chunk_size, + l1_rpc_block_range: l1_rpc_block_range, + l1_rollup_address: l1_rollup_address, + l1_start_block: l1_start_block, + l1_rollup_init_block: l1_rollup_init_block + }, + data: %{} + }} + end + + @impl GenServer + def handle_info({ref, _result}, state) do + Process.demonitor(ref, [:flush]) + {:noreply, state} + end + + # Initializes the worker for discovering new and historical L1-to-L2 messages. + # + # This function prepares the initial parameters for the message discovery process. + # It fetches the Arbitrum bridge address and determines the starting block for + # new message discovery. If the starting block is not configured (set to a default + # value), the latest block number from L1 is used as the start. It also calculates + # the end block for historical message discovery. + # + # After setting these parameters, it immediately transitions to discovering new + # messages by sending the `:check_new_msgs_to_rollup` message. + # + # ## Parameters + # - `:init_worker`: The message triggering the initialization. + # - `state`: The current state of the process, containing configuration for data + # initialization and further message discovery. + # + # ## Returns + # - `{:noreply, new_state}` where `new_state` is updated with the bridge address, + # determined start block for new messages, and calculated end block for + # historical messages. 
+ @impl GenServer + def handle_info( + :init_worker, + %{config: %{l1_rollup_address: _, json_l1_rpc_named_arguments: _, l1_start_block: _}, data: _} = state + ) do + %{bridge: bridge_address} = + Rpc.get_contracts_for_rollup(state.config.l1_rollup_address, :bridge, state.config.json_l1_rpc_named_arguments) + + l1_start_block = Rpc.get_l1_start_block(state.config.l1_start_block, state.config.json_l1_rpc_named_arguments) + new_msg_to_l2_start_block = Db.l1_block_to_discover_latest_message_to_l2(l1_start_block) + historical_msg_to_l2_end_block = Db.l1_block_to_discover_earliest_message_to_l2(l1_start_block - 1) + + Process.send(self(), :check_new_msgs_to_rollup, []) + + new_state = + state + |> Map.put( + :config, + Map.merge(state.config, %{ + l1_start_block: l1_start_block, + l1_bridge_address: bridge_address + }) + ) + |> Map.put( + :data, + Map.merge(state.data, %{ + new_msg_to_l2_start_block: new_msg_to_l2_start_block, + historical_msg_to_l2_end_block: historical_msg_to_l2_end_block + }) + ) + + {:noreply, new_state} + end + + # Initiates the process to discover and handle new L1-to-L2 messages initiated from L1. + # + # This function discovers new messages from L1 to L2 by retrieving logs for the + # calculated L1 block range. Discovered events are used to compose messages, which + # are then stored in the database. + # + # After processing, the function immediately transitions to discovering historical + # messages by sending the `:check_historical_msgs_to_rollup` message. + # + # ## Parameters + # - `:check_new_msgs_to_rollup`: The message that triggers the handler. + # - `state`: The current state of the fetcher, containing configuration and data + # needed for message discovery. + # + # ## Returns + # - `{:noreply, new_state}` where the starting block for the next new L1-to-L2 + # message discovery iteration is updated based on the results of the current + # iteration. 
+ @impl GenServer + def handle_info(:check_new_msgs_to_rollup, %{data: _} = state) do + {handle_duration, {:ok, end_block}} = + :timer.tc(&NewMessagesToL2.discover_new_messages_to_l2/1, [ + state + ]) + + Process.send(self(), :check_historical_msgs_to_rollup, []) + + new_data = + Map.merge(state.data, %{ + duration: increase_duration(state.data, handle_duration), + new_msg_to_l2_start_block: end_block + 1 + }) + + {:noreply, %{state | data: new_data}} + end + + # Initiates the process to discover and handle historical L1-to-L2 messages initiated from L1. + # + # This function discovers historical messages by retrieving logs for a calculated L1 block range. + # The discovered events are then used to compose messages to be stored in the database. + # + # After processing, as it is the final handler in the loop, it schedules the + # `:check_new_msgs_to_rollup` message to initiate the next iteration. The scheduling of this + # message is delayed, taking into account the time spent on the previous handler's execution. + # + # ## Parameters + # - `:check_historical_msgs_to_rollup`: The message that triggers the handler. + # - `state`: The current state of the fetcher, containing configuration and data needed for + # message discovery. + # + # ## Returns + # - `{:noreply, new_state}` where the end block for the next L1-to-L2 message discovery + # iteration is updated based on the results of the current iteration. 
+ @impl GenServer + def handle_info(:check_historical_msgs_to_rollup, %{config: %{recheck_interval: _}, data: _} = state) do + {handle_duration, {:ok, start_block}} = + :timer.tc(&NewMessagesToL2.discover_historical_messages_to_l2/1, [ + state + ]) + + next_timeout = max(state.config.recheck_interval - div(increase_duration(state.data, handle_duration), 1000), 0) + + Process.send_after(self(), :check_new_msgs_to_rollup, next_timeout) + + new_data = + Map.merge(state.data, %{ + duration: 0, + historical_msg_to_l2_end_block: start_block - 1 + }) + + {:noreply, %{state | data: new_data}} + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db.ex new file mode 100644 index 000000000000..5c56001464c4 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db.ex @@ -0,0 +1,787 @@ +defmodule Indexer.Fetcher.Arbitrum.Utils.Db do + @moduledoc """ + Common functions to simplify DB routines for Indexer.Fetcher.Arbitrum fetchers + """ + + import Ecto.Query, only: [from: 2] + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1] + + alias Explorer.{Chain, Repo} + alias Explorer.Chain.Arbitrum.Reader + alias Explorer.Chain.Block, as: FullBlock + alias Explorer.Chain.{Data, Hash, Log} + + alias Explorer.Utility.MissingBlockRange + + require Logger + + # 32-byte signature of the event L2ToL1Tx(address caller, address indexed destination, uint256 indexed hash, uint256 indexed position, uint256 arbBlockNum, uint256 ethBlockNum, uint256 timestamp, uint256 callvalue, bytes data) + @l2_to_l1_event "0x3e7aafa77dbf186b7fd488006beff893744caa3c4f6f299e8a709fa2087374fc" + + @doc """ + Indexes L1 transactions provided in the input map. For transactions that + are already in the database, existing indices are taken. For new transactions, + the next available indices are assigned. + + ## Parameters + - `new_l1_txs`: A map of L1 transaction descriptions. 
The keys of the map are + transaction hashes. + + ## Returns + - `l1_txs`: A map of L1 transaction descriptions. Each element is extended with + the key `:id`, representing the index of the L1 transaction in the + `arbitrum_lifecycle_l1_transactions` table. + """ + @spec get_indices_for_l1_transactions(map()) :: map() + # TODO: consider a way to remove duplicate with ZkSync.Utils.Db + # credo:disable-for-next-line Credo.Check.Design.DuplicatedCode + def get_indices_for_l1_transactions(new_l1_txs) + when is_map(new_l1_txs) do + # Get indices for l1 transactions previously handled + l1_txs = + new_l1_txs + |> Map.keys() + |> Reader.lifecycle_transactions() + |> Enum.reduce(new_l1_txs, fn {hash, id}, txs -> + {_, txs} = + Map.get_and_update!(txs, hash.bytes, fn l1_tx -> + {l1_tx, Map.put(l1_tx, :id, id)} + end) + + txs + end) + + # Get the next index for the first new transaction based + # on the indices existing in DB + l1_tx_next_id = Reader.next_lifecycle_transaction_id() + + # Assign new indices for the transactions which are not in + # the l1 transactions table yet + {updated_l1_txs, _} = + l1_txs + |> Map.keys() + |> Enum.reduce( + {l1_txs, l1_tx_next_id}, + fn hash, {txs, next_id} -> + tx = txs[hash] + id = Map.get(tx, :id) + + if is_nil(id) do + {Map.put(txs, hash, Map.put(tx, :id, next_id)), next_id + 1} + else + {txs, next_id} + end + end + ) + + updated_l1_txs + end + + @doc """ + Calculates the next L1 block number to search for the latest committed batch. + + ## Parameters + - `value_if_nil`: The default value to return if no committed batch is found. + + ## Returns + - The next L1 block number after the latest committed batch or `value_if_nil` if no committed batches are found. 
+ """ + @spec l1_block_to_discover_latest_committed_batch(FullBlock.block_number() | nil) :: FullBlock.block_number() | nil + def l1_block_to_discover_latest_committed_batch(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_committed_batch() do + nil -> + log_warning("No committed batches found in DB") + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Calculates the L1 block number to start the search for committed batches that precede + the earliest batch already discovered. + + ## Parameters + - `value_if_nil`: The default value to return if no committed batch is found. + + ## Returns + - The L1 block number immediately preceding the earliest committed batch, + or `value_if_nil` if no committed batches are found. + """ + @spec l1_block_to_discover_earliest_committed_batch(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_earliest_committed_batch(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_earliest_committed_batch() do + nil -> + log_warning("No committed batches found in DB") + value_if_nil + + value -> + value - 1 + end + end + + @doc """ + Retrieves the block number of the highest rollup block that has been included in a batch. + + ## Parameters + - `value_if_nil`: The default value to return if no rollup batches are found. + + ## Returns + - The number of the highest rollup block included in a batch + or `value_if_nil` if no rollup batches are found. + """ + @spec highest_committed_block(nil | integer()) :: nil | FullBlock.block_number() + def highest_committed_block(value_if_nil) + when is_integer(value_if_nil) or is_nil(value_if_nil) do + case Reader.highest_committed_block() do + nil -> value_if_nil + value -> value + end + end + + @doc """ + Calculates the next L1 block number to search for the latest message sent to L2. 
+ + ## Parameters + - `value_if_nil`: The default value to return if no L1-to-L2 messages have been discovered. + + ## Returns + - The L1 block number immediately following the latest discovered message to L2, + or `value_if_nil` if no messages to L2 have been found. + """ + @spec l1_block_to_discover_latest_message_to_l2(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_latest_message_to_l2(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_discovered_message_to_l2() do + nil -> + log_warning("No messages to L2 found in DB") + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Calculates the next L1 block number to start the search for messages sent to L2 + that precede the earliest message already discovered. + + ## Parameters + - `value_if_nil`: The default value to return if no L1-to-L2 messages have been discovered. + + ## Returns + - The L1 block number immediately preceding the earliest discovered message to L2, + or `value_if_nil` if no messages to L2 have been found. + """ + @spec l1_block_to_discover_earliest_message_to_l2(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_earliest_message_to_l2(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_earliest_discovered_message_to_l2() do + nil -> + log_warning("No messages to L2 found in DB") + value_if_nil + + value -> + value - 1 + end + end + + @doc """ + Determines the rollup block number to start searching for missed messages originating from L2. + + ## Parameters + - `value_if_nil`: The default value to return if no messages originating from L2 have been found. + + ## Returns + - The rollup block number just before the earliest discovered message from L2, + or `value_if_nil` if no messages from L2 are found. 
+  """
+  @spec rollup_block_to_discover_missed_messages_from_l2(nil | FullBlock.block_number()) ::
+          nil | FullBlock.block_number()
+  def rollup_block_to_discover_missed_messages_from_l2(value_if_nil \\ nil)
+      when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do
+    case Reader.rollup_block_of_earliest_discovered_message_from_l2() do
+      nil ->
+        log_warning("No messages from L2 found in DB")
+        value_if_nil
+
+      value ->
+        value - 1
+    end
+  end
+
+  @doc """
+  Determines the rollup block number to start searching for missed messages originating to L2.
+
+  ## Parameters
+  - `value_if_nil`: The default value to return if no messages originating to L2 have been found.
+
+  ## Returns
+  - The rollup block number just before the earliest discovered message to L2,
+    or `value_if_nil` if no messages to L2 are found.
+  """
+  @spec rollup_block_to_discover_missed_messages_to_l2(nil | FullBlock.block_number()) :: nil | FullBlock.block_number()
+  def rollup_block_to_discover_missed_messages_to_l2(value_if_nil \\ nil)
+      when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do
+    case Reader.rollup_block_of_earliest_discovered_message_to_l2() do
+      nil ->
+        # In theory there could be a situation when the earliest message points
+        # to a completion transaction which is not indexed yet. In this case, this
+        # warning will occur.
+        log_warning("No completed messages to L2 found in DB")
+        value_if_nil
+
+      value ->
+        value - 1
+    end
+  end
+
+  @doc """
+  Retrieves the L1 block number immediately following the block where the confirmation transaction
+  for the highest confirmed rollup block was included.
+
+  ## Parameters
+  - `value_if_nil`: The default value to return if no confirmed rollup blocks are found.
+
+  ## Returns
+  - The L1 block number immediately after the block containing the confirmation transaction of
+    the highest confirmed rollup block, or `value_if_nil` if no confirmed rollup blocks are present.
+ """ + @spec l1_block_of_latest_confirmed_block(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_of_latest_confirmed_block(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_confirmed_block() do + nil -> + log_warning("No confirmed blocks found in DB") + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Retrieves the block number of the highest rollup block for which a confirmation transaction + has been sent to L1. + + ## Parameters + - `value_if_nil`: The default value to return if no confirmed rollup blocks are found. + + ## Returns + - The block number of the highest confirmed rollup block, + or `value_if_nil` if no confirmed rollup blocks are found in the database. + """ + @spec highest_confirmed_block(nil | integer()) :: nil | FullBlock.block_number() + def highest_confirmed_block(value_if_nil) + when is_integer(value_if_nil) or is_nil(value_if_nil) do + case Reader.highest_confirmed_block() do + nil -> value_if_nil + value -> value + end + end + + @doc """ + Determines the next L1 block number to search for the latest execution of an L2-to-L1 message. + + ## Parameters + - `value_if_nil`: The default value to return if no execution transactions for L2-to-L1 messages + have been recorded. + + ## Returns + - The L1 block number following the block that contains the latest execution transaction + for an L2-to-L1 message, or `value_if_nil` if no such executions have been found. 
+ """ + @spec l1_block_to_discover_latest_execution(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_latest_execution(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_execution() do + nil -> + log_warning("No L1 executions found in DB") + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Determines the L1 block number just before the block that contains the earliest known + execution transaction for an L2-to-L1 message. + + ## Parameters + - `value_if_nil`: The default value to return if no execution transactions for + L2-to-L1 messages have been found. + + ## Returns + - The L1 block number preceding the earliest known execution transaction for + an L2-to-L1 message, or `value_if_nil` if no such executions are found in the database. + """ + @spec l1_block_to_discover_earliest_execution(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_earliest_execution(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_earliest_execution() do + nil -> + log_warning("No L1 executions found in DB") + value_if_nil + + value -> + value - 1 + end + end + + @doc """ + Retrieves full details of rollup blocks, including associated transactions, for each + block number specified in the input list. + + ## Parameters + - `list_of_block_numbers`: A list of block numbers for which full block details are to be retrieved. + + ## Returns + - A list of `Explorer.Chain.Block` instances containing detailed information for each + block number in the input list. Returns an empty list if no blocks are found for the given numbers. 
+ """ + @spec rollup_blocks(maybe_improper_list(FullBlock.block_number(), [])) :: [FullBlock] + def rollup_blocks(list_of_block_numbers) + when is_list(list_of_block_numbers) do + query = + from( + block in FullBlock, + where: block.number in ^list_of_block_numbers + ) + + query + # :optional is used since a block may not have any transactions + |> Chain.join_associations(%{:transactions => :optional}) + |> Repo.all(timeout: :infinity) + end + + @doc """ + Retrieves unfinalized L1 transactions that are involved in changing the statuses + of rollup blocks or transactions. + + An L1 transaction is considered unfinalized if it has not yet reached a state + where it is permanently included in the blockchain, meaning it is still susceptible + to potential reorganization or change. Transactions are evaluated against + the finalized_block parameter to determine their finalized status. + + ## Parameters + - `finalized_block`: The block number up to which unfinalized transactions are to be retrieved. + + ## Returns + - A list of maps representing unfinalized L1 transactions and compatible with the + database import operation. + """ + @spec lifecycle_unfinalized_transactions(FullBlock.block_number()) :: [ + %{ + id: non_neg_integer(), + hash: Hash, + block_number: FullBlock.block_number(), + timestamp: DateTime, + status: :unfinalized + } + ] + def lifecycle_unfinalized_transactions(finalized_block) + when is_integer(finalized_block) and finalized_block >= 0 do + finalized_block + |> Reader.lifecycle_unfinalized_transactions() + |> Enum.map(&lifecycle_transaction_to_map/1) + end + + @doc """ + Retrieves the block number associated with a specific hash of a rollup block. + + ## Parameters + - `hash`: The hash of the rollup block whose number is to be retrieved. + + ## Returns + - The block number associated with the given rollup block hash. 
+ """ + @spec rollup_block_hash_to_num(binary()) :: FullBlock.block_number() | nil + def rollup_block_hash_to_num(hash) when is_binary(hash) do + Reader.rollup_block_hash_to_num(hash) + end + + @doc """ + Retrieves the L1 batch that includes a specified rollup block number. + + ## Parameters + - `num`: The block number of the rollup block for which the containing + L1 batch is to be retrieved. + + ## Returns + - The `Explorer.Chain.Arbitrum.L1Batch` associated with the given rollup block number + if it exists and its commit transaction is loaded. + """ + @spec get_batch_by_rollup_block_number(FullBlock.block_number()) :: Explorer.Chain.Arbitrum.L1Batch | nil + def get_batch_by_rollup_block_number(num) + when is_integer(num) and num >= 0 do + case Reader.get_batch_by_rollup_block_number(num) do + nil -> + nil + + batch -> + case batch.commitment_transaction do + nil -> + raise "Incorrect state of the DB: commitment_transaction is not loaded for the batch with number #{num}" + + %Ecto.Association.NotLoaded{} -> + raise "Incorrect state of the DB: commitment_transaction is not loaded for the batch with number #{num}" + + _ -> + batch + end + end + end + + @doc """ + Retrieves rollup blocks within a specified block range that have not yet been confirmed. + + ## Parameters + - `first_block`: The starting block number of the range to search for unconfirmed rollup blocks. + - `last_block`: The ending block number of the range. + + ## Returns + - A list of maps, each representing an unconfirmed rollup block within the specified range. + If no unconfirmed blocks are found within the range, an empty list is returned. 
+ """ + @spec unconfirmed_rollup_blocks(FullBlock.block_number(), FullBlock.block_number()) :: [ + %{ + batch_number: non_neg_integer(), + block_number: FullBlock.block_number(), + confirmation_id: non_neg_integer() | nil + } + ] + def unconfirmed_rollup_blocks(first_block, last_block) + when is_integer(first_block) and first_block >= 0 and + is_integer(last_block) and first_block <= last_block do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.unconfirmed_rollup_blocks(first_block, last_block) + |> Enum.map(&rollup_block_to_map/1) + end + + @doc """ + Counts the number of confirmed rollup blocks in a specified batch. + + ## Parameters + - `batch_number`: The batch number for which the count of confirmed rollup blocks + is to be determined. + + ## Returns + - A number of rollup blocks confirmed in the specified batch. + """ + @spec count_confirmed_rollup_blocks_in_batch(non_neg_integer()) :: non_neg_integer() + def count_confirmed_rollup_blocks_in_batch(batch_number) + when is_integer(batch_number) and batch_number >= 0 do + Reader.count_confirmed_rollup_blocks_in_batch(batch_number) + end + + @doc """ + Retrieves a list of L2-to-L1 messages that have been initiated up to + a specified rollup block number. + + ## Parameters + - `block_number`: The block number up to which initiated L2-to-L1 messages + should be retrieved. + + ## Returns + - A list of maps, each representing an initiated L2-to-L1 message compatible with the + database import operation. If no initiated messages are found up to the specified + block number, an empty list is returned. 
+ """ + @spec initiated_l2_to_l1_messages(FullBlock.block_number()) :: [ + %{ + direction: :from_l2, + message_id: non_neg_integer(), + originator_address: binary(), + originating_transaction_hash: binary(), + originating_transaction_block_number: FullBlock.block_number(), + completion_transaction_hash: nil, + status: :initiated + } + ] + def initiated_l2_to_l1_messages(block_number) + when is_integer(block_number) and block_number >= 0 do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.l2_to_l1_messages(:initiated, block_number) + |> Enum.map(&message_to_map/1) + end + + @doc """ + Retrieves a list of L2-to-L1 'sent' messages that have been included up to + a specified rollup block number. + + A message is considered 'sent' when there is a batch including the transaction + that initiated the message, and this batch has been successfully delivered to L1. + + ## Parameters + - `block_number`: The block number up to which sent L2-to-L1 messages are to be retrieved. + + ## Returns + - A list of maps, each representing a sent L2-to-L1 message compatible with the + database import operation. If no messages with the 'sent' status are found by + the specified block number, an empty list is returned. + """ + @spec sent_l2_to_l1_messages(FullBlock.block_number()) :: [ + %{ + direction: :from_l2, + message_id: non_neg_integer(), + originator_address: binary(), + originating_transaction_hash: binary(), + originating_transaction_block_number: FullBlock.block_number(), + completion_transaction_hash: nil, + status: :sent + } + ] + def sent_l2_to_l1_messages(block_number) + when is_integer(block_number) and block_number >= 0 do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.l2_to_l1_messages(:sent, block_number) + |> Enum.map(&message_to_map/1) + end + + @doc """ + Retrieves a list of L2-to-L1 'confirmed' messages that have been included up to + a specified rollup block number. 
+ + A message is considered 'confirmed' when its transaction was included in a rollup block, + and the confirmation of this block has been delivered to L1. + + ## Parameters + - `block_number`: The block number up to which confirmed L2-to-L1 messages are to be retrieved. + + ## Returns + - A list of maps, each representing a confirmed L2-to-L1 message compatible with the + database import operation. If no messages with the 'confirmed' status are found by + the specified block number, an empty list is returned. + """ + @spec confirmed_l2_to_l1_messages(FullBlock.block_number()) :: [ + %{ + direction: :from_l2, + message_id: non_neg_integer(), + originator_address: binary(), + originating_transaction_hash: binary(), + originating_transaction_block_number: FullBlock.block_number(), + completion_transaction_hash: nil, + status: :confirmed + } + ] + def confirmed_l2_to_l1_messages(block_number) + when is_integer(block_number) and block_number >= 0 do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.l2_to_l1_messages(:confirmed, block_number) + |> Enum.map(&message_to_map/1) + end + + @doc """ + Checks if the numbers from the provided list correspond to the numbers of indexed batches. + + ## Parameters + - `batches_numbers`: The list of batch numbers. + + ## Returns + - A list of batch numbers that are indexed and match the provided list, or `[]` + if none of the batch numbers in the provided list exist in the database. The output list + may be smaller than the input list. + """ + @spec batches_exist([non_neg_integer()]) :: [non_neg_integer()] + def batches_exist(batches_numbers) when is_list(batches_numbers) do + Reader.batches_exist(batches_numbers) + end + + @doc """ + Reads a list of transactions executing L2-to-L1 messages by their IDs. + + ## Parameters + - `message_ids`: A list of IDs to retrieve executing transactions for. 
+
+  ## Returns
+  - A list of `Explorer.Chain.Arbitrum.L1Execution` corresponding to the message IDs from
+    the input list. The output list may be smaller than the input list if some IDs do not
+    correspond to any existing transactions.
+  """
+  @spec l1_executions([non_neg_integer()]) :: [Explorer.Chain.Arbitrum.L1Execution]
+  def l1_executions(message_ids) when is_list(message_ids) do
+    Reader.l1_executions(message_ids)
+  end
+
+  @doc """
+  Identifies the range of L1 blocks to investigate for missing confirmations of rollup blocks.
+
+  This function determines the L1 block numbers bounding the interval where gaps in rollup block
+  confirmations might exist. It uses the earliest and latest L1 block numbers associated with
+  unconfirmed rollup blocks to define this range.
+
+  ## Parameters
+  - `right_pos_value_if_nil`: The default value to use for the upper bound of the range if no
+    confirmed blocks are found.
+
+  ## Returns
+  - A tuple containing two elements: the lower and upper bounds of L1 block numbers to check
+    for missing rollup block confirmations. If the necessary confirmation data is unavailable,
+    the first element will be `nil`, and the second will be `right_pos_value_if_nil`.
+ """ + @spec l1_blocks_to_expect_rollup_blocks_confirmation(nil | FullBlock.block_number()) :: + {nil | FullBlock.block_number(), nil | FullBlock.block_number()} + def l1_blocks_to_expect_rollup_blocks_confirmation(right_pos_value_if_nil) + when (is_integer(right_pos_value_if_nil) and right_pos_value_if_nil >= 0) or is_nil(right_pos_value_if_nil) do + case Reader.l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap() do + nil -> + log_warning("No L1 confirmations found in DB") + {nil, right_pos_value_if_nil} + + {nil, newer_confirmation_l1_block} -> + {nil, newer_confirmation_l1_block - 1} + + {older_confirmation_l1_block, newer_confirmation_l1_block} -> + {older_confirmation_l1_block + 1, newer_confirmation_l1_block - 1} + end + end + + @doc """ + Retrieves all rollup logs in the range of blocks from `start_block` to `end_block` + corresponding to the `L2ToL1Tx` event emitted by the ArbSys contract. + + ## Parameters + - `start_block`: The starting block number of the range from which to + retrieve the transaction logs containing L2-to-L1 messages. + - `end_block`: The ending block number of the range. + + ## Returns + - A list of log maps for the `L2ToL1Tx` event where binary values for hashes + and data are decoded into hex strings, containing detailed information about + each event within the specified block range. Returns an empty list if no + relevant logs are found. 
+ """ + @spec l2_to_l1_logs(FullBlock.block_number(), FullBlock.block_number()) :: [ + %{ + data: String, + index: non_neg_integer(), + first_topic: String, + second_topic: String, + third_topic: String, + fourth_topic: String, + address_hash: String, + transaction_hash: String, + block_hash: String, + block_number: FullBlock.block_number() + } + ] + def l2_to_l1_logs(start_block, end_block) + when is_integer(start_block) and start_block >= 0 and + is_integer(end_block) and start_block <= end_block do + arbsys_contract = Application.get_env(:indexer, Indexer.Fetcher.Arbitrum.Messaging)[:arbsys_contract] + + query = + from(log in Log, + where: + log.block_number >= ^start_block and + log.block_number <= ^end_block and + log.address_hash == ^arbsys_contract and + log.first_topic == ^@l2_to_l1_event + ) + + query + |> Repo.all(timeout: :infinity) + |> Enum.map(&logs_to_map/1) + end + + @doc """ + Returns 32-byte signature of the event `L2ToL1Tx` + """ + @spec l2_to_l1_event() :: <<_::528>> + def l2_to_l1_event, do: @l2_to_l1_event + + @doc """ + Determines whether a given range of block numbers has been fully indexed without any missing blocks. + + ## Parameters + - `start_block`: The starting block number of the range to check for completeness in indexing. + - `end_block`: The ending block number of the range. + + ## Returns + - `true` if the entire range from `start_block` to `end_block` is indexed and contains no missing + blocks, indicating no intersection with missing block ranges; `false` otherwise. + """ + @spec indexed_blocks?(FullBlock.block_number(), FullBlock.block_number()) :: boolean() + def indexed_blocks?(start_block, end_block) + when is_integer(start_block) and start_block >= 0 and + is_integer(end_block) and start_block <= end_block do + is_nil(MissingBlockRange.intersects_with_range(start_block, end_block)) + end + + @doc """ + Retrieves the block number for the closest block immediately after a given timestamp. 
+ + ## Parameters + - `timestamp`: The `DateTime` timestamp for which the closest subsequent block number is sought. + + ## Returns + - `{:ok, block_number}` where `block_number` is the number of the closest block that occurred + after the specified timestamp. + - `{:error, :not_found}` if no block is found after the specified timestamp. + """ + @spec closest_block_after_timestamp(DateTime.t()) :: {:error, :not_found} | {:ok, FullBlock.block_number()} + def closest_block_after_timestamp(timestamp) do + Chain.timestamp_to_block_number(timestamp, :after, false) + end + + defp lifecycle_transaction_to_map(tx) do + [:id, :hash, :block_number, :timestamp, :status] + |> db_record_to_map(tx) + end + + defp rollup_block_to_map(block) do + [:batch_number, :block_number, :confirmation_id] + |> db_record_to_map(block) + end + + defp message_to_map(message) do + [ + :direction, + :message_id, + :originator_address, + :originating_transaction_hash, + :originating_transaction_block_number, + :completion_transaction_hash, + :status + ] + |> db_record_to_map(message) + end + + defp logs_to_map(log) do + [ + :data, + :index, + :first_topic, + :second_topic, + :third_topic, + :fourth_topic, + :address_hash, + :transaction_hash, + :block_hash, + :block_number + ] + |> db_record_to_map(log, true) + end + + defp db_record_to_map(required_keys, record, encode \\ false) do + required_keys + |> Enum.reduce(%{}, fn key, record_as_map -> + raw_value = Map.get(record, key) + + # credo:disable-for-lines:5 Credo.Check.Refactor.Nesting + value = + case raw_value do + %Hash{} -> if(encode, do: Hash.to_string(raw_value), else: raw_value.bytes) + %Data{} -> if(encode, do: Data.to_string(raw_value), else: raw_value.bytes) + _ -> raw_value + end + + Map.put(record_as_map, key, value) + end) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/helper.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/helper.ex new file mode 100644 index 000000000000..1579b89009d4 --- /dev/null 
+++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/helper.ex @@ -0,0 +1,86 @@ +defmodule Indexer.Fetcher.Arbitrum.Utils.Helper do + @moduledoc """ + Provides utility functions to support the handling of Arbitrum-specific data fetching and processing in the indexer. + """ + + @doc """ + Increases a base duration by an amount specified in a map, if present. + + This function takes a map that may contain a duration key and a current duration value. + If the map contains a duration, it is added to the current duration; otherwise, the + current duration is returned unchanged. + + ## Parameters + - `data`: A map that may contain a `:duration` key with its value representing + the amount of time to add. + - `cur_duration`: The current duration value, to which the duration from the map + will be added if present. + + ## Returns + - The increased duration. + """ + @spec increase_duration( + %{optional(:duration) => non_neg_integer(), optional(any()) => any()}, + non_neg_integer() + ) :: non_neg_integer() + def increase_duration(data, cur_duration) + when is_map(data) and is_integer(cur_duration) and cur_duration >= 0 do + if Map.has_key?(data, :duration) do + data.duration + cur_duration + else + cur_duration + end + end + + @doc """ + Enriches lifecycle transaction entries with timestamps and status based on provided block information and finalization tracking. + + This function takes a map of lifecycle transactions and extends each entry with + a timestamp (extracted from a corresponding map of block numbers to timestamps) + and a status. The status is determined based on whether finalization tracking is enabled. + + ## Parameters + - `lifecycle_txs`: A map where each key is a transaction identifier, and the value is + a map containing at least the block number (`:block`). + - `blocks_to_ts`: A map linking block numbers to their corresponding timestamps. + - `track_finalization?`: A boolean flag indicating whether to mark transactions + as unfinalized or finalized. 
+
+    ## Returns
+    - An updated map of the same structure as `lifecycle_txs` but with each transaction extended to include:
+      - `timestamp`: The timestamp of the block in which the transaction is included.
+      - `status`: Either `:unfinalized` if `track_finalization?` is `true`, or `:finalized` otherwise.
+  """
+  @spec extend_lifecycle_txs_with_ts_and_status(
+          %{binary() => %{:block_number => non_neg_integer(), optional(any()) => any()}},
+          %{non_neg_integer() => DateTime.t()},
+          boolean()
+        ) :: %{
+          binary() => %{
+            :block_number => non_neg_integer(),
+            :timestamp => DateTime.t(),
+            :status => :unfinalized | :finalized,
+            optional(any()) => any()
+          }
+        }
+  def extend_lifecycle_txs_with_ts_and_status(lifecycle_txs, blocks_to_ts, track_finalization?)
+      when is_map(lifecycle_txs) and is_map(blocks_to_ts) and is_boolean(track_finalization?) do
+    lifecycle_txs
+    |> Map.keys()
+    |> Enum.reduce(%{}, fn tx_key, updated_txs ->
+      Map.put(
+        updated_txs,
+        tx_key,
+        Map.merge(lifecycle_txs[tx_key], %{
+          timestamp: blocks_to_ts[lifecycle_txs[tx_key].block_number],
+          status:
+            if track_finalization? do
+              :unfinalized
+            else
+              :finalized
+            end
+        })
+      )
+    end)
+  end
+end
diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/logging.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/logging.ex
new file mode 100644
index 000000000000..1dd8da71d9cf
--- /dev/null
+++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/logging.ex
@@ -0,0 +1,162 @@
+defmodule Indexer.Fetcher.Arbitrum.Utils.Logging do
+  @moduledoc """
+    Common logging functions for Indexer.Fetcher.Arbitrum fetchers
+  """
+  require Logger
+
+  @doc """
+    A helper function to log a message with debug severity. Uses `Logger.debug` facility.
+
+    ## Parameters
+    - `msg`: a message to log
+
+    ## Returns
+    `:ok`
+  """
+  @spec log_debug(any()) :: :ok
+  def log_debug(msg) do
+    Logger.debug(msg)
+  end
+
+  @doc """
+    A helper function to log a message with warning severity. Uses `Logger.warning` facility.
+
+    ## Parameters
+    - `msg`: a message to log
+
+    ## Returns
+    `:ok`
+  """
+  @spec log_warning(any()) :: :ok
+  def log_warning(msg) do
+    Logger.warning(msg)
+  end
+
+  @doc """
+    A helper function to log a message with info severity. Uses `Logger.info` facility.
+
+    ## Parameters
+    - `msg`: a message to log
+
+    ## Returns
+    `:ok`
+  """
+  @spec log_info(any()) :: :ok
+  def log_info(msg) do
+    Logger.info(msg)
+  end
+
+  @doc """
+    A helper function to log a message with error severity. Uses `Logger.error` facility.
+
+    ## Parameters
+    - `msg`: a message to log
+
+    ## Returns
+    `:ok`
+  """
+  @spec log_error(any()) :: :ok
+  def log_error(msg) do
+    Logger.error(msg)
+  end
+
+  @doc """
+    A helper function to log progress when handling data items in chunks.
+
+    ## Parameters
+    - `prefix`: A prefix for the logging message.
+    - `data_items_names`: A tuple with singular and plural of data items names
+    - `chunk`: A list of data items numbers in the current chunk.
+    - `current_progress`: The total number of data items handled up to this moment.
+    - `total`: The total number of data items across all chunks.
+
+    ## Returns
+    `:ok`
+
+    ## Examples:
+    - `log_details_chunk_handling("A message", {"batch", "batches"}, [1, 2, 3], 0, 10)` produces
+      `A message for batches 1..3. Progress: 30.00%`
+    - `log_details_chunk_handling("A message", {"batch", "batches"}, [2], 1, 10)` produces
+      `A message for batch #2.`
+    - `log_details_chunk_handling("A message", {"block", "blocks"}, [35], 0, 1)` produces
+      `A message for block #35.`
+    - `log_details_chunk_handling("A message", {"block", "blocks"}, [45, 50, 51, 52, 60], 0, 5)` produces
+      `A message for blocks 45, 50..52, 60.`
+  """
+  @spec log_details_chunk_handling(binary(), tuple(), list(), non_neg_integer(), non_neg_integer()) :: :ok
+  def log_details_chunk_handling(prefix, data_items_names, chunk, current_progress, total)
+      # credo:disable-for-previous-line Credo.Check.Refactor.CyclomaticComplexity
+      when is_binary(prefix) and is_tuple(data_items_names) and is_list(chunk) and
+             (is_integer(current_progress) and current_progress >= 0) and
+             (is_integer(total) and total > 0) do
+    chunk_length = length(chunk)
+
+    progress =
+      case chunk_length == total do
+        true ->
+          ""
+
+        false ->
+          percentage =
+            (current_progress + chunk_length)
+            |> Decimal.div(total)
+            |> Decimal.mult(100)
+            |> Decimal.round(2)
+            |> Decimal.to_string()
+
+          " Progress: #{percentage}%"
+      end
+
+    if chunk_length == 1 do
+      log_debug("#{prefix} for #{elem(data_items_names, 0)} ##{Enum.at(chunk, 0)}.")
+    else
+      log_debug(
+        "#{prefix} for #{elem(data_items_names, 1)} #{Enum.join(shorten_numbers_list(chunk), ", ")}.#{progress}"
+      )
+    end
+  end
+
+  # Transform list of numbers to the list of string where consequent values
+  # are combined to be displayed as a range.
+  #
+  # ## Parameters
+  # - `numbers_list`: a list of numbers to be shortened
+  #
+  # ## Returns
+  # `shorten_list` - resulting list after folding
+  #
+  # ## Examples:
+  # [1, 2, 3] => ["1..3"]
+  # [1, 3] => ["1", "3"]
+  # [1, 2] => ["1..2"]
+  # [1, 3, 4, 5] => ["1", "3..5"]
+  defp shorten_numbers_list(numbers_list) do
+    {shorten_list, _, _} =
+      numbers_list
+      |> Enum.sort()
+      |> Enum.reduce({[], nil, nil}, fn number, {shorten_list, prev_range_start, prev_number} ->
+        shorten_numbers_list_impl(number, shorten_list, prev_range_start, prev_number)
+      end)
+      |> then(fn {shorten_list, prev_range_start, prev_number} ->
+        shorten_numbers_list_impl(prev_number, shorten_list, prev_range_start, prev_number)
+      end)
+
+    Enum.reverse(shorten_list)
+  end
+
+  defp shorten_numbers_list_impl(number, shorten_list, prev_range_start, prev_number) do
+    cond do
+      is_nil(prev_number) ->
+        {[], number, number}
+
+      prev_number + 1 != number and prev_range_start == prev_number ->
+        {["#{prev_range_start}" | shorten_list], number, number}
+
+      prev_number + 1 != number ->
+        {["#{prev_range_start}..#{prev_number}" | shorten_list], number, number}
+
+      true ->
+        {shorten_list, prev_range_start, number}
+    end
+  end
+end
diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex
new file mode 100644
index 000000000000..2600fbc62222
--- /dev/null
+++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex
@@ -0,0 +1,391 @@
+defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do
+  @moduledoc """
+    Common functions to simplify RPC routines for Indexer.Fetcher.Arbitrum fetchers
+  """
+
+  import EthereumJSONRPC,
+    only: [json_rpc: 2, quantity_to_integer: 1, timestamp_to_datetime: 1]
+
+  alias EthereumJSONRPC.Transport
+  alias Indexer.Helper, as: IndexerHelper
+
+  @zero_hash "0000000000000000000000000000000000000000000000000000000000000000"
+  @rpc_resend_attempts 20
+
+  @selector_outbox "ce11e6ab"
+  @selector_sequencer_inbox "ee35f327"
+  @selector_bridge "e78cea92"
+
@rollup_contract_abi [ + %{ + "inputs" => [], + "name" => "outbox", + "outputs" => [ + %{ + "internalType" => "address", + "name" => "", + "type" => "address" + } + ], + "stateMutability" => "view", + "type" => "function" + }, + %{ + "inputs" => [], + "name" => "sequencerInbox", + "outputs" => [ + %{ + "internalType" => "address", + "name" => "", + "type" => "address" + } + ], + "stateMutability" => "view", + "type" => "function" + }, + %{ + "inputs" => [], + "name" => "bridge", + "outputs" => [ + %{ + "internalType" => "address", + "name" => "", + "type" => "address" + } + ], + "stateMutability" => "view", + "type" => "function" + } + ] + + @doc """ + Constructs a JSON RPC request to retrieve a transaction by its hash. + + ## Parameters + - `%{hash: tx_hash, id: id}`: A map containing the transaction hash (`tx_hash`) and + an identifier (`id`) for the request, which can be used later to establish + correspondence between requests and responses. + + ## Returns + - A `Transport.request()` struct representing the JSON RPC request for fetching + the transaction details associated with the given hash. + """ + @spec transaction_by_hash_request(%{hash: EthereumJSONRPC.hash(), id: non_neg_integer()}) :: Transport.request() + def transaction_by_hash_request(%{id: id, hash: tx_hash}) + when is_binary(tx_hash) and is_integer(id) do + EthereumJSONRPC.request(%{id: id, method: "eth_getTransactionByHash", params: [tx_hash]}) + end + + @doc """ + Retrieves specific contract addresses associated with Arbitrum rollup contract. + + This function fetches the addresses of the bridge, sequencer inbox, and outbox + contracts related to the specified Arbitrum rollup address. It invokes one of + the contract methods `bridge()`, `sequencerInbox()`, or `outbox()` based on + the `contracts_set` parameter to obtain the required information. + + ## Parameters + - `rollup_address`: The address of the Arbitrum rollup contract from which + information is being retrieved. 
+ - `contracts_set`: A symbol indicating the set of contracts to retrieve (`:bridge` + for the bridge contract, `:inbox_outbox` for the sequencer + inbox and outbox contracts). + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + + ## Returns + - A map with keys corresponding to the contract types (`:bridge`, `:sequencer_inbox`, + `:outbox`) and values representing the contract addresses. + """ + @spec get_contracts_for_rollup( + EthereumJSONRPC.address(), + :bridge | :inbox_outbox, + EthereumJSONRPC.json_rpc_named_arguments() + ) :: %{(:bridge | :sequencer_inbox | :outbox) => binary()} + def get_contracts_for_rollup(rollup_address, contracts_set, json_rpc_named_arguments) + + def get_contracts_for_rollup(rollup_address, :bridge, json_rpc_named_arguments) do + call_simple_getters_in_rollup_contract(rollup_address, [@selector_bridge], json_rpc_named_arguments) + end + + def get_contracts_for_rollup(rollup_address, :inbox_outbox, json_rpc_named_arguments) do + call_simple_getters_in_rollup_contract( + rollup_address, + [@selector_sequencer_inbox, @selector_outbox], + json_rpc_named_arguments + ) + end + + # Calls getter functions on a rollup contract and collects their return values. + # + # This function is designed to interact with a rollup contract and invoke specified getter methods. + # It creates a list of requests for each method ID, executes these requests with retries as needed, + # and then maps the results to the corresponding method IDs. + # + # ## Parameters + # - `rollup_address`: The address of the rollup contract to interact with. + # - `method_ids`: A list of method identifiers representing the getter functions to be called. + # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + # + # ## Returns + # - A map where each key is a method identifier converted to an atom, and each value is the + # response from calling the respective method on the contract. 
+ defp call_simple_getters_in_rollup_contract(rollup_address, method_ids, json_rpc_named_arguments) do + method_ids + |> Enum.map(fn method_id -> + %{ + contract_address: rollup_address, + method_id: method_id, + args: [] + } + end) + |> IndexerHelper.read_contracts_with_retries(@rollup_contract_abi, json_rpc_named_arguments, @rpc_resend_attempts) + |> Kernel.elem(0) + |> Enum.zip(method_ids) + |> Enum.reduce(%{}, fn {{:ok, [response]}, method_id}, retval -> + Map.put(retval, atomized_key(method_id), response) + end) + end + + @doc """ + Executes a batch of RPC calls and returns a list of response bodies. + + This function processes a list of RPC requests and returns only the response bodies, + discarding the request IDs. The function is designed for scenarios where only + the response data is required, and the association with request IDs is not needed. + + ## Parameters + - `requests_list`: A list of `Transport.request()` instances representing the RPC calls to be made. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + - `help_str`: A string that helps identify the request type in log messages, used for error logging. + + ## Returns + - A list containing the bodies of the RPC call responses. This list will include both + successful responses and errors encountered during the batch execution. The developer + must handle these outcomes as appropriate. 
+ """ + @spec make_chunked_request([Transport.request()], EthereumJSONRPC.json_rpc_named_arguments(), binary()) :: list() + def make_chunked_request(requests_list, json_rpc_named_arguments, help_str) + + def make_chunked_request([], _, _) do + [] + end + + def make_chunked_request(requests_list, json_rpc_named_arguments, help_str) + when is_list(requests_list) and is_binary(help_str) do + requests_list + |> make_chunked_request_keep_id(json_rpc_named_arguments, help_str) + |> Enum.map(fn %{result: resp_body} -> resp_body end) + end + + @doc """ + Executes a batch of RPC calls while preserving the original request IDs in the responses. + + This function processes a list of RPC requests in batches, retaining the association + between the requests and their responses to ensure that each response can be traced + back to its corresponding request. + + ## Parameters + - `requests_list`: A list of `Transport.request()` instances representing the RPC calls to be made. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + - `help_str`: A string that helps identify the request type in log messages, used for error logging. + + ## Returns + - A list of maps, each containing the `id` and `result` from the RPC response, maintaining + the same order and ID as the original request. If the batch execution encounters errors + that cannot be resolved after the defined number of retries, the function will log + the errors using the provided `help_str` for context and will return a list of responses + where each element is either the result of a successful call or an error description. + It is the responsibility of the developer to distinguish between successful responses + and errors and handle them appropriately. 
+ """ + @spec make_chunked_request_keep_id([Transport.request()], EthereumJSONRPC.json_rpc_named_arguments(), binary()) :: + [%{id: non_neg_integer(), result: any()}] + def make_chunked_request_keep_id(requests_list, json_rpc_named_arguments, help_str) + + def make_chunked_request_keep_id([], _, _) do + [] + end + + def make_chunked_request_keep_id(requests_list, json_rpc_named_arguments, help_str) + when is_list(requests_list) and is_binary(help_str) do + error_message_generator = &"Cannot call #{help_str}. Error: #{inspect(&1)}" + + {:ok, responses} = + IndexerHelper.repeated_batch_rpc_call( + requests_list, + json_rpc_named_arguments, + error_message_generator, + @rpc_resend_attempts + ) + + responses + end + + @doc """ + Executes a list of block requests, retrieves their timestamps, and returns a map of block numbers to timestamps. + + ## Parameters + - `blocks_requests`: A list of `Transport.request()` instances representing the block + information requests. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + - `chunk_size`: The number of requests to be processed in each batch, defining the size of the chunks. + + ## Returns + - A map where each key is a block number and each value is the corresponding timestamp. 
+ """ + @spec execute_blocks_requests_and_get_ts( + [Transport.request()], + EthereumJSONRPC.json_rpc_named_arguments(), + non_neg_integer() + ) :: %{EthereumJSONRPC.block_number() => DateTime.t()} + def execute_blocks_requests_and_get_ts(blocks_requests, json_rpc_named_arguments, chunk_size) + when is_list(blocks_requests) and is_integer(chunk_size) do + blocks_requests + |> Enum.chunk_every(chunk_size) + |> Enum.reduce(%{}, fn chunk, result -> + chunk + |> make_chunked_request(json_rpc_named_arguments, "eth_getBlockByNumber") + |> Enum.reduce(result, fn resp, result_inner -> + Map.put(result_inner, quantity_to_integer(resp["number"]), timestamp_to_datetime(resp["timestamp"])) + end) + end) + end + + @doc """ + Executes a list of transaction requests and retrieves the sender (from) addresses for each. + + ## Parameters + - `txs_requests`: A list of `Transport.request()` instances representing the transaction requests. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + - `chunk_size`: The number of requests to be processed in each batch, defining the size of the chunks. + + ## Returns + - A map where each key is a transaction hash and each value is the corresponding sender's address. 
+ """ + @spec execute_transactions_requests_and_get_from( + [Transport.request()], + EthereumJSONRPC.json_rpc_named_arguments(), + non_neg_integer() + ) :: [%{EthereumJSONRPC.hash() => EthereumJSONRPC.address()}] + def execute_transactions_requests_and_get_from(txs_requests, json_rpc_named_arguments, chunk_size) + when is_list(txs_requests) and is_integer(chunk_size) do + txs_requests + |> Enum.chunk_every(chunk_size) + |> Enum.reduce(%{}, fn chunk, result -> + chunk + |> make_chunked_request(json_rpc_named_arguments, "eth_getTransactionByHash") + |> Enum.reduce(result, fn resp, result_inner -> + Map.put(result_inner, resp["hash"], resp["from"]) + end) + end) + end + + @doc """ + Retrieves the block number associated with a given block hash using the Ethereum JSON RPC `eth_getBlockByHash` method, with retry logic for handling request failures. + + ## Parameters + - `hash`: The hash of the block for which the block number is requested. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + + ## Returns + - The block number if the block is found and successfully retrieved, or `nil` + if the block cannot be fetched or the block number is not present in the response. + """ + @spec get_block_number_by_hash(EthereumJSONRPC.hash(), EthereumJSONRPC.json_rpc_named_arguments()) :: + EthereumJSONRPC.block_number() | nil + def get_block_number_by_hash(hash, json_rpc_named_arguments) do + func = &do_get_block_number_by_hash/2 + args = [hash, json_rpc_named_arguments] + error_message = &"Cannot fetch block #{hash} or its number. 
Error: #{inspect(&1)}" + + case IndexerHelper.repeated_call(func, args, error_message, @rpc_resend_attempts) do + {:error, _} -> nil + {:ok, res} -> res + end + end + + defp do_get_block_number_by_hash(hash, json_rpc_named_arguments) do + # credo:disable-for-lines:3 Credo.Check.Refactor.PipeChainStart + result = + EthereumJSONRPC.request(%{id: 0, method: "eth_getBlockByHash", params: [hash, false]}) + |> json_rpc(json_rpc_named_arguments) + + with {:ok, block} <- result, + false <- is_nil(block), + number <- Map.get(block, "number"), + false <- is_nil(number) do + {:ok, quantity_to_integer(number)} + else + {:error, message} -> + {:error, message} + + true -> + {:error, "RPC returned nil."} + end + end + + @doc """ + Determines the starting block number for further operations with L1 based on configuration and network status. + + This function selects the starting block number for operations involving L1. + If the configured block number is `0`, it attempts to retrieve the safe block number + from the network. Should the safe block number not be available (if the endpoint does + not support this feature), the latest block number is used instead. If a non-zero block + number is configured, that number is used directly. + + ## Parameters + - `configured_number`: The block number configured for starting operations. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + + ## Returns + - The block number from which to start further operations with L1, determined based + on the provided configuration and network capabilities. 
+ """ + @spec get_l1_start_block(EthereumJSONRPC.block_number(), EthereumJSONRPC.json_rpc_named_arguments()) :: + EthereumJSONRPC.block_number() + def get_l1_start_block(configured_number, json_rpc_named_arguments) do + if configured_number == 0 do + {block_number, _} = IndexerHelper.get_safe_block(json_rpc_named_arguments) + block_number + else + configured_number + end + end + + @doc """ + Converts a transaction hash from its hexadecimal string representation to a binary format. + + ## Parameters + - `hash`: The transaction hash as a hex string, which can be `nil`. If `nil`, a default zero hash value is used. + + ## Returns + - The binary representation of the hash. If the input is `nil`, returns the binary form of the default zero hash. + """ + @spec string_hash_to_bytes_hash(EthereumJSONRPC.hash() | nil) :: binary() + def string_hash_to_bytes_hash(hash) do + hash + |> json_tx_id_to_hash() + |> Base.decode16!(case: :mixed) + end + + defp json_tx_id_to_hash(hash) do + case hash do + "0x" <> tx_hash -> tx_hash + nil -> @zero_hash + end + end + + @doc """ + Retrieves the hardcoded number of resend attempts for RPC calls. + + ## Returns + - The number of resend attempts. 
+ """ + @spec get_resend_attempts() :: non_neg_integer() + def get_resend_attempts do + @rpc_resend_attempts + end + + defp atomized_key(@selector_outbox), do: :outbox + defp atomized_key(@selector_sequencer_inbox), do: :sequencer_inbox + defp atomized_key(@selector_bridge), do: :bridge +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex new file mode 100644 index 000000000000..f34f037b4346 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex @@ -0,0 +1,284 @@ +defmodule Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 do + @moduledoc """ + Handles the discovery and processing of historical messages between Layer 1 (L1) and Layer 2 (L2) within an Arbitrum rollup. + + L1-to-L2 messages are discovered by requesting rollup transactions through RPC. + This is necessary because some Arbitrum-specific fields are not included in the + already indexed transactions within the database. + + L2-to-L1 messages are discovered by analyzing the logs of already indexed rollup + transactions. + """ + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1, log_info: 1] + + alias EthereumJSONRPC.Block.ByNumber, as: BlockByNumber + alias EthereumJSONRPC.Transaction, as: TransactionByRPC + + alias Explorer.Chain + + alias Indexer.Fetcher.Arbitrum.Messaging + alias Indexer.Fetcher.Arbitrum.Utils.{Db, Logging, Rpc} + + require Logger + + @doc """ + Initiates the discovery process for historical messages sent from L2 to L1 up to a specified block number. + + This function orchestrates the discovery of historical messages from L2 to L1 + by analyzing the rollup logs representing the `L2ToL1Tx` event. It determines + the starting block for the discovery process and verifies that the relevant + rollup block range has been indexed before proceeding with the discovery and + data import. 
During the import process, each message is assigned the + appropriate status based on the current rollup state. + + ## Parameters + - `end_block`: The ending block number up to which the discovery should occur. + If `nil` or negative, the function returns with no action taken. + - `state`: Contains the operational configuration, including the depth of + blocks to consider for the starting point of message discovery. + + ## Returns + - `{:ok, nil}`: If `end_block` is `nil`, indicating no discovery action was required. + - `{:ok, 0}`: If `end_block` is negative, indicating that the genesis of the block + chain was reached. + - `{:ok, start_block}`: Upon successful discovery of historical messages, where + `start_block` indicates the necessity to consider another + block range in the next iteration of message discovery. + - `{:ok, end_block + 1}`: If the required block range is not fully indexed, + indicating that the next iteration of message discovery + should start with the same block range. 
+ """ + @spec discover_historical_messages_from_l2(nil | integer(), %{ + :config => %{ + :messages_to_l2_blocks_depth => non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }) :: {:ok, nil | non_neg_integer()} + def discover_historical_messages_from_l2(end_block, state) + + def discover_historical_messages_from_l2(end_block, _) when is_nil(end_block) do + {:ok, nil} + end + + def discover_historical_messages_from_l2(end_block, _) + when is_integer(end_block) and end_block < 0 do + {:ok, 0} + end + + def discover_historical_messages_from_l2( + end_block, + %{config: %{messages_from_l2_blocks_depth: messages_from_l2_blocks_depth}} = _state + ) + when is_integer(end_block) and is_integer(messages_from_l2_blocks_depth) and + messages_from_l2_blocks_depth > 0 do + start_block = max(0, end_block - messages_from_l2_blocks_depth + 1) + + if Db.indexed_blocks?(start_block, end_block) do + do_discover_historical_messages_from_l2(start_block, end_block) + else + log_warning( + "Not able to discover historical messages from L2, some blocks in #{start_block}..#{end_block} not indexed" + ) + + {:ok, end_block + 1} + end + end + + # Discovers and processes historical messages sent from L2 to L1 within a specified rollup block range. + # + # This function fetches relevant rollup logs from the database representing messages sent + # from L2 to L1 (the `L2ToL1Tx` event) between the specified `start_block` and `end_block`. + # If any logs are found, they are used to construct message structures, which are then + # imported into the database. As part of the message construction, the appropriate status + # of the message (initialized, sent, or confirmed) is determined based on the current rollup + # state. + # + # ## Parameters + # - `start_block`: The starting block number for the discovery range. + # - `end_block`: The ending block number for the discovery range. 
+ # + # ## Returns + # - `{:ok, start_block}`: A tuple indicating successful processing, returning the initial + # starting block number. + defp do_discover_historical_messages_from_l2(start_block, end_block) do + log_info("Block range for discovery historical messages from L2: #{start_block}..#{end_block}") + + logs = Db.l2_to_l1_logs(start_block, end_block) + + unless logs == [] do + messages = + logs + |> Messaging.handle_filtered_l2_to_l1_messages(__MODULE__) + + import_to_db(messages) + end + + {:ok, start_block} + end + + @doc """ + Initiates the discovery of historical messages sent from L1 to L2 up to a specified block number. + + This function orchestrates the process of discovering historical L1-to-L2 messages within + a given rollup block range, based on the existence of the `requestId` field in the rollup + transaction body. Transactions are requested through RPC because already indexed + transactions from the database cannot be utilized; the `requestId` field is not included + in the transaction model. The function ensures that the block range has been indexed + before proceeding with message discovery and import. The imported messages are marked as + `:relayed`, as they represent completed actions from L1 to L2. + + ## Parameters + - `end_block`: The ending block number for the discovery operation. If `nil` or negative, + the function returns immediately with no action. + - `state`: The current state of the operation, containing configuration parameters + including `messages_to_l2_blocks_depth`, `chunk_size`, and JSON RPC connection settings. + + ## Returns + - `{:ok, nil}`: If `end_block` is `nil`, indicating no action was necessary. + - `{:ok, 0}`: If `end_block` is negative, indicating that the genesis of the block chain + was reached. + - `{:ok, start_block}`: On successful completion of historical message discovery, where + `start_block` indicates the necessity to consider another block + range in the next iteration of message discovery. 
+ - `{:ok, end_block + 1}`: If the required block range is not fully indexed, indicating + that the next iteration of message discovery should start with + the same block range. + """ + @spec discover_historical_messages_to_l2(nil | integer(), %{ + :config => %{ + :messages_to_l2_blocks_depth => non_neg_integer(), + :rollup_rpc => %{ + :chunk_size => non_neg_integer(), + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + optional(any()) => any() + }, + optional(any()) => any() + }, + optional(any()) => any() + }) :: {:ok, nil | non_neg_integer()} + def discover_historical_messages_to_l2(end_block, state) + + def discover_historical_messages_to_l2(end_block, _) when is_nil(end_block) do + {:ok, nil} + end + + def discover_historical_messages_to_l2(end_block, _) + when is_integer(end_block) and end_block < 0 do + {:ok, 0} + end + + def discover_historical_messages_to_l2(end_block, %{config: %{messages_to_l2_blocks_depth: _} = config} = _state) + when is_integer(end_block) do + start_block = max(0, end_block - config.messages_to_l2_blocks_depth + 1) + + # Although indexing blocks is not necessary to determine the completion of L1-to-L2 messages, + # for database consistency, it is preferable to delay marking these messages as completed. + if Db.indexed_blocks?(start_block, end_block) do + do_discover_historical_messages_to_l2(start_block, end_block, config) + else + log_warning( + "Not able to discover historical messages to L2, some blocks in #{start_block}..#{end_block} not indexed" + ) + + {:ok, end_block + 1} + end + end + + # The function iterates through the block range in chunks, making RPC calls to fetch rollup block + # data and extract transactions. Each transaction is filtered for L1-to-L2 messages based on + # existence of `requestId` field in the transaction body, and then imported into the database. + # The imported messages are marked as `:relayed` as they represent completed actions from L1 to L2. 
+ # + # Already indexed transactions from the database cannot be used because the `requestId` field is + # not included in the transaction model. + # + # ## Parameters + # - `start_block`: The starting block number for the discovery range. + # - `end_block`: The ending block number for the discovery range. + # - `config`: The configuration map containing settings for RPC communication and chunk size. + # + # ## Returns + # - `{:ok, start_block}`: A tuple indicating successful processing, returning the initial + # starting block number. + defp do_discover_historical_messages_to_l2( + start_block, + end_block, + %{rollup_rpc: %{chunk_size: chunk_size, json_rpc_named_arguments: json_rpc_named_arguments}} = _config + ) do + log_info("Block range for discovery historical messages to L2: #{start_block}..#{end_block}") + + {messages, _} = + start_block..end_block + |> Enum.chunk_every(chunk_size) + |> Enum.reduce({[], 0}, fn chunk, {messages_acc, chunks_counter} -> + Logging.log_details_chunk_handling( + "Collecting rollup data", + {"block", "blocks"}, + chunk, + chunks_counter, + end_block - start_block + 1 + ) + + # Since DB does not contain the field RequestId specific to Arbitrum + # all transactions will be requested from the rollup RPC endpoint. 
+ # The catchup process intended to be run once and only for the BS instance + # which are already exist, so it does not make sense to introduce + # the new field in DB + requests = build_block_by_number_requests(chunk) + + messages = + requests + |> Rpc.make_chunked_request(json_rpc_named_arguments, "eth_getBlockByNumber") + |> get_transactions() + |> Enum.map(fn tx -> + tx + |> TransactionByRPC.to_elixir() + |> TransactionByRPC.elixir_to_params() + end) + |> Messaging.filter_l1_to_l2_messages(false) + + {messages ++ messages_acc, chunks_counter + length(chunk)} + end) + + unless messages == [] do + log_info("#{length(messages)} completions of L1-to-L2 messages will be imported") + end + + import_to_db(messages) + + {:ok, start_block} + end + + # Constructs a list of `eth_getBlockByNumber` requests for a given list of block numbers. + defp build_block_by_number_requests(block_numbers) do + block_numbers + |> Enum.reduce([], fn block_num, requests_list -> + [ + BlockByNumber.request(%{ + id: block_num, + number: block_num + }) + | requests_list + ] + end) + end + + # Aggregates transactions from a list of blocks, combining them into a single list. + defp get_transactions(blocks_by_rpc) do + blocks_by_rpc + |> Enum.reduce([], fn block_by_rpc, txs -> + block_by_rpc["transactions"] ++ txs + end) + end + + # Imports a list of messages into the database. + defp import_to_db(messages) do + {:ok, _} = + Chain.import(%{ + arbitrum_messages: %{params: messages}, + timeout: :infinity + }) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex new file mode 100644 index 000000000000..9a5c457f3571 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex @@ -0,0 +1,74 @@ +defmodule Indexer.Fetcher.Arbitrum.Workers.L1Finalization do + @moduledoc """ + Oversees the finalization of lifecycle transactions on Layer 1 (L1) for Arbitrum rollups. 
+ + This module is tasked with monitoring and updating the status of Arbitrum + lifecycle transactions that are related to the rollup process. It ensures that + transactions which have been confirmed up to the 'safe' block number on L1 are + marked as 'finalized' within the system's database. + """ + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1] + + alias Indexer.Helper, as: IndexerHelper + alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + + alias Explorer.Chain + + require Logger + + @doc """ + Monitors and updates the status of lifecycle transactions related an Arbitrum rollup to 'finalized'. + + This function retrieves the current 'safe' block number from L1 and identifies + lifecycle transactions that are not yet finalized up to this block. It then + updates the status of these transactions to 'finalized' and imports the updated + data into the database. + + ## Parameters + - A map containing: + - `config`: Configuration settings including JSON RPC arguments for L1 used + to fetch the 'safe' block number. 
+ + ## Returns + - `:ok` + """ + @spec monitor_lifecycle_txs(%{ + :config => %{ + :l1_rpc => %{ + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + optional(any()) => any() + }, + optional(any()) => any() + }, + optional(any()) => any() + }) :: :ok + def monitor_lifecycle_txs(%{config: %{l1_rpc: %{json_rpc_named_arguments: json_rpc_named_arguments}}} = _state) do + {:ok, safe_block} = + IndexerHelper.get_block_number_by_tag( + "safe", + json_rpc_named_arguments, + Rpc.get_resend_attempts() + ) + + lifecycle_txs = Db.lifecycle_unfinalized_transactions(safe_block) + + if length(lifecycle_txs) > 0 do + log_info("Discovered #{length(lifecycle_txs)} lifecycle transaction to be finalized") + + updated_lifecycle_txs = + lifecycle_txs + |> Enum.map(fn tx -> + Map.put(tx, :status, :finalized) + end) + + {:ok, _} = + Chain.import(%{ + arbitrum_lifecycle_transactions: %{params: updated_lifecycle_txs}, + timeout: :infinity + }) + end + + :ok + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex new file mode 100644 index 000000000000..1f5c2bf58511 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex @@ -0,0 +1,975 @@ +defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do + @moduledoc """ + Manages the discovery and importation of new and historical batches of transactions for an Arbitrum rollup. + + This module orchestrates the discovery of batches of transactions processed + through the Arbitrum Sequencer. It distinguishes between new batches currently + being created and historical batches processed in the past but not yet imported + into the database. 
+ + The process involves fetching logs for the `SequencerBatchDelivered` event + emitted by the Arbitrum `SequencerInbox` contract, processing these logs to + extract batch details, and then building the link between batches and the + corresponding rollup blocks and transactions. It also discovers those + cross-chain messages initiated in rollup blocks linked with the new batches + and updates the status of messages to consider them as committed (`:sent`). + + For any blocks or transactions missing in the database, data is requested in + chunks from the rollup RPC endpoint by `eth_getBlockByNumber`. Additionally, + to complete batch details and lifecycle transactions, RPC calls to + `eth_getTransactionByHash` and `eth_getBlockByNumber` on L1 are made in chunks + for the necessary information not available in the logs. + """ + + alias ABI.{FunctionSelector, TypeDecoder} + + import EthereumJSONRPC, only: [quantity_to_integer: 1] + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1, log_debug: 1] + + alias EthereumJSONRPC.Block.ByNumber, as: BlockByNumber + + alias Indexer.Helper, as: IndexerHelper + alias Indexer.Fetcher.Arbitrum.Utils.{Db, Logging, Rpc} + + alias Explorer.Chain + + require Logger + + # keccak256("SequencerBatchDelivered(uint256,bytes32,bytes32,bytes32,uint256,(uint64,uint64,uint64,uint64),uint8)") + @message_sequencer_batch_delivered "0x7394f4a19a13c7b92b5bb71033245305946ef78452f7b4986ac1390b5df4ebd7" + + @doc """ + Discovers and imports new batches of rollup transactions within the current L1 block range. + + This function determines the L1 block range for discovering new batches of rollup + transactions. It retrieves logs representing SequencerBatchDelivered events + emitted by the SequencerInbox contract within this range. The logs are processed + to identify new batches and their corresponding details. 
Comprehensive data + structures for these batches, along with their lifecycle transactions, rollup + blocks, and rollup transactions, are constructed. In addition, the function + updates the status of L2-to-L1 messages that have been committed within these new + batches. All discovered and processed data are then imported into the database. + The process targets only the batches that have not been previously processed, + thereby enhancing efficiency. + + ## Parameters + - A map containing: + - `config`: Configuration settings including RPC configurations, SequencerInbox + address, a shift for the message to block number mapping, and + a limit for new batches discovery. + - `data`: Contains the starting block number for new batch discovery. + + ## Returns + - `{:ok, end_block}`: On successful discovery and processing, where `end_block` + indicates the necessity to consider the next block range + in the following iteration of new batch discovery. + - `{:ok, start_block - 1}`: If there are no new blocks to be processed, + indicating that the current start block should be + reconsidered in the next iteration. 
+ """ + @spec discover_new_batches(%{ + :config => %{ + :l1_rpc => %{ + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :logs_block_range => non_neg_integer(), + optional(any()) => any() + }, + :l1_sequencer_inbox_address => binary(), + :messages_to_blocks_shift => non_neg_integer(), + :new_batches_limit => non_neg_integer(), + :rollup_rpc => %{ + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :chunk_size => non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }, + :data => %{:new_batches_start_block => non_neg_integer(), optional(any()) => any()}, + optional(any()) => any() + }) :: {:ok, non_neg_integer()} + def discover_new_batches( + %{ + config: %{ + l1_rpc: l1_rpc_config, + rollup_rpc: rollup_rpc_config, + l1_sequencer_inbox_address: sequencer_inbox_address, + messages_to_blocks_shift: messages_to_blocks_shift, + new_batches_limit: new_batches_limit + }, + data: %{new_batches_start_block: start_block} + } = _state + ) do + # Requesting the "latest" block instead of "safe" allows to catch new batches + # without latency. + {:ok, latest_block} = + IndexerHelper.get_block_number_by_tag( + "latest", + l1_rpc_config.json_rpc_named_arguments, + Rpc.get_resend_attempts() + ) + + end_block = min(start_block + l1_rpc_config.logs_block_range - 1, latest_block) + + if start_block <= end_block do + log_info("Block range for new batches discovery: #{start_block}..#{end_block}") + + discover( + sequencer_inbox_address, + start_block, + end_block, + new_batches_limit, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) + + {:ok, end_block} + else + {:ok, start_block - 1} + end + end + + @doc """ + Discovers and imports historical batches of rollup transactions within a specified block range. + + This function determines the L1 block range for discovering historical batches + of rollup transactions. 
Within this range, it retrieves logs representing the + SequencerBatchDelivered events emitted by the SequencerInbox contract. These + logs are processed to identify the batches and their details. The function then + constructs comprehensive data structures for batches, lifecycle transactions, + rollup blocks, and rollup transactions. Additionally, it identifies L2-to-L1 + messages that have been committed within these batches and updates their status. + All discovered and processed data are then imported into the database, with the + process targeting only previously undiscovered batches to enhance efficiency. + + ## Parameters + - A map containing: + - `config`: Configuration settings including the L1 rollup initialization block, + RPC configurations, SequencerInbox address, a shift for the message + to block number mapping, and a limit for new batches discovery. + - `data`: Contains the ending block number for the historical batch discovery. + + ## Returns + - `{:ok, start_block}`: On successful discovery and processing, where `start_block` + is the calculated starting block for the discovery range, + indicating the need to consider another block range in the + next iteration of historical batch discovery. + - `{:ok, l1_rollup_init_block}`: If the discovery process has reached the rollup + initialization block, indicating that all batches + up to the rollup origins have been discovered and + no further action is needed. 
+ """ + @spec discover_historical_batches(%{ + :config => %{ + :l1_rollup_init_block => non_neg_integer(), + :l1_rpc => %{ + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :logs_block_range => non_neg_integer(), + optional(any()) => any() + }, + :l1_sequencer_inbox_address => binary(), + :messages_to_blocks_shift => non_neg_integer(), + :new_batches_limit => non_neg_integer(), + :rollup_rpc => %{ + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :chunk_size => non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }, + :data => %{:historical_batches_end_block => any(), optional(any()) => any()}, + optional(any()) => any() + }) :: {:ok, non_neg_integer()} + def discover_historical_batches( + %{ + config: %{ + l1_rpc: l1_rpc_config, + rollup_rpc: rollup_rpc_config, + l1_sequencer_inbox_address: sequencer_inbox_address, + messages_to_blocks_shift: messages_to_blocks_shift, + l1_rollup_init_block: l1_rollup_init_block, + new_batches_limit: new_batches_limit + }, + data: %{historical_batches_end_block: end_block} + } = _state + ) do + if end_block >= l1_rollup_init_block do + start_block = max(l1_rollup_init_block, end_block - l1_rpc_config.logs_block_range + 1) + + log_info("Block range for historical batches discovery: #{start_block}..#{end_block}") + + discover_historical( + sequencer_inbox_address, + start_block, + end_block, + new_batches_limit, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) + + {:ok, start_block} + else + {:ok, l1_rollup_init_block} + end + end + + # Initiates the discovery process for batches within a specified block range. + # + # Invokes the actual discovery process for new batches by calling `do_discover` + # with the provided parameters. + # + # ## Parameters + # - `sequencer_inbox_address`: The SequencerInbox contract address. + # - `start_block`: The starting block number for discovery. 
+ # - `end_block`: The ending block number for discovery. + # - `new_batches_limit`: Limit of new batches to process in one iteration. + # - `messages_to_blocks_shift`: Shift value for message to block number mapping. + # - `l1_rpc_config`: Configuration for L1 RPC calls. + # - `rollup_rpc_config`: Configuration for rollup RPC calls. + # + # ## Returns + # - N/A + defp discover( + sequencer_inbox_address, + start_block, + end_block, + new_batches_limit, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) do + do_discover( + sequencer_inbox_address, + start_block, + end_block, + new_batches_limit, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) + end + + # Initiates the historical discovery process for batches within a specified block range. + # + # Calls `do_discover` with parameters reversed for start and end blocks to + # process historical data. + # + # ## Parameters + # - `sequencer_inbox_address`: The SequencerInbox contract address. + # - `start_block`: The starting block number for discovery. + # - `end_block`: The ending block number for discovery. + # - `new_batches_limit`: Limit of new batches to process in one iteration. + # - `messages_to_blocks_shift`: Shift value for message to block number mapping. + # - `l1_rpc_config`: Configuration for L1 RPC calls. + # - `rollup_rpc_config`: Configuration for rollup RPC calls. + # + # ## Returns + # - N/A + defp discover_historical( + sequencer_inbox_address, + start_block, + end_block, + new_batches_limit, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) do + do_discover( + sequencer_inbox_address, + end_block, + start_block, + new_batches_limit, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) + end + + # Performs the discovery of new or historical batches within a specified block range, + # processing and importing the relevant data into the database. 
+ # + # This function retrieves SequencerBatchDelivered event logs from the specified block range + # and processes these logs to identify new batches and their corresponding details. It then + # constructs comprehensive data structures for batches, lifecycle transactions, rollup + # blocks, and rollup transactions. Additionally, it identifies any L2-to-L1 messages that + # have been committed within these batches and updates their status. All discovered and + # processed data are then imported into the database. + # + # ## Parameters + # - `sequencer_inbox_address`: The SequencerInbox contract address used to filter logs. + # - `start_block`: The starting block number for the discovery range. + # - `end_block`: The ending block number for the discovery range. + # - `new_batches_limit`: The maximum number of new batches to process in one iteration. + # - `messages_to_blocks_shift`: The value used to align message counts with rollup block numbers. + # - `l1_rpc_config`: RPC configuration parameters for L1. + # - `rollup_rpc_config`: RPC configuration parameters for rollup data. + # + # ## Returns + # - N/A + defp do_discover( + sequencer_inbox_address, + start_block, + end_block, + new_batches_limit, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) do + raw_logs = + get_logs_new_batches( + min(start_block, end_block), + max(start_block, end_block), + sequencer_inbox_address, + l1_rpc_config.json_rpc_named_arguments + ) + + logs = + if end_block >= start_block do + raw_logs + else + Enum.reverse(raw_logs) + end + + # Discovered logs are divided into chunks to ensure progress + # in batch discovery, even if an error interrupts the fetching process. 
+ logs + |> Enum.chunk_every(new_batches_limit) + |> Enum.each(fn chunked_logs -> + {batches, lifecycle_txs, rollup_blocks, rollup_txs, committed_txs} = + handle_batches_from_logs( + chunked_logs, + messages_to_blocks_shift, + l1_rpc_config, + rollup_rpc_config + ) + + {:ok, _} = + Chain.import(%{ + arbitrum_lifecycle_transactions: %{params: lifecycle_txs}, + arbitrum_l1_batches: %{params: batches}, + arbitrum_batch_blocks: %{params: rollup_blocks}, + arbitrum_batch_transactions: %{params: rollup_txs}, + arbitrum_messages: %{params: committed_txs}, + timeout: :infinity + }) + end) + end + + # Fetches logs for SequencerBatchDelivered events from the SequencerInbox contract within a block range. + # + # Retrieves logs that correspond to SequencerBatchDelivered events, specifically + # from the SequencerInbox contract, between the specified block numbers. + # + # ## Parameters + # - `start_block`: The starting block number for log retrieval. + # - `end_block`: The ending block number for log retrieval. + # - `sequencer_inbox_address`: The address of the SequencerInbox contract. + # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + # + # ## Returns + # - A list of logs for SequencerBatchDelivered events within the specified block range. + defp get_logs_new_batches(start_block, end_block, sequencer_inbox_address, json_rpc_named_arguments) + when start_block <= end_block do + {:ok, logs} = + IndexerHelper.get_logs( + start_block, + end_block, + sequencer_inbox_address, + [@message_sequencer_batch_delivered], + json_rpc_named_arguments + ) + + if length(logs) > 0 do + log_debug("Found #{length(logs)} SequencerBatchDelivered logs") + end + + logs + end + + # Processes logs to extract batch information and prepare it for database import. + # + # This function analyzes SequencerBatchDelivered event logs to identify new batches + # and retrieves their details, avoiding the reprocessing of batches already known + # in the database. 
It enriches the details of new batches with data from corresponding + # L1 transactions and blocks, including timestamps and block ranges. The function + # then prepares batches, associated rollup blocks and transactions, and lifecycle + # transactions for database import. Additionally, L2-to-L1 messages initiated in the + # rollup blocks associated with the discovered batches are retrieved from the database, + # marked as `:sent`, and prepared for database import. + # + # ## Parameters + # - `logs`: The list of SequencerBatchDelivered event logs. + # - `msg_to_block_shift`: The shift value for mapping batch messages to block numbers. + # - `l1_rpc_config`: The RPC configuration for L1 requests. + # - `rollup_rpc_config`: The RPC configuration for rollup data requests. + # + # ## Returns + # - A tuple containing lists of batches, lifecycle transactions, rollup blocks, + # rollup transactions, and committed messages (with the status `:sent`), all + # ready for database import. + defp handle_batches_from_logs( + logs, + msg_to_block_shift, + %{ + json_rpc_named_arguments: json_rpc_named_arguments, + chunk_size: chunk_size + } = l1_rpc_config, + rollup_rpc_config + ) do + existing_batches = + logs + |> parse_logs_to_get_batch_numbers() + |> Db.batches_exist() + + {batches, txs_requests, blocks_requests} = parse_logs_for_new_batches(logs, existing_batches) + + blocks_to_ts = Rpc.execute_blocks_requests_and_get_ts(blocks_requests, json_rpc_named_arguments, chunk_size) + + {lifecycle_txs_wo_indices, batches_to_import} = + execute_tx_requests_parse_txs_calldata(txs_requests, msg_to_block_shift, blocks_to_ts, batches, l1_rpc_config) + + {blocks_to_import, rollup_txs_to_import} = get_rollup_blocks_and_transactions(batches_to_import, rollup_rpc_config) + + lifecycle_txs = + lifecycle_txs_wo_indices + |> Db.get_indices_for_l1_transactions() + + tx_counts_per_batch = batches_to_rollup_txs_amounts(rollup_txs_to_import) + + batches_list_to_import = + batches_to_import + |> 
Map.values() + |> Enum.reduce([], fn batch, updated_batches_list -> + [ + batch + |> Map.put(:commitment_id, get_l1_tx_id_by_hash(lifecycle_txs, batch.tx_hash)) + |> Map.put( + :transactions_count, + case tx_counts_per_batch[batch.number] do + nil -> 0 + value -> value + end + ) + |> Map.drop([:tx_hash]) + | updated_batches_list + ] + end) + + committed_txs = + blocks_to_import + |> Map.keys() + |> Enum.max() + |> get_committed_l2_to_l1_messages() + + {batches_list_to_import, Map.values(lifecycle_txs), Map.values(blocks_to_import), rollup_txs_to_import, + committed_txs} + end + + # Extracts batch numbers from logs of SequencerBatchDelivered events. + defp parse_logs_to_get_batch_numbers(logs) do + logs + |> Enum.map(fn event -> + {batch_num, _, _} = sequencer_batch_delivered_event_parse(event) + batch_num + end) + end + + # Parses logs representing SequencerBatchDelivered events to identify new batches. + # + # This function sifts through logs of SequencerBatchDelivered events, extracts the + # necessary data, and assembles a map of new batch descriptions. Additionally, it + # prepares RPC `eth_getTransactionByHash` and `eth_getBlockByNumber` requests to + # fetch details not present in the logs. To minimize subsequent RPC calls, only + # batches not previously known (i.e., absent in `existing_batches`) are processed. + # + # ## Parameters + # - `logs`: A list of event logs to be processed. + # - `existing_batches`: A list of batch numbers already processed. + # + # ## Returns + # - A tuple containing: + # - A map of new batch descriptions, which are not yet ready for database import. + # - A list of RPC `eth_getTransactionByHash` requests for fetching details of + # the L1 transactions associated with these batches. + # - A list of RPC requests to fetch details of the L1 blocks where these batches + # were included. 
+ defp parse_logs_for_new_batches(logs, existing_batches) do + {batches, txs_requests, blocks_requests} = + logs + |> Enum.reduce({%{}, [], %{}}, fn event, {batches, txs_requests, blocks_requests} -> + {batch_num, before_acc, after_acc} = sequencer_batch_delivered_event_parse(event) + + tx_hash_raw = event["transactionHash"] + tx_hash = Rpc.string_hash_to_bytes_hash(tx_hash_raw) + blk_num = quantity_to_integer(event["blockNumber"]) + + if batch_num in existing_batches do + {batches, txs_requests, blocks_requests} + else + updated_batches = + Map.put( + batches, + batch_num, + %{ + number: batch_num, + before_acc: before_acc, + after_acc: after_acc, + tx_hash: tx_hash + } + ) + + updated_txs_requests = [ + Rpc.transaction_by_hash_request(%{id: 0, hash: tx_hash_raw}) + | txs_requests + ] + + updated_blocks_requests = + Map.put( + blocks_requests, + blk_num, + BlockByNumber.request(%{id: 0, number: blk_num}, false, true) + ) + + log_info("New batch #{batch_num} found in #{tx_hash_raw}") + + {updated_batches, updated_txs_requests, updated_blocks_requests} + end + end) + + {batches, txs_requests, Map.values(blocks_requests)} + end + + # Parses SequencerBatchDelivered event to get batch sequence number and associated accumulators + defp sequencer_batch_delivered_event_parse(event) do + [_, batch_sequence_number, before_acc, after_acc] = event["topics"] + + {quantity_to_integer(batch_sequence_number), before_acc, after_acc} + end + + # Executes transaction requests and parses the calldata to extract batch data. + # + # This function processes a list of RPC `eth_getTransactionByHash` requests, extracts + # and decodes the calldata from the transactions to obtain batch details. It updates + # the provided batch map with block ranges for new batches and constructs a map of + # lifecycle transactions with their timestamps and finalization status. + # + # ## Parameters + # - `txs_requests`: The list of RPC requests to fetch transaction data. 
+ # - `msg_to_block_shift`: The shift value to adjust the message count to the correct + # rollup block numbers. + # - `blocks_to_ts`: A map of block numbers to their timestamps, required to complete + # data for corresponding lifecycle transactions. + # - `batches`: The current batch data to be updated. + # - A configuration map containing JSON RPC arguments, a track finalization flag, + # and a chunk size for batch processing. + # + # ## Returns + # - A tuple containing: + # - A map of lifecycle (L1) transactions, which are not yet compatible with + # database import and require further processing. + # - An updated map of batch descriptions, also requiring further processing + # before database import. + defp execute_tx_requests_parse_txs_calldata(txs_requests, msg_to_block_shift, blocks_to_ts, batches, %{ + json_rpc_named_arguments: json_rpc_named_arguments, + track_finalization: track_finalization?, + chunk_size: chunk_size + }) do + txs_requests + |> Enum.chunk_every(chunk_size) + |> Enum.reduce({%{}, batches}, fn chunk, {l1_txs, updated_batches} -> + chunk + # each eth_getTransactionByHash will take time since it returns entire batch + # in `input` which is heavy because contains dozens of rollup blocks + |> Rpc.make_chunked_request(json_rpc_named_arguments, "eth_getTransactionByHash") + |> Enum.reduce({l1_txs, updated_batches}, fn resp, {txs_map, batches_map} -> + block_num = quantity_to_integer(resp["blockNumber"]) + tx_hash = Rpc.string_hash_to_bytes_hash(resp["hash"]) + + # Although they are called messages in the functions' ABI, in fact they are + # rollup blocks + {batch_num, prev_message_count, new_message_count} = + add_sequencer_l2_batch_from_origin_calldata_parse(resp["input"]) + + # In some cases extracted numbers for messages does not linked directly + # with rollup blocks, for this, the numbers are shifted by a value specific + # for particular rollup + updated_batches_map = + Map.put( + batches_map, + batch_num, + 
Map.merge(batches_map[batch_num], %{ + start_block: prev_message_count + msg_to_block_shift, + end_block: new_message_count + msg_to_block_shift - 1 + }) + ) + + updated_txs_map = + Map.put(txs_map, tx_hash, %{ + hash: tx_hash, + block_number: block_num, + timestamp: blocks_to_ts[block_num], + status: + if track_finalization? do + :unfinalized + else + :finalized + end + }) + + {updated_txs_map, updated_batches_map} + end) + end) + end + + # Parses calldata of `addSequencerL2BatchFromOrigin` or `addSequencerL2BatchFromBlobs` + # functions to extract batch information. + defp add_sequencer_l2_batch_from_origin_calldata_parse(calldata) do + case calldata do + "0x8f111f3c" <> encoded_params -> + # addSequencerL2BatchFromOrigin(uint256 sequenceNumber, bytes calldata data, uint256 afterDelayedMessagesRead, address gasRefunder, uint256 prevMessageCount, uint256 newMessageCount) + [sequence_number, _data, _after_delayed_messages_read, _gas_refunder, prev_message_count, new_message_count] = + TypeDecoder.decode( + Base.decode16!(encoded_params, case: :lower), + %FunctionSelector{ + function: "addSequencerL2BatchFromOrigin", + types: [ + {:uint, 256}, + :bytes, + {:uint, 256}, + :address, + {:uint, 256}, + {:uint, 256} + ] + } + ) + + {sequence_number, prev_message_count, new_message_count} + + "0x3e5aa082" <> encoded_params -> + # addSequencerL2BatchFromBlobs(uint256 sequenceNumber, uint256 afterDelayedMessagesRead, address gasRefunder, uint256 prevMessageCount, uint256 newMessageCount) + [sequence_number, _after_delayed_messages_read, _gas_refunder, prev_message_count, new_message_count] = + TypeDecoder.decode( + Base.decode16!(encoded_params, case: :lower), + %FunctionSelector{ + function: "addSequencerL2BatchFromBlobs", + types: [ + {:uint, 256}, + {:uint, 256}, + :address, + {:uint, 256}, + {:uint, 256} + ] + } + ) + + {sequence_number, prev_message_count, new_message_count} + end + end + + # Retrieves rollup blocks and transactions for a list of batches. 
+ # + # This function extracts rollup block ranges from each batch's data to determine + # the required blocks. It then fetches existing rollup blocks and transactions from + # the database and recovers any missing data through RPC if necessary. + # + # ## Parameters + # - `batches`: A list of batches, each containing rollup block ranges associated + # with the batch. + # - `rollup_rpc_config`: Configuration for RPC calls to fetch rollup data. + # + # ## Returns + # - A tuple containing: + # - A map of rollup blocks, ready for database import. + # - A list of rollup transactions, ready for database import. + defp get_rollup_blocks_and_transactions( + batches, + rollup_rpc_config + ) do + blocks_to_batches = unwrap_rollup_block_ranges(batches) + + required_blocks_numbers = Map.keys(blocks_to_batches) + log_info("Identified #{length(required_blocks_numbers)} rollup blocks") + + {blocks_to_import_map, txs_to_import_list} = + get_rollup_blocks_and_txs_from_db(required_blocks_numbers, blocks_to_batches) + + # While it's not entirely aligned with data integrity principles to recover + # rollup blocks and transactions from RPC that are not yet indexed, it's + # a practical compromise to facilitate the progress of batch discovery. Given + # the potential high frequency of new batch appearances and the substantial + # volume of blocks and transactions, prioritizing discovery process advancement + # is deemed reasonable. + {blocks_to_import, txs_to_import} = + recover_data_if_necessary( + blocks_to_import_map, + txs_to_import_list, + required_blocks_numbers, + blocks_to_batches, + rollup_rpc_config + ) + + log_info( + "Found #{length(Map.keys(blocks_to_import))} rollup blocks and #{length(txs_to_import)} rollup transactions in DB" + ) + + {blocks_to_import, txs_to_import} + end + + # Unwraps rollup block ranges from batch data to create a block-to-batch number map. 
+ # + # ## Parameters + # - `batches`: A map where keys are batch identifiers and values are structs + # containing the start and end blocks of each batch. + # + # ## Returns + # - A map where each key is a rollup block number and its value is the + # corresponding batch number. + defp unwrap_rollup_block_ranges(batches) do + batches + |> Map.values() + |> Enum.reduce(%{}, fn batch, b_2_b -> + batch.start_block..batch.end_block + |> Enum.reduce(b_2_b, fn block_num, b_2_b_inner -> + Map.put(b_2_b_inner, block_num, batch.number) + end) + end) + end + + # Retrieves rollup blocks and transactions from the database based on given block numbers. + # + # This function fetches rollup blocks from the database using provided block numbers. + # For each block, it constructs a map of rollup block details and a list of + # transactions, including the batch number from `blocks_to_batches` mapping, block + # hash, and transaction hash. + # + # ## Parameters + # - `rollup_blocks_numbers`: A list of rollup block numbers to retrieve from the + # database. + # - `blocks_to_batches`: A mapping from block numbers to batch numbers. + # + # ## Returns + # - A tuple containing: + # - A map of rollup blocks associated with the batch numbers, ready for + # database import. + # - A list of transactions, each associated with its respective rollup block + # and batch number, ready for database import. 
+ defp get_rollup_blocks_and_txs_from_db(rollup_blocks_numbers, blocks_to_batches) do + rollup_blocks_numbers + |> Db.rollup_blocks() + |> Enum.reduce({%{}, []}, fn block, {blocks_map, txs_list} -> + batch_num = blocks_to_batches[block.number] + + updated_txs_list = + block.transactions + |> Enum.reduce(txs_list, fn tx, acc -> + [%{tx_hash: tx.hash.bytes, batch_number: batch_num} | acc] + end) + + updated_blocks_map = + blocks_map + |> Map.put(block.number, %{ + block_number: block.number, + batch_number: batch_num, + confirmation_id: nil + }) + + {updated_blocks_map, updated_txs_list} + end) + end + + # Recovers missing rollup blocks and transactions from the RPC if not all required blocks are found in the current data. + # + # This function compares the required rollup block numbers with the ones already + # present in the current data. If some blocks are missing, it retrieves them from + # the RPC along with their transactions. The retrieved blocks and transactions + # are then merged with the current data to ensure a complete set for further + # processing. + # + # ## Parameters + # - `current_rollup_blocks`: The map of rollup blocks currently held. + # - `current_rollup_txs`: The list of transactions currently held. + # - `required_blocks_numbers`: A list of block numbers that are required for + # processing. + # - `blocks_to_batches`: A map associating rollup block numbers with batch numbers. + # - `rollup_rpc_config`: Configuration for the RPC calls. + # + # ## Returns + # - A tuple containing the updated map of rollup blocks and the updated list of + # transactions, both are ready for database import. 
+ defp recover_data_if_necessary( + current_rollup_blocks, + current_rollup_txs, + required_blocks_numbers, + blocks_to_batches, + rollup_rpc_config + ) do + required_blocks_amount = length(required_blocks_numbers) + + found_blocks_numbers = Map.keys(current_rollup_blocks) + found_blocks_numbers_length = length(found_blocks_numbers) + + if found_blocks_numbers_length != required_blocks_amount do + log_info("Only #{found_blocks_numbers_length} of #{required_blocks_amount} rollup blocks found in DB") + + {recovered_blocks_map, recovered_txs_list, _} = + recover_rollup_blocks_and_txs_from_rpc( + required_blocks_numbers, + found_blocks_numbers, + blocks_to_batches, + rollup_rpc_config + ) + + {Map.merge(current_rollup_blocks, recovered_blocks_map), current_rollup_txs ++ recovered_txs_list} + else + {current_rollup_blocks, current_rollup_txs} + end + end + + # Recovers missing rollup blocks and their transactions from RPC based on required block numbers. + # + # This function identifies missing rollup blocks by comparing the required block + # numbers with those already found. It then fetches the missing blocks in chunks + # using JSON RPC calls, aggregating the results into a map of rollup blocks and + # a list of transactions. The data is processed to ensure each block and its + # transactions are correctly associated with their batch number. + # + # ## Parameters + # - `required_blocks_numbers`: A list of block numbers that are required to be + # fetched. + # - `found_blocks_numbers`: A list of block numbers that have already been + # fetched. + # - `blocks_to_batches`: A map linking block numbers to their respective batch + # numbers. + # - `rollup_rpc_config`: A map containing configuration parameters including + # JSON RPC arguments for rollup RPC and the chunk size + # for batch processing. + # + # ## Returns + # - A tuple containing: + # - A map of rollup blocks associated with the batch numbers, ready for + # database import. 
+ # - A list of transactions, each associated with its respective rollup block + # and batch number, ready for database import. + # - The updated counter of processed chunks (usually ignored). + defp recover_rollup_blocks_and_txs_from_rpc( + required_blocks_numbers, + found_blocks_numbers, + blocks_to_batches, + %{ + json_rpc_named_arguments: rollup_json_rpc_named_arguments, + chunk_size: rollup_chunk_size + } = _rollup_rpc_config + ) do + missed_blocks = required_blocks_numbers -- found_blocks_numbers + missed_blocks_length = length(missed_blocks) + + missed_blocks + |> Enum.sort() + |> Enum.chunk_every(rollup_chunk_size) + |> Enum.reduce({%{}, [], 0}, fn chunk, {blocks_map, txs_list, chunks_counter} -> + Logging.log_details_chunk_handling( + "Collecting rollup data", + {"block", "blocks"}, + chunk, + chunks_counter, + missed_blocks_length + ) + + requests = + chunk + |> Enum.reduce([], fn block_num, requests_list -> + [ + BlockByNumber.request( + %{ + id: blocks_to_batches[block_num], + number: block_num + }, + false + ) + | requests_list + ] + end) + + {blocks_map_updated, txs_list_updated} = + requests + |> Rpc.make_chunked_request_keep_id(rollup_json_rpc_named_arguments, "eth_getBlockByNumber") + |> prepare_rollup_block_map_and_transactions_list(blocks_map, txs_list) + + {blocks_map_updated, txs_list_updated, chunks_counter + length(chunk)} + end) + end + + # Processes JSON responses to construct a mapping of rollup block information and a list of transactions. + # + # This function takes JSON RPC responses for rollup blocks and processes each + # response to create a mapping of rollup block details and a comprehensive list + # of transactions associated with these blocks. It ensures that each block and its + # corresponding transactions are correctly associated with their batch number. + # + # ## Parameters + # - `json_responses`: A list of JSON RPC responses containing rollup block data. + # - `rollup_blocks`: The initial map of rollup block information. 
+ # - `rollup_txs`: The initial list of rollup transactions. + # + # ## Returns + # - A tuple containing: + # - An updated map of rollup blocks associated with their batch numbers, ready + # for database import. + # - An updated list of transactions, each associated with its respective rollup + # block and batch number, ready for database import. + defp prepare_rollup_block_map_and_transactions_list(json_responses, rollup_blocks, rollup_txs) do + json_responses + |> Enum.reduce({rollup_blocks, rollup_txs}, fn resp, {blocks_map, txs_list} -> + batch_num = resp.id + blk_num = quantity_to_integer(resp.result["number"]) + + updated_blocks_map = + Map.put( + blocks_map, + blk_num, + %{block_number: blk_num, batch_number: batch_num, confirmation_id: nil} + ) + + updated_txs_list = + case resp.result["transactions"] do + nil -> + txs_list + + new_txs -> + Enum.reduce(new_txs, txs_list, fn l2_tx_hash, txs_list -> + [%{tx_hash: l2_tx_hash, batch_number: batch_num} | txs_list] + end) + end + + {updated_blocks_map, updated_txs_list} + end) + end + + # Retrieves the unique identifier of an L1 transaction by its hash from the given + # map. `nil` if there is no such transaction in the map. + defp get_l1_tx_id_by_hash(l1_txs, hash) do + l1_txs + |> Map.get(hash) + |> Kernel.||(%{id: nil}) + |> Map.get(:id) + end + + # Aggregates rollup transactions by batch number, counting the number of transactions in each batch. + defp batches_to_rollup_txs_amounts(rollup_txs) do + rollup_txs + |> Enum.reduce(%{}, fn tx, acc -> + Map.put(acc, tx.batch_number, Map.get(acc, tx.batch_number, 0) + 1) + end) + end + + # Retrieves initiated L2-to-L1 messages up to specified block number and marks them as 'sent'. 
+ defp get_committed_l2_to_l1_messages(block_number) do + block_number + |> Db.initiated_l2_to_l1_messages() + |> Enum.map(fn tx -> + Map.put(tx, :status, :sent) + end) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex new file mode 100644 index 000000000000..35279518332b --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex @@ -0,0 +1,1034 @@ +defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do + @moduledoc """ + Handles the discovery and processing of new and historical confirmations of rollup blocks for an Arbitrum rollup. + + This module orchestrates the discovery of rollup block confirmations delivered + to the Arbitrum Outbox contract. It distinguishes between new confirmations of + rollup blocks and past confirmations that were previously unprocessed or missed. + + The process involves fetching logs for the `SendRootUpdated` events emitted by + the Arbitrum Outbox contract. These events indicate the top of the rollup blocks + confirmed up to a specific point in time. The identified block is used to find + all blocks beneath it that are not confirmed by other `SendRootUpdated` events. + All discovered blocks are then linked with the corresponding transaction that + emitted the `SendRootUpdated` event. Additionally, L2-to-L1 messages included in + the rollup blocks up to the confirmed top are identified to change their status + from `:sent` to `:confirmed`. + + Though the `SendRootUpdated` event implies that all rollup blocks below the + mentioned block are confirmed, the current design of the process attempts to + match every rollup block to a specific confirmation. 
This means that if there + are two confirmations, and the earlier one points to block N while the later + points to block M (such that M > N), the blocks from N+1 to M are linked with + the latest confirmation, and blocks from X+1 to N are linked to the earlier + confirmation (where X is the rollup block mentioned in an even earlier + confirmation). + + Currently, the process of discovering confirmed rollup blocks works with any + position of the top confirmed block in a batch. Later, if it is confirmed that + the top block in a confirmation always aligns with the last block in a batch, + this approach to rollup block discovery can be revisited for simplification. + """ + + import EthereumJSONRPC, only: [quantity_to_integer: 1] + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1, log_info: 1, log_debug: 1] + + alias EthereumJSONRPC.Block.ByNumber, as: BlockByNumber + alias Indexer.Helper, as: IndexerHelper + + alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + alias Indexer.Fetcher.Arbitrum.Utils.Helper, as: ArbitrumHelper + + alias Explorer.Chain + + require Logger + + # keccak256("SendRootUpdated(bytes32,bytes32)") + @send_root_updated_event "0xb4df3847300f076a369cd76d2314b470a1194d9e8a6bb97f1860aee88a5f6748" + + @doc """ + Discovers and processes new confirmations of rollup blocks within a calculated block range. + + This function identifies the appropriate L1 block range for discovering new + rollup confirmations. It fetches logs representing `SendRootUpdated` events + within this range to identify the new tops of rollup block confirmations. The + discovered confirmations are processed to update the status of rollup blocks + and L2-to-L1 messages accordingly. Eventually, updated rollup blocks, + cross-chain messages, and newly constructed lifecycle transactions are imported + into the database. + + ## Parameters + - A map containing: + - `config`: Configuration settings including the L1 outbox address, L1 RPC + configurations. 
+ - `data`: Contains the starting L1 block number from which to begin the new + confirmation discovery. + + ## Returns + - `{retcode, end_block}` where `retcode` is either `:ok` or + `:confirmation_missed` indicating the success or failure of the discovery + process, and `end_block` is used to determine the start block number for the + next iteration of new confirmations discovery. + - `{:ok, start_block - 1}` if there are no new blocks to be processed, + indicating that the current start block should be reconsidered in the next + iteration. + """ + @spec discover_new_rollup_confirmation(%{ + :config => %{ + :l1_outbox_address => binary(), + :l1_rpc => %{ + :finalized_confirmations => boolean(), + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :logs_block_range => non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }, + :data => %{:new_confirmations_start_block => non_neg_integer(), optional(any()) => any()}, + optional(any()) => any() + }) :: {:confirmation_missed, non_neg_integer()} | {:ok, non_neg_integer()} + def discover_new_rollup_confirmation( + %{ + config: %{ + l1_rpc: l1_rpc_config, + l1_outbox_address: outbox_address + }, + data: %{new_confirmations_start_block: start_block} + } = _state + ) do + # It makes sense to use "safe" here. 
Blocks are confirmed with delay in one week + # (applicable for ArbitrumOne and Nova), so 10 mins delay is not significant + {:ok, latest_block} = + IndexerHelper.get_block_number_by_tag( + if(l1_rpc_config.finalized_confirmations, do: "safe", else: "latest"), + l1_rpc_config.json_rpc_named_arguments, + Rpc.get_resend_attempts() + ) + + end_block = min(start_block + l1_rpc_config.logs_block_range - 1, latest_block) + + if start_block <= end_block do + log_info("Block range for new rollup confirmations discovery: #{start_block}..#{end_block}") + + retcode = + discover( + outbox_address, + start_block, + end_block, + l1_rpc_config + ) + + {retcode, end_block} + else + {:ok, start_block - 1} + end + end + + @doc """ + Discovers and processes historical confirmations of rollup blocks within a calculated block range. + + This function determines the appropriate L1 block range for discovering + historical rollup confirmations based on the provided end block or from the + analysis of confirmations missed in the database. It then fetches logs + representing `SendRootUpdated` events within this range to identify the + historical tops of rollup block confirmations. The discovered confirmations + are processed to update the status of rollup blocks and L2-to-L1 messages + accordingly. Eventually, updated rollup blocks, cross-chain messages, and newly + constructed lifecycle transactions are imported into the database. + + ## Parameters + - A map containing: + - `config`: Configuration settings including the L1 outbox address, rollup + initialization block, RPC configurations, and the start block for + the confirmation discovery. + - `data`: Contains optional start and end L1 block numbers to limit the range + for historical confirmation discovery. 
+ + ## Returns + - `{retcode, {start_block, interim_start_block}}` where + - `retcode` is either `:ok` or `:confirmation_missed` + - `start_block` is the starting block for the next iteration of discovery + - `interim_start_block` is the end block for the next iteration of discovery + """ + @spec discover_historical_rollup_confirmation(%{ + :config => %{ + :l1_outbox_address => binary(), + :l1_rollup_init_block => non_neg_integer(), + :l1_rpc => %{ + :finalized_confirmations => boolean(), + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :logs_block_range => non_neg_integer(), + optional(any()) => any() + }, + :l1_start_block => non_neg_integer(), + optional(any()) => any() + }, + :data => %{ + :historical_confirmations_end_block => nil | non_neg_integer(), + :historical_confirmations_start_block => nil | non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }) :: + {:confirmation_missed, {nil | non_neg_integer(), nil | non_neg_integer()}} + | {:ok, {nil | non_neg_integer(), nil | non_neg_integer()}} + def discover_historical_rollup_confirmation( + %{ + config: %{ + l1_rpc: l1_rpc_config, + l1_outbox_address: outbox_address, + l1_start_block: l1_start_block, + l1_rollup_init_block: l1_rollup_init_block + }, + data: %{ + historical_confirmations_end_block: expected_confirmation_end_block, + historical_confirmations_start_block: expected_confirmation_start_block + } + } = _state + ) do + {interim_start_block, end_block} = + case expected_confirmation_end_block do + nil -> + Db.l1_blocks_to_expect_rollup_blocks_confirmation(nil) + + _ -> + {expected_confirmation_start_block, expected_confirmation_end_block} + end + + with {:end_block_defined, true} <- {:end_block_defined, not is_nil(end_block)}, + {:genesis_not_reached, true} <- {:genesis_not_reached, end_block >= l1_rollup_init_block} do + start_block = + case interim_start_block do + nil -> + max(l1_rollup_init_block, end_block - l1_rpc_config.logs_block_range + 
1) + + value -> + Enum.max([l1_rollup_init_block, value, end_block - l1_rpc_config.logs_block_range + 1]) + end + + log_info("Block range for historical rollup confirmations discovery: #{start_block}..#{end_block}") + + retcode = + discover( + outbox_address, + start_block, + end_block, + l1_rpc_config + ) + + {retcode, {start_block, interim_start_block}} + else + # TODO: Investigate on a live system what will happen when all blocks are confirmed + + # the situation when end block is `nil` is possible when there is no confirmed + # block in the database and the historical confirmations discovery must start + # from the L1 block specified as L1 start block (configured, or the latest block number) + {:end_block_defined, false} -> {:ok, {l1_start_block, nil}} + # If the genesis of the rollup has been reached during historical confirmations + # discovery, no further actions are needed. + {:genesis_not_reached, false} -> {:ok, {l1_rollup_init_block, nil}} + end + end + + # Discovers and processes new confirmations of rollup blocks within the given block range. + # + # This function fetches logs within the specified L1 block range to find new + # confirmations of rollup blocks. It processes these logs to extract confirmation + # details, identifies the corresponding rollup blocks and updates their + # status, and also discovers L2-to-L1 messages to be marked as confirmed. The + # identified lifecycle transactions, rollup blocks, and confirmed messages are then + # imported into the database. + # + # ## Parameters + # - `outbox_address`: The address of the Arbitrum outbox contract. + # - `start_block`: The starting block number for fetching logs. + # - `end_block`: The ending block number for fetching logs. + # - `l1_rpc_config`: Configuration for L1 RPC calls. + # + # ## Returns + # - The retcode indicating the result of the discovery and processing operation, + # either `:ok` or `:confirmation_missed`. 
+ defp discover( + outbox_address, + start_block, + end_block, + l1_rpc_config + ) do + {logs, _} = + get_logs_new_confirmations( + start_block, + end_block, + outbox_address, + l1_rpc_config.json_rpc_named_arguments + ) + + {retcode, {lifecycle_txs, rollup_blocks, confirmed_txs}} = + handle_confirmations_from_logs( + logs, + l1_rpc_config, + outbox_address + ) + + {:ok, _} = + Chain.import(%{ + arbitrum_lifecycle_transactions: %{params: lifecycle_txs}, + arbitrum_batch_blocks: %{params: rollup_blocks}, + arbitrum_messages: %{params: confirmed_txs}, + timeout: :infinity + }) + + retcode + end + + # Processes logs to handle confirmations for rollup blocks. + # + # This function analyzes logs containing `SendRootUpdated` events with information + # about the confirmations up to a specific point in time. It identifies the ranges + # of rollup blocks covered by the confirmations and constructs lifecycle + # transactions linked to these confirmed blocks. Considering the highest confirmed + # rollup block number, it discovers L2-to-L1 messages that have been committed and + # updates their status to confirmed. Lists of confirmed rollup blocks, lifecycle + # transactions, and confirmed messages are prepared for database import. + # + # ## Parameters + # - `logs`: Log entries representing `SendRootUpdated` events. + # - `l1_rpc_config`: Configuration for L1 RPC calls. + # - `outbox_address`: The address of the Arbitrum outbox contract. 
+ # + # ## Returns + # - `{retcode, {lifecycle_txs, rollup_blocks, confirmed_txs}}` where + # - `retcode` is either `:ok` or `:confirmation_missed` + # - `lifecycle_txs` is a list of lifecycle transactions confirming blocks in the + # rollup + # - `rollup_blocks` is a list of rollup blocks associated with the corresponding + # lifecycle transactions + # - `confirmed_txs` is a list of L2-to-L1 messages identified up to the highest + # confirmed block number, to be imported with the new status `:confirmed` + defp handle_confirmations_from_logs([], _, _) do + {:ok, {[], [], []}} + end + + defp handle_confirmations_from_logs( + logs, + l1_rpc_config, + outbox_address + ) do + {rollup_blocks_to_l1_txs, lifecycle_txs_basic, blocks_requests} = parse_logs_for_new_confirmations(logs) + + rollup_blocks = + discover_rollup_blocks( + rollup_blocks_to_l1_txs, + %{ + json_rpc_named_arguments: l1_rpc_config.json_rpc_named_arguments, + logs_block_range: l1_rpc_config.logs_block_range, + outbox_address: outbox_address + } + ) + + applicable_lifecycle_txs = take_lifecycle_txs_for_confirmed_blocks(rollup_blocks, lifecycle_txs_basic) + + retcode = + if Enum.count(lifecycle_txs_basic) != Enum.count(applicable_lifecycle_txs) do + :confirmation_missed + else + :ok + end + + if Enum.empty?(applicable_lifecycle_txs) do + {retcode, {[], [], []}} + else + {lifecycle_txs, rollup_blocks, highest_confirmed_block_number} = + finalize_lifecycle_txs_and_confirmed_blocks( + applicable_lifecycle_txs, + rollup_blocks, + blocks_requests, + l1_rpc_config + ) + + # Drawback of marking messages as confirmed during a new confirmation handling + # is that the status change could become stuck if confirmations are not handled. + # For example, due to DB inconsistency: some blocks/batches are missed. 
+ confirmed_txs = get_confirmed_l2_to_l1_messages(highest_confirmed_block_number) + + {retcode, {lifecycle_txs, rollup_blocks, confirmed_txs}} + end + end + + # Parses logs to extract new confirmations for rollup blocks and prepares related data. + # + # This function processes `SendRootUpdated` event logs. For each event, it maps + # the hash of the top confirmed rollup block provided in the event to + # the confirmation description, containing the L1 transaction hash and + # block number. It also prepares a set of lifecycle transactions in basic form + # and block requests to later fetch timestamps for the corresponding lifecycle + # transactions. + # + # ## Parameters + # - `logs`: A list of log entries representing `SendRootUpdated` events. + # + # ## Returns + # - A tuple containing: + # - A map associating rollup block hashes with their confirmation descriptions. + # - A map of basic-form lifecycle transactions keyed by L1 transaction hash. + # - A list of RPC requests to fetch block data for these lifecycle transactions. 
+ defp parse_logs_for_new_confirmations(logs) do + {rollup_block_to_l1_txs, lifecycle_txs, blocks_requests} = + logs + |> Enum.reduce({%{}, %{}, %{}}, fn event, {block_to_txs, lifecycle_txs, blocks_requests} -> + rollup_block_hash = send_root_updated_event_parse(event) + + l1_tx_hash_raw = event["transactionHash"] + l1_tx_hash = Rpc.string_hash_to_bytes_hash(l1_tx_hash_raw) + l1_blk_num = quantity_to_integer(event["blockNumber"]) + + updated_block_to_txs = + Map.put( + block_to_txs, + rollup_block_hash, + %{l1_tx_hash: l1_tx_hash, l1_block_num: l1_blk_num} + ) + + updated_lifecycle_txs = + Map.put( + lifecycle_txs, + l1_tx_hash, + %{hash: l1_tx_hash, block_number: l1_blk_num} + ) + + updated_blocks_requests = + Map.put( + blocks_requests, + l1_blk_num, + BlockByNumber.request(%{id: 0, number: l1_blk_num}, false, true) + ) + + log_info("New confirmation for the rollup block #{rollup_block_hash} found in #{l1_tx_hash_raw}") + + {updated_block_to_txs, updated_lifecycle_txs, updated_blocks_requests} + end) + + {rollup_block_to_l1_txs, lifecycle_txs, Map.values(blocks_requests)} + end + + # Transforms rollup block hashes to numbers and associates them with their confirmation descriptions. + # + # This function converts a map linking rollup block hashes to confirmation descriptions + # into a map of rollup block numbers to confirmations, facilitating the identification + # of blocks for confirmation. The function then processes confirmations starting from + # the lowest rollup block number, ensuring that each block is associated with the + # correct confirmation. This sequential handling preserves the confirmation history, + # allowing future processing to accurately associate blocks with their respective + # confirmations. + # + # ## Parameters + # - `rollup_blocks_to_l1_txs`: A map of rollup block hashes to confirmation descriptions. + # - `outbox_config`: Configuration for the Arbitrum outbox contract. 
+ # + # ## Returns + # - A list of rollup blocks each associated with the transaction's hash that + # confirms the block. + defp discover_rollup_blocks(rollup_blocks_to_l1_txs, outbox_config) do + block_to_l1_txs = + rollup_blocks_to_l1_txs + |> Map.keys() + |> Enum.reduce(%{}, fn block_hash, transformed -> + rollup_block_num = Db.rollup_block_hash_to_num(block_hash) + + # nil is applicable for the case when the block is not indexed yet by + # the block fetcher, it makes sense to skip this block so far + case rollup_block_num do + nil -> + log_warning("The rollup block #{block_hash} did not found. Plan to skip the confirmations") + transformed + + value -> + Map.put(transformed, value, rollup_blocks_to_l1_txs[block_hash]) + end + end) + + if Enum.empty?(block_to_l1_txs) do + [] + else + # Oldest (with the lowest number) block is first + rollup_block_numbers = Enum.sort(Map.keys(block_to_l1_txs), :asc) + + rollup_block_numbers + |> Enum.reduce([], fn block_num, updated_rollup_blocks -> + log_info("Attempting to mark all rollup blocks including ##{block_num} and lower as confirmed") + + {_, confirmed_blocks} = + discover_rollup_blocks_belonging_to_one_confirmation( + block_num, + block_to_l1_txs[block_num], + outbox_config + ) + + # credo:disable-for-next-line Credo.Check.Refactor.Nesting + if length(confirmed_blocks) > 0 do + log_info("Found #{length(confirmed_blocks)} confirmed blocks") + + add_confirmation_transaction(confirmed_blocks, block_to_l1_txs[block_num].l1_tx_hash) ++ + updated_rollup_blocks + else + log_info("Either no unconfirmed blocks found or DB inconsistency error discovered") + [] + end + end) + end + end + + # Discovers rollup blocks within a single confirmation, ensuring no gaps in the confirmed range. + # + # This function follows these steps to identify unconfirmed rollup blocks related + # to a single confirmation event: + # 1. Retrieve the batch associated with the specified rollup block number. + # 2. 
Obtain a list of unconfirmed blocks within that batch. + # 3. Determine the first unconfirmed block in the batch, considering potential + # gaps or already confirmed blocks. + # 4. Verify the continuity of the unconfirmed blocks range to ensure there are no + # database inconsistencies or unindexed blocks. + # 5. If the first unconfirmed block is at the start of the batch, check if the + # confirmation also covers blocks from previous batches. If so, include their + # unconfirmed blocks in the range. + # 6. If all blocks in the previous batch are confirmed, return the current list of + # unconfirmed blocks. + # 7. If the first unconfirmed block is in the middle of the batch, return the + # current list of unconfirmed blocks. + # This process continues recursively until it finds a batch with all blocks + # confirmed, encounters a gap, or reaches the start of the chain of blocks related + # to the confirmation. + # + # Cache Behavior: + # For each new confirmation, the cache for `eth_getLogs` requests starts empty. + # During recursive calls for previous batches, the cache fills with results for + # specific block ranges. With the confirmation description remaining constant + # through these calls, the cache effectively reduces the number of requests by + # reusing results for events related to previous batches within the same block + # ranges. Although the same logs might be re-requested for other confirmations + # within the same discovery iteration, the cache is not shared across different + # confirmations and resets for each new confirmation. Extending cache usage + # across different confirmations would require additional logic to match block + # ranges and manage cache entries, significantly complicating cache handling. + # Given the rarity of back-to-back confirmations in the same iteration of + # discovery in a production environment, the added complexity of shared caching + # is deemed excessive. 
+ # + # ## Parameters + # - `rollup_block_num`: The rollup block number associated with the confirmation. + # - `confirmation_desc`: Description of the latest confirmation. + # - `outbox_config`: Configuration for the Arbitrum outbox contract. + # - `cache`: A cache to minimize repetitive `eth_getLogs` calls. + # + # ## Returns + # - `{:ok, unconfirmed_blocks}`: A list of rollup blocks that are confirmed by + # the current confirmation but not yet marked as confirmed in the database. + # - `{:error, []}`: If a discrepancy or inconsistency is found during the + # discovery process. + defp discover_rollup_blocks_belonging_to_one_confirmation( + rollup_block_num, + confirmation_desc, + outbox_config, + cache \\ %{} + ) do + # The following batch fields are required in the further processing: + # number, start_block, end_block, commitment_transaction.block_number + with {:ok, batch} <- discover_rollup_blocks__get_batch(rollup_block_num), + {:ok, unconfirmed_rollup_blocks} when unconfirmed_rollup_blocks != [] <- + discover_rollup_blocks__get_unconfirmed_rollup_blocks(batch, rollup_block_num), + # It is not the issue to request logs for the first call of + # discover_rollup_blocks_belonging_to_one_confirmation since we need + # to make sure that there is no another confirmation for part of the + # blocks of the batch. + # If it returns `{:ok, []}` it will be passed as the return value of + # discover_rollup_blocks_belonging_to_one_confirmation function. 
+ {:ok, {first_unconfirmed_block, new_cache}} <- + discover_rollup_blocks__check_confirmed_blocks_in_batch( + rollup_block_num, + length(unconfirmed_rollup_blocks), + batch, + confirmation_desc, + outbox_config, + cache + ), + true <- discover_rollup_blocks__check_consecutive_rollup_blocks(unconfirmed_rollup_blocks, batch.number) do + if List.first(unconfirmed_rollup_blocks).block_number == batch.start_block do + log_info("End of the batch #{batch.number} discovered, moving to the previous batch") + + {status, updated_rollup_blocks} = + discover_rollup_blocks_belonging_to_one_confirmation( + first_unconfirmed_block - 1, + confirmation_desc, + outbox_config, + new_cache + ) + + case status do + :error -> {:error, []} + # updated_rollup_blocks will contain either [] if the previous batch + # already confirmed or list of unconfirmed blocks of all previous + # unconfirmed batches + :ok -> {:ok, unconfirmed_rollup_blocks ++ updated_rollup_blocks} + end + else + log_info("All unconfirmed blocks in the batch ##{batch.number} found") + {:ok, unconfirmed_rollup_blocks} + end + end + end + + # Retrieves the batch containing the specified rollup block and logs the attempt. + defp discover_rollup_blocks__get_batch(rollup_block_num) do + # Generally if batch is nil it means either + # - a batch to a rollup block association is not found, not recoverable + # - a rollup block is not found, the corresponding batch is not handled yet. It is possible + # because the method can be called for guessed block number rather than received from + # the batch description or from blocks list received after a batch handling. In this case + # the confirmation must be postponed until the corresponding batch is handled. 
+ batch = Db.get_batch_by_rollup_block_number(rollup_block_num) + + if batch != nil do + log_info( + "Attempt to identify which blocks of the batch ##{batch.number} within ##{batch.start_block}..##{rollup_block_num} are confirmed" + ) + + {:ok, batch} + else + log_warning( + "Batch where the block ##{rollup_block_num} was included is not found, skipping this blocks and lower" + ) + + {:error, []} + end + end + + # Identifies unconfirmed rollup blocks within a batch up to specified block + # number, checking for potential synchronization issues. + defp discover_rollup_blocks__get_unconfirmed_rollup_blocks(batch, rollup_block_num) do + unconfirmed_rollup_blocks = Db.unconfirmed_rollup_blocks(batch.start_block, rollup_block_num) + + if Enum.empty?(unconfirmed_rollup_blocks) do + # Blocks are not found only in case when all blocks in the batch confirmed + # or in case when Chain.Block for block in the batch are not received yet + + if Db.count_confirmed_rollup_blocks_in_batch(batch.number) == batch.end_block - batch.start_block + 1 do + log_info("No unconfirmed blocks in the batch #{batch.number}") + {:ok, []} + else + log_warning("Seems that the batch #{batch.number} was not fully synced. Skipping its blocks") + {:error, []} + end + else + {:ok, unconfirmed_rollup_blocks} + end + end + + # Identifies the first block in the batch that is not yet confirmed. + # + # This function attempts to find a `SendRootUpdated` event between the already + # discovered confirmation and the L1 block where the batch was committed, that + # mentions any block of the batch as the top of the confirmed blocks. Depending + # on the lookup result, it either considers the found block or the very + # first block of the batch as the start of the range of unconfirmed blocks ending + # with `rollup_block_num`. It also checks for a gap in the identified rollup + # blocks range, indicating potential database inconsistency or an unprocessed batch. 
+ # To optimize `eth_getLogs` calls required for the `SendRootUpdated` event lookup, + # it uses a cache. + # + # ## Parameters + # - `rollup_block_num`: The rollup block number to check for confirmation. + # - `unconfirmed_rollup_blocks_length`: The number of unconfirmed blocks in the batch. + # - `batch`: The batch containing the rollup blocks. + # - `confirmation_desc`: Details of the latest confirmation. + # - `outbox_config`: Configuration for the Arbitrum outbox contract. + # - `cache`: A cache to minimize `eth_getLogs` calls. + # + # ## Returns + # - `{:ok, []}` when all blocks in the batch are already confirmed. + # - `{:error, []}` when a potential database inconsistency or unprocessed batch is + # found. + # - `{:ok, {first_unconfirmed_block_in_batch, new_cache}}` with the number of the + # first unconfirmed block in the batch and updated cache. + defp discover_rollup_blocks__check_confirmed_blocks_in_batch( + rollup_block_num, + unconfirmed_rollup_blocks_length, + batch, + confirmation_desc, + outbox_config, + cache + ) do + # This function might be over-engineered, as confirmations are likely always + # aligned with the end of a batch. If, after analyzing the databases of fully + # synchronized BS instances across several Arbitrum-based chains, it is confirmed + # that this alignment is consistent, then this functionality can be optimized. + + {status, block?, new_cache} = check_if_batch_confirmed(batch, confirmation_desc, outbox_config, cache) + + case {status, block? == rollup_block_num} do + {:error, _} -> + {:error, []} + + {_, true} -> + log_info("All the blocks in the batch ##{batch.number} have been already confirmed by another transaction") + # Though the response differs from another `:ok` response in the function, + # it is assumed that this case will be handled by the invoking function. + {:ok, []} + + {_, false} -> + first_unconfirmed_block_in_batch = + case block? 
do + nil -> + batch.start_block + + value -> + log_info("Blocks up to ##{value} of the batch have been already confirmed by another transaction") + value + 1 + end + + if unconfirmed_rollup_blocks_length == rollup_block_num - first_unconfirmed_block_in_batch + 1 do + {:ok, {first_unconfirmed_block_in_batch, new_cache}} + else + # The case when there is a gap in the blocks range is possible when there is + # a DB inconsistency. From another side, the case when the confirmation is for blocks + # in two batches -- one batch has been already indexed, another one has not been yet. + # Both cases should be handled in the same way - this confirmation must be postponed + # until the case resolution. + log_warning( + "Only #{unconfirmed_rollup_blocks_length} of #{rollup_block_num - first_unconfirmed_block_in_batch + 1} blocks found. Skipping the blocks from the batch #{batch.number}" + ) + + {:error, []} + end + end + end + + # Checks if any rollup blocks within a batch are confirmed by scanning `SendRootUpdated` events. + # + # This function uses the L1 block range from batch's commit transaction block to + # the block before the latest confirmation to search for `SendRootUpdated` events. + # These events indicate the top confirmed rollup block. To optimize `eth_getLogs` + # calls, it uses a cache and requests logs in chunked block ranges. + # + # ## Parameters + # - `batch`: The batch to check for confirmed rollup blocks. + # - `confirmation_desc`: Description of the latest confirmation details. + # - `l1_outbox_config`: Configuration for the L1 outbox contract, including block + # range for logs retrieval. + # - `cache`: A cache for the logs to reduce the number of `eth_getLogs` calls. + # + # ## Returns + # - `{:ok, highest_confirmed_rollup_block, new_cache}`: + # - `highest_confirmed_rollup_block` is the highest rollup block number confirmed + # within the batch. + # - `new_cache` contains the updated logs cache. 
+ # - `{:ok, nil, new_cache}` if no rollup blocks within the batch are confirmed. + # - `new_cache` contains the updated logs cache. + # - `{:error, nil, new_cache}` if an error occurs during the log fetching process, + # such as when a rollup block corresponding to a given hash is not found in the + # database. + # - `new_cache` contains the updated logs cache despite the error. + defp check_if_batch_confirmed(batch, confirmation_desc, l1_outbox_config, cache) do + log_info( + "Use L1 blocks #{batch.commitment_transaction.block_number}..#{confirmation_desc.l1_block_num - 1} to look for a rollup block confirmation within #{batch.start_block}..#{batch.end_block} of ##{batch.number}" + ) + + l1_blocks_pairs_to_get_logs( + batch.commitment_transaction.block_number, + confirmation_desc.l1_block_num - 1, + l1_outbox_config.logs_block_range + ) + |> Enum.reduce_while({:ok, nil, cache}, fn {log_start, log_end}, {_, _, updated_cache} -> + # credo:disable-for-previous-line Credo.Check.Refactor.PipeChainStart + {status, latest_block_confirmed, new_cache} = + do_check_if_batch_confirmed( + {batch.start_block, batch.end_block}, + {log_start, log_end}, + l1_outbox_config, + updated_cache + ) + + case {status, latest_block_confirmed} do + {:error, _} -> + {:halt, {:error, nil, new_cache}} + + {_, nil} -> + {:cont, {:ok, nil, new_cache}} + + {_, previous_confirmed_rollup_block} -> + log_info("Confirmed block ##{previous_confirmed_rollup_block} for the batch found") + {:halt, {:ok, previous_confirmed_rollup_block, new_cache}} + end + end) + end + + # Generates descending order pairs of start and finish block numbers, ensuring + # identical beginning pairs for the same finish block and max range. 
+ # Examples: + # l1_blocks_pairs_to_get_logs(1, 10, 3) -> [{8, 10}, {5, 7}, {2, 4}, {1, 1}] + # l1_blocks_pairs_to_get_logs(5, 10, 3) -> [{8, 10}, {5, 7}] + defp l1_blocks_pairs_to_get_logs(start, finish, max_range) do + # credo:disable-for-lines:9 Credo.Check.Refactor.PipeChainStart + Stream.unfold(finish, fn cur_finish -> + if cur_finish < start do + nil + else + cur_start = max(cur_finish - max_range + 1, start) + {{cur_start, cur_finish}, cur_start - 1} + end + end) + |> Enum.to_list() + end + + # Checks if any blocks within a specific range are identified as the top of confirmed blocks by scanning `SendRootUpdated` events. + # + # This function fetches logs for `SendRootUpdated` events within the specified + # L1 block range to determine if any rollup blocks within the given rollup block + # range are mentioned in the events, indicating the top of confirmed blocks up + # to that log. It uses caching to minimize `eth_getLogs` calls. + # + # ## Parameters + # - A tuple `{rollup_start_block, rollup_end_block}` specifying the rollup block + # range to check for confirmations + # - A tuple `{log_start, log_end}` specifying the L1 block range to fetch logs. + # - `l1_outbox_config`: Configuration for the Arbitrum Outbox contract. + # - `cache`: A cache of previously fetched logs to reduce `eth_getLogs` calls. + # + # ## Returns + # - A tuple `{:ok, latest_block_confirmed, new_cache}`: + # - `latest_block_confirmed` is the highest rollup block number confirmed within + # the specified range. + # - A tuple `{:ok, nil, new_cache}` if no rollup blocks within the specified range + # are confirmed. + # - A tuple `{:error, nil, new_cache}` if during parsing logs a rollup block with + # given hash is not being found in the database. + # For all three cases the `new_cache` contains the updated logs cache. 
+ defp do_check_if_batch_confirmed( + {rollup_start_block, rollup_end_block}, + {log_start, log_end}, + l1_outbox_config, + cache + ) do + # The logs in the given L1 blocks range + {logs, new_cache} = + get_logs_new_confirmations( + log_start, + log_end, + l1_outbox_config.outbox_address, + l1_outbox_config.json_rpc_named_arguments, + cache + ) + + # For every discovered event check if the rollup block in the confirmation + # is within the specified range which usually means that the event + # is the confirmation of the batch described by the range. + {status, latest_block_confirmed} = + logs + |> Enum.reduce_while({:ok, nil}, fn event, _acc -> + log_info("Examining the transaction #{event["transactionHash"]}") + + rollup_block_hash = send_root_updated_event_parse(event) + rollup_block_num = Db.rollup_block_hash_to_num(rollup_block_hash) + + case rollup_block_num do + nil -> + log_warning("The rollup block ##{rollup_block_hash} not found") + {:halt, {:error, nil}} + + value when value >= rollup_start_block and value <= rollup_end_block -> + log_info("The rollup block ##{rollup_block_num} within the range") + {:halt, {:ok, rollup_block_num}} + + _ -> + log_info("The rollup block ##{rollup_block_num} outside of the range") + {:cont, {:ok, nil}} + end + end) + + {status, latest_block_confirmed, new_cache} + end + + # Retrieves logs for `SendRootUpdated` events between specified blocks, + # using cache if available to reduce RPC calls. + # + # This function fetches logs for `SendRootUpdated` events emitted by the + # Outbox contract within the given block range. It utilizes a cache to + # minimize redundant RPC requests. If logs are not present in the cache, + # it fetches them from the RPC node and updates the cache. + # + # ## Parameters + # - `start_block`: The starting block number for log retrieval. + # - `end_block`: The ending block number for log retrieval. + # - `outbox_address`: The address of the Outbox contract. 
+ # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC + # connection. + # - `cache`: An optional parameter holding previously fetched logs to avoid + # redundant RPC calls. + # + # ## Returns + # - A tuple containing: + # - The list of logs corresponding to `SendRootUpdated` events. + # - The updated cache with the newly fetched logs. + defp get_logs_new_confirmations(start_block, end_block, outbox_address, json_rpc_named_arguments, cache \\ %{}) + when start_block <= end_block do + # TODO: consider to have a persistent cache in DB to reduce the number of getLogs requests + {logs, new_cache} = + case cache[{start_block, end_block}] do + nil -> + {:ok, rpc_logs} = + IndexerHelper.get_logs( + start_block, + end_block, + outbox_address, + [@send_root_updated_event], + json_rpc_named_arguments + ) + + {rpc_logs, Map.put(cache, {start_block, end_block}, rpc_logs)} + + cached_logs -> + {cached_logs, cache} + end + + if length(logs) > 0 do + log_debug("Found #{length(logs)} SendRootUpdated logs") + end + + {logs, new_cache} + end + + # Extracts the rollup block hash from a `SendRootUpdated` event log. + defp send_root_updated_event_parse(event) do + [_, _, l2_block_hash] = event["topics"] + + l2_block_hash + end + + # Validates if the list of rollup blocks are consecutive without gaps in their numbering. + defp discover_rollup_blocks__check_consecutive_rollup_blocks(unconfirmed_rollup_blocks, batch_number) do + if consecutive_rollup_blocks?(unconfirmed_rollup_blocks) do + true + else + # The case when there is a gap in the blocks range is possible when there is + # a DB inconsistency. From another side, the case when the confirmation is for blocks + # in two batches -- one batch has been already indexed, another one has not been yet. + # Both cases should be handled in the same way - this confirmation must be postponed + # until the case resolution. 
+ log_warning("Skipping the blocks from the batch #{batch_number}") + {:error, []} + end + end + + # Checks if the list of rollup blocks are consecutive without gaps in their numbering. + defp consecutive_rollup_blocks?(blocks) do + {_, status} = + Enum.reduce_while(blocks, {nil, false}, fn block, {prev, _} -> + case prev do + nil -> + {:cont, {block.block_number, true}} + + value -> + # credo:disable-for-next-line Credo.Check.Refactor.Nesting + if block.block_number - 1 == value do + {:cont, {block.block_number, true}} + else + log_warning("A gap between blocks ##{value} and ##{block.block_number} found") + {:halt, {block.block_number, false}} + end + end + end) + + status + end + + # Adds the confirmation transaction hash to each rollup block description in the list. + defp add_confirmation_transaction(block_descriptions_list, confirm_tx_hash) do + block_descriptions_list + |> Enum.reduce([], fn block_descr, updated -> + new_block_descr = + block_descr + |> Map.put(:confirmation_transaction, confirm_tx_hash) + + [new_block_descr | updated] + end) + end + + # Selects lifecycle transaction descriptions used for confirming a given list of rollup blocks. + defp take_lifecycle_txs_for_confirmed_blocks(confirmed_rollup_blocks, lifecycle_txs) do + confirmed_rollup_blocks + |> Enum.reduce(%{}, fn block_descr, updated_txs -> + confirmation_tx_hash = block_descr.confirmation_transaction + + Map.put_new(updated_txs, confirmation_tx_hash, lifecycle_txs[confirmation_tx_hash]) + end) + end + + # Finalizes lifecycle transaction descriptions and establishes database-ready links + # between confirmed rollup blocks and their corresponding lifecycle transactions. + # + # This function executes chunked requests to L1 to retrieve block timestamps, which, + # along with the finalization flag, are then used to finalize the lifecycle + # transaction descriptions. 
Each entity in the list of blocks, which needs to be + # confirmed, is updated with the associated lifecycle transaction IDs and prepared + # for import. + # + # ## Parameters + # - `basic_lifecycle_txs`: The initial list of partially filled lifecycle transaction + # descriptions. + # - `confirmed_rollup_blocks`: Rollup blocks to be considered as confirmed. + # - `l1_blocks_requests`: RPC requests of `eth_getBlockByNumber` to fetch L1 block data + # for use in the lifecycle transaction descriptions. + # - A map containing L1 RPC configuration such as JSON RPC arguments, chunk size, + # and a flag indicating whether to track the finalization of transactions. + # + # ## Returns + # - A tuple containing: + # - The list of lifecycle transactions, ready for import. + # - The list of confirmed rollup blocks, ready for import. + # - The highest confirmed block number processed during this run. + defp finalize_lifecycle_txs_and_confirmed_blocks( + basic_lifecycle_txs, + confirmed_rollup_blocks, + l1_blocks_requests, + %{ + json_rpc_named_arguments: l1_json_rpc_named_arguments, + chunk_size: l1_chunk_size, + track_finalization: track_finalization? + } = _l1_rpc_config + ) do + blocks_to_ts = + Rpc.execute_blocks_requests_and_get_ts(l1_blocks_requests, l1_json_rpc_named_arguments, l1_chunk_size) + + lifecycle_txs = + basic_lifecycle_txs + |> ArbitrumHelper.extend_lifecycle_txs_with_ts_and_status(blocks_to_ts, track_finalization?) 
+ |> Db.get_indices_for_l1_transactions() + + {updated_rollup_blocks, highest_confirmed_block_number} = + confirmed_rollup_blocks + |> Enum.reduce({[], -1}, fn block, {updated_list, highest_confirmed} -> + chosen_highest_confirmed = max(highest_confirmed, block.block_number) + + updated_block = + block + |> Map.put(:confirmation_id, lifecycle_txs[block.confirmation_transaction].id) + |> Map.drop([:confirmation_transaction]) + + {[updated_block | updated_list], chosen_highest_confirmed} + end) + + {Map.values(lifecycle_txs), updated_rollup_blocks, highest_confirmed_block_number} + end + + # Retrieves committed L2-to-L1 messages up to specified block number and marks them as 'confirmed'. + defp get_confirmed_l2_to_l1_messages(-1) do + [] + end + + defp get_confirmed_l2_to_l1_messages(block_number) do + block_number + |> Db.sent_l2_to_l1_messages() + |> Enum.map(fn tx -> + Map.put(tx, :status, :confirmed) + end) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex new file mode 100644 index 000000000000..d74f0edab229 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex @@ -0,0 +1,413 @@ +defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do + @moduledoc """ + Coordinates the discovery and processing of new and historical L2-to-L1 message executions for an Arbitrum rollup. + + This module is responsible for identifying and importing executions of messages + that were initiated from Arbitrum's Layer 2 (L2) and are to be relayed to + Layer 1 (L1). It handles both new executions that are currently occurring on L1 + and historical executions that occurred in the past but have not yet been + processed. + + Discovery of these message executions involves parsing logs for + `OutBoxTransactionExecuted` events emitted by the Arbitrum outbox contract. 
As + the logs do not provide comprehensive data for constructing the related + lifecycle transactions, the module executes batched RPC calls to + `eth_getBlockByNumber`, using the responses to obtain transaction timestamps, + thereby enriching the lifecycle transaction data. + """ + + import EthereumJSONRPC, only: [quantity_to_integer: 1] + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1, log_debug: 1] + + alias EthereumJSONRPC.Block.ByNumber, as: BlockByNumber + + import Explorer.Helper, only: [decode_data: 2] + + alias Indexer.Fetcher.Arbitrum.Utils.Helper, as: ArbitrumHelper + alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + alias Indexer.Helper, as: IndexerHelper + + alias Explorer.Chain + + require Logger + + # keccak256("OutBoxTransactionExecuted(address,address,uint256,uint256)") + @outbox_transaction_executed_event "0x20af7f3bbfe38132b8900ae295cd9c8d1914be7052d061a511f3f728dab18964" + @outbox_transaction_executed_unindexed_params [{:uint, 256}] + + @doc """ + Discovers and processes new executions of L2-to-L1 messages within the current L1 block range. + + This function fetches logs for `OutBoxTransactionExecuted` events within the + specified L1 block range to identify new execution transactions for L2-to-L1 + messages, updating their status and linking them with corresponding lifecycle + transactions in the database. Additionally, the function checks unexecuted + L2-to-L1 messages to match them with any newly recorded executions and updates + their status to `:relayed`. + + ## Parameters + - A map containing: + - `config`: Configuration settings including the Arbitrum outbox contract + address, JSON RPC arguments, and the block range for fetching + logs. + - `data`: Contains the starting block number for new execution discovery. + + ## Returns + - `{:ok, end_block}`: On successful discovery and processing, where `end_block` + indicates the necessity to consider next block range in the + following iteration of new executions discovery. 
+ - `{:ok, start_block - 1}`: when no new blocks on L1 were produced since the last
+ iteration of the new executions discovery.
+ + ## Parameters + - A map containing: + - `config`: Configuration settings including the Arbitrum outbox contract + address, the initialization block for the rollup, and JSON RPC + arguments. + - `data`: Contains the ending block number for the historical execution + discovery. + + ## Returns + - `{:ok, start_block}`: On successful discovery and processing, where + `start_block` indicates the necessity to consider another block range in the + next iteration of historical executions discovery. + - `{:ok, l1_rollup_init_block}`: If the historical discovery process has reached + the rollup initialization block, indicating that no further action is needed. + """ + @spec discover_historical_l1_messages_executions(%{ + :config => %{ + :l1_outbox_address => binary(), + :l1_rollup_init_block => non_neg_integer(), + :l1_rpc => %{ + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :logs_block_range => non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }, + :data => %{:historical_executions_end_block => non_neg_integer(), optional(any()) => any()}, + optional(any()) => any() + }) :: {:ok, non_neg_integer()} + def discover_historical_l1_messages_executions( + %{ + config: %{ + l1_rpc: l1_rpc_config, + l1_outbox_address: outbox_address, + l1_rollup_init_block: l1_rollup_init_block + }, + data: %{historical_executions_end_block: end_block} + } = _state + ) do + if end_block >= l1_rollup_init_block do + start_block = max(l1_rollup_init_block, end_block - l1_rpc_config.logs_block_range + 1) + + log_info("Block range for historical l2-to-l1 messages executions discovery: #{start_block}..#{end_block}") + + discover(outbox_address, start_block, end_block, l1_rpc_config) + + {:ok, start_block} + else + {:ok, l1_rollup_init_block} + end + end + + # Discovers and imports execution transactions for L2-to-L1 messages within a specified L1 block range. 
+ # + # This function fetches logs for `OutBoxTransactionExecuted` events within the + # specified L1 block range to discover new execution transactions. It processes + # these logs to extract execution details and associated lifecycle transactions, + # which are then imported into the database. For lifecycle timestamps not + # available in the logs, RPC calls to `eth_getBlockByNumber` are made to fetch + # the necessary data. Furthermore, the function checks unexecuted L2-to-L1 + # messages to match them with any recorded executions, updating their status to + # `:relayed` and establishing links with the corresponding lifecycle + # transactions. These updated messages are also imported into the database. + # + # ## Parameters + # - `outbox_address`: The address of the Arbitrum outbox contract to filter the + # logs. + # - `start_block`: The starting block number for log retrieval. + # - `end_block`: The ending block number for log retrieval. + # - `l1_rpc_config`: Configuration parameters including JSON RPC arguments and + # settings for processing the logs. + # + # ## Returns + # - N/A + defp discover(outbox_address, start_block, end_block, l1_rpc_config) do + logs = + get_logs_for_new_executions( + start_block, + end_block, + outbox_address, + l1_rpc_config.json_rpc_named_arguments + ) + + {lifecycle_txs, executions} = get_executions_from_logs(logs, l1_rpc_config) + + unless executions == [] do + log_info("Executions for #{length(executions)} L2 messages will be imported") + + {:ok, _} = + Chain.import(%{ + arbitrum_lifecycle_transactions: %{params: lifecycle_txs}, + arbitrum_l1_executions: %{params: executions}, + timeout: :infinity + }) + end + + # Inspects all unexecuted messages to potentially mark them as completed, + # addressing the scenario where found executions may correspond to messages + # that have not yet been indexed. 
This ensures that as soon as a new unexecuted + # message is added to the database, it can be marked as relayed, considering + # the execution transactions that have already been indexed. + messages = get_relayed_messages(end_block) + + unless messages == [] do + log_info("Marking #{length(messages)} l2-to-l1 messages as completed") + + {:ok, _} = + Chain.import(%{ + arbitrum_messages: %{params: messages}, + timeout: :infinity + }) + end + end + + # Retrieves logs representing `OutBoxTransactionExecuted` events between the specified blocks. + defp get_logs_for_new_executions(start_block, end_block, outbox_address, json_rpc_named_arguments) + when start_block <= end_block do + {:ok, logs} = + IndexerHelper.get_logs( + start_block, + end_block, + outbox_address, + [@outbox_transaction_executed_event], + json_rpc_named_arguments + ) + + if length(logs) > 0 do + log_debug("Found #{length(logs)} OutBoxTransactionExecuted logs") + end + + logs + end + + # Extracts and processes execution details from logs for L2-to-L1 message transactions. + # + # This function parses logs representing `OutBoxTransactionExecuted` events to + # extract basic execution details. It then requests block timestamps and + # associates them with the extracted executions, forming lifecycle transactions + # enriched with timestamps and finalization statuses. Subsequently, unique + # identifiers for the lifecycle transactions are determined, and the connection + # between execution records and lifecycle transactions is established. + # + # ## Parameters + # - `logs`: A collection of log entries to be processed. + # - `l1_rpc_config`: Configuration parameters including JSON RPC arguments, + # chunk size for RPC calls, and a flag indicating whether to track the + # finalization of transactions. + # + # ## Returns + # - A tuple containing: + # - A list of lifecycle transactions with updated timestamps, finalization + # statuses, and unique identifiers. 
+ # - A list of detailed execution information for L2-to-L1 messages. + # Both lists are prepared for database importation. + defp get_executions_from_logs( + logs, + %{ + json_rpc_named_arguments: json_rpc_named_arguments, + chunk_size: chunk_size, + track_finalization: track_finalization? + } = _l1_rpc_config + ) do + {basics_executions, basic_lifecycle_txs, blocks_requests} = parse_logs_for_new_executions(logs) + + blocks_to_ts = Rpc.execute_blocks_requests_and_get_ts(blocks_requests, json_rpc_named_arguments, chunk_size) + + lifecycle_txs = + basic_lifecycle_txs + |> ArbitrumHelper.extend_lifecycle_txs_with_ts_and_status(blocks_to_ts, track_finalization?) + |> Db.get_indices_for_l1_transactions() + + executions = + basics_executions + |> Enum.reduce([], fn execution, updated_executions -> + updated = + execution + |> Map.put(:execution_id, lifecycle_txs[execution.execution_tx_hash].id) + |> Map.drop([:execution_tx_hash]) + + [updated | updated_executions] + end) + + {Map.values(lifecycle_txs), executions} + end + + # Parses logs to extract new execution transactions for L2-to-L1 messages. + # + # This function processes log entries to identify `OutBoxTransactionExecuted` + # events, extracting the message ID, transaction hash, and block number for + # each. It accumulates this data into execution details, lifecycle + # transaction descriptions, and RPC requests for block information. These + # are then used in subsequent steps to finalize the execution status of the + # messages. + # + # ## Parameters + # - `logs`: A collection of log entries to be processed. + # + # ## Returns + # - A tuple containing: + # - `executions`: A list of details for execution transactions related to + # L2-to-L1 messages. + # - `lifecycle_txs`: A map of lifecycle transaction details, keyed by L1 + # transaction hash. + # - `blocks_requests`: A list of RPC requests for fetching block data where + # the executions occurred. 
+ defp parse_logs_for_new_executions(logs) do + {executions, lifecycle_txs, blocks_requests} = + logs + |> Enum.reduce({[], %{}, %{}}, fn event, {executions, lifecycle_txs, blocks_requests} -> + msg_id = outbox_transaction_executed_event_parse(event) + + l1_tx_hash_raw = event["transactionHash"] + l1_tx_hash = Rpc.string_hash_to_bytes_hash(l1_tx_hash_raw) + l1_blk_num = quantity_to_integer(event["blockNumber"]) + + updated_executions = [ + %{ + message_id: msg_id, + execution_tx_hash: l1_tx_hash + } + | executions + ] + + updated_lifecycle_txs = + Map.put( + lifecycle_txs, + l1_tx_hash, + %{hash: l1_tx_hash, block_number: l1_blk_num} + ) + + updated_blocks_requests = + Map.put( + blocks_requests, + l1_blk_num, + BlockByNumber.request(%{id: 0, number: l1_blk_num}, false, true) + ) + + log_debug("Execution for L2 message ##{msg_id} found in #{l1_tx_hash_raw}") + + {updated_executions, updated_lifecycle_txs, updated_blocks_requests} + end) + + {executions, lifecycle_txs, Map.values(blocks_requests)} + end + + # Parses `OutBoxTransactionExecuted` event data to extract the transaction index parameter + defp outbox_transaction_executed_event_parse(event) do + [transaction_index] = decode_data(event["data"], @outbox_transaction_executed_unindexed_params) + + transaction_index + end + + # Retrieves unexecuted messages from L2 to L1, marking them as completed if their + # corresponding execution transactions are identified. + # + # This function fetches messages confirmed on L1 up to the specified rollup block + # number and matches these messages with their corresponding execution transactions. + # For matched pairs, it updates the message status to `:relayed` and links them with + # the execution transactions. + # + # ## Parameters + # - `block_number`: The block number up to which messages are considered for + # completion. + # + # ## Returns + # - A list of messages marked as completed, ready for database import. 
+ defp get_relayed_messages(block_number) do + # Assuming that both catchup block fetcher and historical messages catchup fetcher + # will check all discovered historical messages to be marked as executed it is not + # needed to handle :initiated and :sent of historical messages here, only for + # new messages discovered and changed their status from `:sent` to `:confirmed` + confirmed_messages = Db.confirmed_l2_to_l1_messages(block_number) + + if Enum.empty?(confirmed_messages) do + [] + else + log_debug("Identified #{length(confirmed_messages)} l2-to-l1 messages already confirmed but not completed") + + messages_map = + confirmed_messages + |> Enum.reduce(%{}, fn msg, acc -> + Map.put(acc, msg.message_id, msg) + end) + + messages_map + |> Map.keys() + |> Db.l1_executions() + |> Enum.map(fn execution -> + messages_map + |> Map.get(execution.message_id) + |> Map.put(:completion_transaction_hash, execution.execution_transaction.hash.bytes) + |> Map.put(:status, :relayed) + end) + end + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex new file mode 100644 index 000000000000..b5ee6bfd9db4 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex @@ -0,0 +1,346 @@ +defmodule Indexer.Fetcher.Arbitrum.Workers.NewMessagesToL2 do + @moduledoc """ + Manages the discovery and processing of new and historical L1-to-L2 messages initiated on L1 for an Arbitrum rollup. + + This module is responsible for identifying and importing messages that are initiated + from Layer 1 (L1) to Arbitrum's Layer 2 (L2). It handles both new messages that are + currently being sent to L2 and historical messages that were sent in the past but + have not yet been processed by the system. + + The initiated messages are identified by analyzing logs associated with + `MessageDelivered` events emitted by the Arbitrum bridge contract. 
These logs + contain almost all the information required to compose the messages, except for the + originator's address, which is obtained by making an RPC call to get the transaction + details. + """ + + import EthereumJSONRPC, only: [quantity_to_integer: 1] + + import Explorer.Helper, only: [decode_data: 2] + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1, log_debug: 1] + + alias Indexer.Fetcher.Arbitrum.Utils.Rpc + alias Indexer.Helper, as: IndexerHelper + + alias Explorer.Chain + + require Logger + + @types_of_l1_messages_forwarded_to_l2 [3, 7, 9, 12] + + # keccak256("MessageDelivered(uint256,bytes32,address,uint8,address,bytes32,uint256,uint64)") + @message_delivered_event "0x5e3c1311ea442664e8b1611bfabef659120ea7a0a2cfc0667700bebc69cbffe1" + @message_delivered_event_unindexed_params [ + :address, + {:uint, 8}, + :address, + {:bytes, 32}, + {:uint, 256}, + {:uint, 64} + ] + + @doc """ + Discovers new L1-to-L2 messages initiated on L1 within a configured block range and processes them for database import. + + This function calculates the block range for discovering new messages from L1 to L2 + based on the latest block number available on the network. It then fetches logs + related to L1-to-L2 events within this range, extracts message details from both + the log and the corresponding L1 transaction, and imports them into the database. + + ## Parameters + - A map containing: + - `config`: Configuration settings including JSON RPC arguments for L1, Arbitrum + bridge address, RPC block range, and chunk size for RPC calls. + - `data`: Contains the starting block number for new L1-to-L2 message discovery. + + ## Returns + - `{:ok, end_block}`: On successful discovery and processing, where `end_block` + indicates the necessity to consider next block range in the + following iteration of new message discovery. + - `{:ok, start_block - 1}`: when no new blocks on L1 produced from the last + iteration of the new message discovery. 
+ """ + @spec discover_new_messages_to_l2(%{ + :config => %{ + :json_l1_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :l1_bridge_address => binary(), + :l1_rpc_block_range => non_neg_integer(), + :l1_rpc_chunk_size => non_neg_integer(), + optional(any()) => any() + }, + :data => %{ + :new_msg_to_l2_start_block => non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }) :: {:ok, non_neg_integer()} + def discover_new_messages_to_l2( + %{ + config: %{ + json_l1_rpc_named_arguments: json_rpc_named_arguments, + l1_rpc_chunk_size: chunk_size, + l1_rpc_block_range: rpc_block_range, + l1_bridge_address: bridge_address + }, + data: %{new_msg_to_l2_start_block: start_block} + } = _state + ) do + # Requesting the "latest" block instead of "safe" allows to get messages originated to L2 + # much earlier than they will be seen by the Arbitrum Sequencer. + {:ok, latest_block} = + IndexerHelper.get_block_number_by_tag( + "latest", + json_rpc_named_arguments, + Rpc.get_resend_attempts() + ) + + end_block = min(start_block + rpc_block_range - 1, latest_block) + + if start_block <= end_block do + log_info("Block range for discovery new messages from L1: #{start_block}..#{end_block}") + + discover( + bridge_address, + start_block, + end_block, + json_rpc_named_arguments, + chunk_size + ) + + {:ok, end_block} + else + {:ok, start_block - 1} + end + end + + @doc """ + Discovers historical L1-to-L2 messages initiated on L1 within the configured block range and processes them for database import. + + This function calculates the block range for message discovery and targets historical + messages from L1 to L2 by querying the specified block range on L1. The discovery is + conducted by fetching logs related to L1-to-L2 events, extracting message details + from both the log and the corresponding L1 transaction, and importing them into + the database. 
+ + ## Parameters + - A map containing: + - `config`: Configuration settings including JSON RPC arguments for L1, Arbitrum + bridge address, rollup initialization block, block range, and chunk + size for RPC calls. + - `data`: Contains the end block for historical L1-to-L2 message discovery. + + ## Returns + - `{:ok, start_block}`: On successful discovery and processing, where `start_block` + indicates the necessity to consider another block range in + the next iteration of message discovery. + - `{:ok, l1_rollup_init_block}`: If the discovery process already reached rollup + initialization block and no discovery action was + necessary. + """ + @spec discover_historical_messages_to_l2(%{ + :config => %{ + :json_l1_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + :l1_bridge_address => binary(), + :l1_rollup_init_block => non_neg_integer(), + :l1_rpc_block_range => non_neg_integer(), + :l1_rpc_chunk_size => non_neg_integer(), + optional(any()) => any() + }, + :data => %{ + :historical_msg_to_l2_end_block => non_neg_integer(), + optional(any()) => any() + }, + optional(any()) => any() + }) :: {:ok, non_neg_integer()} + def discover_historical_messages_to_l2( + %{ + config: %{ + json_l1_rpc_named_arguments: json_rpc_named_arguments, + l1_rpc_chunk_size: chunk_size, + l1_rpc_block_range: rpc_block_range, + l1_bridge_address: bridge_address, + l1_rollup_init_block: l1_rollup_init_block + }, + data: %{historical_msg_to_l2_end_block: end_block} + } = _state + ) do + if end_block >= l1_rollup_init_block do + start_block = max(l1_rollup_init_block, end_block - rpc_block_range + 1) + + log_info("Block range for discovery historical messages from L1: #{start_block}..#{end_block}") + + discover( + bridge_address, + start_block, + end_block, + json_rpc_named_arguments, + chunk_size + ) + + {:ok, start_block} + else + {:ok, l1_rollup_init_block} + end + end + + # Discovers and imports L1-to-L2 messages initiated on L1 within a specified block range. 
+ # + # This function discovers messages initiated on L1 for transferring information from L1 to L2 + # by retrieving relevant logs within the specified block range on L1, focusing on + # `MessageDelivered` events. It processes these logs to extract and construct message + # details. For information not present in the events, RPC calls are made to fetch additional + # transaction details. The discovered messages are then imported into the database. + # + # ## Parameters + # - `bridge_address`: The address of the Arbitrum bridge contract used to filter the logs. + # - `start_block`: The starting block number for log retrieval. + # - `end_block`: The ending block number for log retrieval. + # - `json_rpc_named_argument`: Configuration parameters for the JSON RPC connection. + # - `chunk_size`: The size of chunks for processing RPC calls in batches. + # + # ## Returns + # - N/A + defp discover(bridge_address, start_block, end_block, json_rpc_named_argument, chunk_size) do + logs = + get_logs_for_l1_to_l2_messages( + start_block, + end_block, + bridge_address, + json_rpc_named_argument + ) + + messages = get_messages_from_logs(logs, json_rpc_named_argument, chunk_size) + + unless messages == [] do + log_info("Origins of #{length(messages)} L1-to-L2 messages will be imported") + end + + {:ok, _} = + Chain.import(%{ + arbitrum_messages: %{params: messages}, + timeout: :infinity + }) + end + + # Retrieves logs representing the `MessageDelivered` events. + defp get_logs_for_l1_to_l2_messages(start_block, end_block, bridge_address, json_rpc_named_arguments) + when start_block <= end_block do + {:ok, logs} = + IndexerHelper.get_logs( + start_block, + end_block, + bridge_address, + [@message_delivered_event], + json_rpc_named_arguments + ) + + if length(logs) > 0 do + log_debug("Found #{length(logs)} MessageDelivered logs") + end + + logs + end + + # Extracts complete message details from the provided logs and prepares them for + # database insertion. 
+  #
+  # This function filters and parses the logs to identify L1-to-L2 messages,
+  # generating corresponding RPC requests to fetch additional transaction data.
+  # It executes these RPC requests to obtain the `from` address of each transaction.
+  # It then completes each message description by merging the fetched `from`
+  # address and setting the status to `:initiated`, making them ready for database
+  # import.
+  #
+  # ## Parameters
+  # - `logs`: A list of log entries to be processed.
+  # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection.
+  # - `chunk_size`: The size of chunks for batch processing transactions.
+  #
+  # ## Returns
+  # - A list of maps describing discovered messages compatible with the database
+  #   import operation.
+  defp get_messages_from_logs(logs, json_rpc_named_arguments, chunk_size) do
+    {messages, txs_requests} = parse_logs_for_l1_to_l2_messages(logs)
+
+    txs_to_from = Rpc.execute_transactions_requests_and_get_from(txs_requests, json_rpc_named_arguments, chunk_size)
+
+    Enum.map(messages, fn msg ->
+      Map.merge(msg, %{
+        originator_address: txs_to_from[msg.originating_transaction_hash],
+        status: :initiated
+      })
+    end)
+  end
+
+  # Parses logs to extract L1-to-L2 message details and prepares RPC requests for transaction data.
+  #
+  # This function processes log entries corresponding to `MessageDelivered` events, keeping only
+  # L1-to-L2 messages identified by one of the following message types: `3`, `7`, `9`, `12`.
+  # Utilizing information from both the transaction and the log, the function constructs maps
+  # that partially describe each message and prepares RPC `eth_getTransactionByHash` requests to fetch
+  # the remaining data needed to complete these message descriptions.
+  #
+  # ## Parameters
+  # - `logs`: A collection of log entries to be processed.
+  #
+  # ## Returns
+  # - A tuple comprising:
+  #   - `messages`: A list of maps, each containing an incomplete representation of a message.
+ # - `txs_requests`: A list of RPC request `eth_getTransactionByHash` structured to fetch + # additional data needed to finalize the message descriptions. + defp parse_logs_for_l1_to_l2_messages(logs) do + {messages, txs_requests} = + logs + |> Enum.reduce({[], %{}}, fn event, {messages, txs_requests} -> + {msg_id, type, ts} = message_delivered_event_parse(event) + + if type in @types_of_l1_messages_forwarded_to_l2 do + tx_hash = event["transactionHash"] + blk_num = quantity_to_integer(event["blockNumber"]) + + updated_messages = [ + %{ + direction: :to_l2, + message_id: msg_id, + originating_transaction_hash: tx_hash, + origination_timestamp: ts, + originating_transaction_block_number: blk_num + } + | messages + ] + + updated_txs_requests = + Map.put( + txs_requests, + tx_hash, + Rpc.transaction_by_hash_request(%{id: 0, hash: tx_hash}) + ) + + log_debug("L1 to L2 message #{tx_hash} found with the type #{type}") + + {updated_messages, updated_txs_requests} + else + {messages, txs_requests} + end + end) + + {messages, Map.values(txs_requests)} + end + + # Parses the `MessageDelivered` event to extract relevant message details. 
+ defp message_delivered_event_parse(event) do + [ + _inbox, + kind, + _sender, + _message_data_hash, + _base_fee_l1, + timestamp + ] = decode_data(event["data"], @message_delivered_event_unindexed_params) + + message_index = quantity_to_integer(Enum.at(event["topics"], 1)) + + {message_index, kind, Timex.from_unix(timestamp)} + end +end diff --git a/apps/indexer/lib/indexer/fetcher/optimism.ex b/apps/indexer/lib/indexer/fetcher/optimism.ex index 5da2eb1ca42f..10cbbee29d41 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism.ex @@ -159,6 +159,7 @@ defmodule Indexer.Fetcher.Optimism do non_neg_integer() ) :: {:ok, list()} | {:error, term()} def get_logs(from_block, to_block, address, topic0, json_rpc_named_arguments, retries) do + # TODO: use the function from the Indexer.Helper module processed_from_block = if is_integer(from_block), do: integer_to_quantity(from_block), else: from_block processed_to_block = if is_integer(to_block), do: integer_to_quantity(to_block), else: to_block diff --git a/apps/indexer/lib/indexer/fetcher/polygon_edge.ex b/apps/indexer/lib/indexer/fetcher/polygon_edge.ex index 00196467bac8..73a42f83ac9e 100644 --- a/apps/indexer/lib/indexer/fetcher/polygon_edge.ex +++ b/apps/indexer/lib/indexer/fetcher/polygon_edge.ex @@ -516,6 +516,7 @@ defmodule Indexer.Fetcher.PolygonEdge do non_neg_integer() ) :: {:ok, list()} | {:error, term()} def get_logs(from_block, to_block, address, topic0, json_rpc_named_arguments, retries) do + # TODO: use the function from the Indexer.Helper module processed_from_block = if is_integer(from_block), do: integer_to_quantity(from_block), else: from_block processed_to_block = if is_integer(to_block), do: integer_to_quantity(to_block), else: to_block diff --git a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex index 17c77dc98b5b..98c8ef53b7f0 100644 --- 
a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex +++ b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex @@ -21,8 +21,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do alias EthereumJSONRPC.Logs alias Explorer.Chain alias Explorer.Chain.PolygonZkevm.Reader - alias Explorer.SmartContract.Reader, as: SmartContractReader - alias Indexer.Helper + alias Indexer.Helper, as: IndexerHelper alias Indexer.Transform.Addresses # 32-byte signature of the event BridgeEvent(uint8 leafType, uint32 originNetwork, address originAddress, uint32 destinationNetwork, address destinationAddress, uint256 amount, bytes metadata, uint32 depositCount) @@ -68,8 +67,11 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do @spec filter_bridge_events(list(), binary()) :: list() def filter_bridge_events(events, bridge_contract) do Enum.filter(events, fn event -> - Helper.address_hash_to_string(event.address_hash, true) == bridge_contract and - Enum.member?([@bridge_event, @claim_event_v1, @claim_event_v2], Helper.log_topic_to_string(event.first_topic)) + IndexerHelper.address_hash_to_string(event.address_hash, true) == bridge_contract and + Enum.member?( + [@bridge_event, @claim_event_v1, @claim_event_v2], + IndexerHelper.log_topic_to_string(event.first_topic) + ) end) end @@ -111,7 +113,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do error_message = &"Cannot fetch logs for the block range #{from_block}..#{to_block}. 
Error: #{inspect(&1)}" - Helper.repeated_call(&json_rpc/2, [req, json_rpc_named_arguments], error_message, retries) + IndexerHelper.repeated_call(&json_rpc/2, [req, json_rpc_named_arguments], error_message, retries) end @doc """ @@ -239,7 +241,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do defp blocks_to_timestamps(events, json_rpc_named_arguments) do events - |> Helper.get_blocks_by_events(json_rpc_named_arguments, 100_000_000) + |> IndexerHelper.get_blocks_by_events(json_rpc_named_arguments, 100_000_000) |> Enum.reduce(%{}, fn block, acc -> block_number = quantity_to_integer(Map.get(block, "number")) timestamp = timestamp_to_datetime(Map.get(block, "timestamp")) @@ -384,14 +386,16 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do tokens_not_inserted = tokens_to_insert |> Enum.reject(fn token -> - Enum.any?(tokens_inserted, fn inserted -> token.address == Helper.address_hash_to_string(inserted.address) end) + Enum.any?(tokens_inserted, fn inserted -> + token.address == IndexerHelper.address_hash_to_string(inserted.address) + end) end) |> Enum.map(& &1.address) tokens_inserted_outside = Reader.token_addresses_to_ids_from_db(tokens_not_inserted) tokens_inserted - |> Enum.reduce(%{}, fn t, acc -> Map.put(acc, Helper.address_hash_to_string(t.address), t.id) end) + |> Enum.reduce(%{}, fn t, acc -> Map.put(acc, IndexerHelper.address_hash_to_string(t.address), t.id) end) |> Map.merge(tokens_existing) |> Map.merge(tokens_inserted_outside) end @@ -429,7 +433,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do if status == :ok do response = parse_response(response) - address = Helper.address_hash_to_string(request.contract_address, true) + address = IndexerHelper.address_hash_to_string(request.contract_address, true) new_data = get_new_data(token_data_acc[address] || %{}, request, response) @@ -455,7 +459,8 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do end) |> List.flatten() - {responses, error_messages} = read_contracts_with_retries(requests, @erc20_abi, 
json_rpc_named_arguments, 3) + {responses, error_messages} = + IndexerHelper.read_contracts_with_retries(requests, @erc20_abi, json_rpc_named_arguments, 3) if not Enum.empty?(error_messages) or Enum.count(requests) != Enum.count(responses) do Logger.warning( @@ -466,33 +471,6 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do {requests, responses} end - defp read_contracts_with_retries(requests, abi, json_rpc_named_arguments, retries_left) when retries_left > 0 do - responses = SmartContractReader.query_contracts(requests, abi, json_rpc_named_arguments: json_rpc_named_arguments) - - error_messages = - Enum.reduce(responses, [], fn {status, error_message}, acc -> - acc ++ - if status == :error do - [error_message] - else - [] - end - end) - - if Enum.empty?(error_messages) do - {responses, []} - else - retries_left = retries_left - 1 - - if retries_left == 0 do - {responses, Enum.uniq(error_messages)} - else - :timer.sleep(1000) - read_contracts_with_retries(requests, abi, json_rpc_named_arguments, retries_left) - end - end - end - defp get_new_data(data, request, response) do if atomized_key(request.method_id) == :symbol do Map.put(data, :symbol, response) diff --git a/apps/indexer/lib/indexer/fetcher/zksync/utils/db.ex b/apps/indexer/lib/indexer/fetcher/zksync/utils/db.ex index 12f7e51ba986..64eedeea9671 100644 --- a/apps/indexer/lib/indexer/fetcher/zksync/utils/db.ex +++ b/apps/indexer/lib/indexer/fetcher/zksync/utils/db.ex @@ -135,6 +135,8 @@ defmodule Indexer.Fetcher.ZkSync.Utils.Db do `zksync_lifecycle_l1_transactions` table. 
""" @spec get_indices_for_l1_transactions(map()) :: any() + # TODO: consider a way to remove duplicate with Arbitrum.Utils.Db + # credo:disable-for-next-line Credo.Check.Design.DuplicatedCode def get_indices_for_l1_transactions(new_l1_txs) when is_map(new_l1_txs) do # Get indices for l1 transactions previously handled diff --git a/apps/indexer/lib/indexer/fetcher/zksync/utils/rpc.ex b/apps/indexer/lib/indexer/fetcher/zksync/utils/rpc.ex index 282d60b35146..f343eb4673c3 100644 --- a/apps/indexer/lib/indexer/fetcher/zksync/utils/rpc.ex +++ b/apps/indexer/lib/indexer/fetcher/zksync/utils/rpc.ex @@ -84,16 +84,16 @@ defmodule Indexer.Fetcher.ZkSync.Utils.Rpc do end end - defp json_txid_to_hash(hash) do + defp json_tx_id_to_hash(hash) do case hash do "0x" <> tx_hash -> tx_hash nil -> @zero_hash end end - defp strhash_to_byteshash(hash) do + defp string_hash_to_bytes_hash(hash) do hash - |> json_txid_to_hash() + |> json_tx_id_to_hash() |> Base.decode16!(case: :mixed) end @@ -139,8 +139,8 @@ defmodule Indexer.Fetcher.ZkSync.Utils.Rpc do case transform_type do :iso8601_to_datetime -> from_iso8601_to_datetime(value_in_json_response) :ts_to_datetime -> from_ts_to_datetime(value_in_json_response) - :str_to_txhash -> json_txid_to_hash(value_in_json_response) - :str_to_byteshash -> strhash_to_byteshash(value_in_json_response) + :str_to_txhash -> json_tx_id_to_hash(value_in_json_response) + :str_to_byteshash -> string_hash_to_bytes_hash(value_in_json_response) _ -> value_in_json_response end ) diff --git a/apps/indexer/lib/indexer/helper.ex b/apps/indexer/lib/indexer/helper.ex index 08552d626795..d79501c2bb79 100644 --- a/apps/indexer/lib/indexer/helper.ex +++ b/apps/indexer/lib/indexer/helper.ex @@ -10,12 +10,14 @@ defmodule Indexer.Helper do fetch_block_number_by_tag: 2, json_rpc: 2, quantity_to_integer: 1, + integer_to_quantity: 1, request: 1 ] alias EthereumJSONRPC.Block.ByNumber - alias EthereumJSONRPC.Blocks + alias EthereumJSONRPC.{Blocks, Transport} alias 
Explorer.Chain.Hash + alias Explorer.SmartContract.Reader, as: ContractReader @finite_retries_number 3 @infinite_retries_number 100_000_000 @@ -88,7 +90,19 @@ defmodule Indexer.Helper do end end - defp get_safe_block(json_rpc_named_arguments) do + @doc """ + Retrieves the safe block if the endpoint supports such an interface; otherwise, it requests the latest block. + + ## Parameters + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + + ## Returns + `{block_num, latest}`: A tuple where + - `block_num` is the safe or latest block number. + - `latest` is a boolean, where `true` indicates that `block_num` is the latest block number fetched using the tag `latest`. + """ + @spec get_safe_block(EthereumJSONRPC.json_rpc_named_arguments()) :: {non_neg_integer(), boolean()} + def get_safe_block(json_rpc_named_arguments) do case get_block_number_by_tag("safe", json_rpc_named_arguments) do {:ok, safe_block} -> {safe_block, false} @@ -154,6 +168,70 @@ defmodule Indexer.Helper do ] end + @doc """ + Retrieves event logs from Ethereum-like blockchains within a specified block + range for a given address and set of topics using JSON-RPC. + + ## Parameters + - `from_block`: The starting block number (integer or hexadecimal string) for the log search. + - `to_block`: The ending block number (integer or hexadecimal string) for the log search. + - `address`: The address of the contract to filter logs from. + - `topics`: List of topics to filter the logs. + - `json_rpc_named_arguments`: Configuration for the JSON-RPC call. + - `id`: (optional) JSON-RPC request identifier, defaults to 0. + - `retries`: (optional) Number of retry attempts if the request fails, defaults to 3. + + ## Returns + - `{:ok, logs}` on successful retrieval of logs. + - `{:error, reason}` if the request fails after all retries. 
+ """ + @spec get_logs( + non_neg_integer() | binary(), + non_neg_integer() | binary(), + binary(), + [binary()], + EthereumJSONRPC.json_rpc_named_arguments() + ) :: {:error, atom() | binary() | map()} | {:ok, any()} + @spec get_logs( + non_neg_integer() | binary(), + non_neg_integer() | binary(), + binary(), + [binary()], + EthereumJSONRPC.json_rpc_named_arguments(), + integer() + ) :: {:error, atom() | binary() | map()} | {:ok, any()} + @spec get_logs( + non_neg_integer() | binary(), + non_neg_integer() | binary(), + binary(), + [binary()], + EthereumJSONRPC.json_rpc_named_arguments(), + integer(), + non_neg_integer() + ) :: {:error, atom() | binary() | map()} | {:ok, any()} + def get_logs(from_block, to_block, address, topics, json_rpc_named_arguments, id \\ 0, retries \\ 3) do + processed_from_block = if is_integer(from_block), do: integer_to_quantity(from_block), else: from_block + processed_to_block = if is_integer(to_block), do: integer_to_quantity(to_block), else: to_block + + req = + request(%{ + id: id, + method: "eth_getLogs", + params: [ + %{ + :fromBlock => processed_from_block, + :toBlock => processed_to_block, + :address => address, + :topics => topics + } + ] + }) + + error_message = &"Cannot fetch logs for the block range #{from_block}..#{to_block}. Error: #{inspect(&1)}" + + repeated_call(&json_rpc/2, [req, json_rpc_named_arguments], error_message, retries) + end + @doc """ Prints a log of progress when handling something splitted to block chunks. """ @@ -204,11 +282,170 @@ defmodule Indexer.Helper do end @doc """ - Calls the given function with the given arguments - until it returns {:ok, any()} or the given attempts number is reached. - Pauses execution between invokes for 3..1200 seconds (depending on the number of retries). + Retrieves decoded results of `eth_call` requests to contracts, with retry logic for handling errors. 
+ + The function attempts the specified number of retries, with a progressive delay between + each retry, for each `eth_call` request. If, after all retries, some requests remain + unsuccessful, it returns a list of unique error messages encountered. + + ## Parameters + - `requests`: A list of `EthereumJSONRPC.Contract.call()` instances describing the parameters + for `eth_call`, including the contract address and method selector. + - `abi`: A list of maps providing the ABI that describes the input parameters and output + format for the contract functions. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + - `retries_left`: The number of retries allowed for any `eth_call` that returns an error. + + ## Returns + - `{responses, errors}` where: + - `responses`: A list of tuples `{status, result}`, where `result` is the decoded response + from the corresponding `eth_call` if `status` is `:ok`, or the error message + if `status` is `:error`. + - `errors`: A list of error messages, if any element in `responses` contains `:error`. """ - @spec repeated_call((... -> any()), list(), (... 
-> any()), non_neg_integer()) :: + @spec read_contracts_with_retries( + [EthereumJSONRPC.Contract.call()], + [map()], + EthereumJSONRPC.json_rpc_named_arguments(), + integer() + ) :: {[{:ok | :error, any()}], list()} + def read_contracts_with_retries(requests, abi, json_rpc_named_arguments, retries_left) + when is_list(requests) and is_list(abi) and is_integer(retries_left) do + do_read_contracts_with_retries(requests, abi, json_rpc_named_arguments, retries_left, 0) + end + + defp do_read_contracts_with_retries(requests, abi, json_rpc_named_arguments, retries_left, retries_done) do + responses = ContractReader.query_contracts(requests, abi, json_rpc_named_arguments: json_rpc_named_arguments) + + error_messages = + Enum.reduce(responses, [], fn {status, error_message}, acc -> + acc ++ + if status == :error do + [error_message] + else + [] + end + end) + + if error_messages == [] do + {responses, []} + else + retries_left = retries_left - 1 + + if retries_left <= 0 do + {responses, Enum.uniq(error_messages)} + else + Logger.error("#{List.first(error_messages)}. Retrying...") + pause_before_retry(retries_done) + do_read_contracts_with_retries(requests, abi, json_rpc_named_arguments, retries_left, retries_done + 1) + end + end + end + + @doc """ + Executes a batch of RPC calls with retry logic for handling errors. + + This function performs a batch of RPC calls, retrying a specified number of times + with a progressive delay between each attempt up to a maximum (20 minutes). If, + after all retries, some calls remain unsuccessful, it returns the batch responses, + which include the results of successful calls or error descriptions. + + ## Parameters + - `requests`: A list of `Transport.request()` instances describing the RPC calls. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + - `error_message_generator`: A function that generates a string containing the error + message returned by the RPC call. 
+ - `retries_left`: The number of retries allowed for any RPC call that returns an error. + + ## Returns + - `{:ok, responses}`: When all calls are successful, `responses` is a list of standard + JSON responses, each including `id` and `result` fields. + - `{:error, responses}`: When some calls fail, `responses` is a list containing either + standard JSON responses for successful calls (including `id` + and `result` fields) or errors, which may be in an unassured + format. + """ + @spec repeated_batch_rpc_call([Transport.request()], EthereumJSONRPC.json_rpc_named_arguments(), fun(), integer()) :: + {:error, any()} | {:ok, any()} + def repeated_batch_rpc_call(requests, json_rpc_named_arguments, error_message_generator, retries_left) + when is_list(requests) and is_function(error_message_generator) and is_integer(retries_left) do + do_repeated_batch_rpc_call(requests, json_rpc_named_arguments, error_message_generator, retries_left, 0) + end + + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + defp do_repeated_batch_rpc_call( + requests, + json_rpc_named_arguments, + error_message_generator, + retries_left, + retries_done + ) do + case json_rpc(requests, json_rpc_named_arguments) do + {:ok, responses_list} = batch_responses -> + standardized_error = + Enum.reduce_while(responses_list, %{}, fn one_response, acc -> + # credo:disable-for-next-line Credo.Check.Refactor.Nesting + case one_response do + %{error: error_msg_with_code} -> {:halt, error_msg_with_code} + _ -> {:cont, acc} + end + end) + + case standardized_error do + %{code: _, message: error_msg} -> {:error, error_msg, batch_responses} + _ -> {:ok, batch_responses, []} + end + + {:error, message} = err -> + {:error, message, err} + end + |> case do + # credo:disable-for-previous-line Credo.Check.Refactor.PipeChainStart + {:ok, responses, _} -> + responses + + {:error, message, responses_or_error} -> + retries_left = retries_left - 1 + + if retries_left <= 0 do + 
Logger.error(error_message_generator.(message)) + responses_or_error + else + Logger.error("#{error_message_generator.(message)} Retrying...") + pause_before_retry(retries_done) + + do_repeated_batch_rpc_call( + requests, + json_rpc_named_arguments, + error_message_generator, + retries_left, + retries_done + 1 + ) + end + end + end + + @doc """ + Repeatedly executes a specified function with given arguments until it succeeds + or reaches the limit of retry attempts. It pauses between retries, with the + pause duration increasing progressively up to a maximum (20 minutes). + + The main intent of the function is to robustly handle RPC calls that may fail. + + ## Parameters + - `func`: The function to be called. + - `args`: List of arguments to pass to the function. + - `error_message`: A function that takes an error message and returns a log message. + - `retries_left`: The number of attempts left. + - `retries_done`: (optional) The number of attempts already made, defaults to 0. + + ## Returns + - `{:ok, result}` on success. + - `{:error, reason}` if retries are exhausted without success. + """ + @spec repeated_call(function(), list(), function(), non_neg_integer()) :: + {:ok, any()} | {:error, binary() | atom() | map()} + @spec repeated_call(function(), list(), function(), non_neg_integer(), non_neg_integer()) :: {:ok, any()} | {:error, binary() | atom() | map()} def repeated_call(func, args, error_message, retries_left, retries_done \\ 0) do case apply(func, args) do @@ -223,10 +460,7 @@ defmodule Indexer.Helper do err else Logger.error("#{error_message.(message)} Retrying...") - - # wait up to 20 minutes - :timer.sleep(min(3000 * Integer.pow(2, retries_done), 1_200_000)) - + pause_before_retry(retries_done) repeated_call(func, args, error_message, retries_left, retries_done + 1) end end @@ -307,4 +541,9 @@ defmodule Indexer.Helper do Hash.to_string(topic) end end + + # Pauses the process, incrementally increasing the sleep time up to a maximum of 20 minutes. 
+ defp pause_before_retry(retries_done) do + :timer.sleep(min(3000 * Integer.pow(2, retries_done), 1_200_000)) + end end diff --git a/apps/indexer/lib/indexer/supervisor.ex b/apps/indexer/lib/indexer/supervisor.ex index 8b56c378dd37..ecb32417fbbe 100644 --- a/apps/indexer/lib/indexer/supervisor.ex +++ b/apps/indexer/lib/indexer/supervisor.ex @@ -46,6 +46,9 @@ defmodule Indexer.Supervisor do Withdrawal } + alias Indexer.Fetcher.Arbitrum.RollupMessagesCatchup, as: ArbitrumRollupMessagesCatchup + alias Indexer.Fetcher.Arbitrum.TrackingBatchesStatuses, as: ArbitrumTrackingBatchesStatuses + alias Indexer.Fetcher.Arbitrum.TrackingMessagesOnL1, as: ArbitrumTrackingMessagesOnL1 alias Indexer.Fetcher.ZkSync.BatchesStatusTracker, as: ZkSyncBatchesStatusTracker alias Indexer.Fetcher.ZkSync.TransactionBatch, as: ZkSyncTransactionBatch @@ -177,6 +180,15 @@ defmodule Indexer.Supervisor do configure(Indexer.Fetcher.PolygonZkevm.TransactionBatch.Supervisor, [ [json_rpc_named_arguments: json_rpc_named_arguments, memory_monitor: memory_monitor] ]), + configure(ArbitrumTrackingMessagesOnL1.Supervisor, [ + [json_rpc_named_arguments: json_rpc_named_arguments, memory_monitor: memory_monitor] + ]), + configure(ArbitrumTrackingBatchesStatuses.Supervisor, [ + [json_rpc_named_arguments: json_rpc_named_arguments, memory_monitor: memory_monitor] + ]), + configure(ArbitrumRollupMessagesCatchup.Supervisor, [ + [json_rpc_named_arguments: json_rpc_named_arguments, memory_monitor: memory_monitor] + ]), {Indexer.Fetcher.Beacon.Blob.Supervisor, [[memory_monitor: memory_monitor]]}, # Out-of-band fetchers diff --git a/apps/indexer/lib/indexer/transform/arbitrum/messaging.ex b/apps/indexer/lib/indexer/transform/arbitrum/messaging.ex new file mode 100644 index 000000000000..f33c327082eb --- /dev/null +++ b/apps/indexer/lib/indexer/transform/arbitrum/messaging.ex @@ -0,0 +1,44 @@ +defmodule Indexer.Transform.Arbitrum.Messaging do + @moduledoc """ + Helper functions for transforming data for Arbitrum 
cross-chain messages. + """ + + alias Indexer.Fetcher.Arbitrum.Messaging, as: ArbitrumMessages + + require Logger + + @doc """ + Parses and combines lists of rollup transactions and logs to identify and process both L1-to-L2 and L2-to-L1 messages. + + This function utilizes two filtering operations: one that identifies L1-to-L2 + message completions from a list of transactions and another that identifies + L2-to-L1 message initiations from a list of logs. Each filter constructs + a detailed message structure for the respective direction. The function then + combines these messages into a single list suitable for database import. + + ## Parameters + - `transactions`: A list of rollup transaction entries to filter for L1-to-L2 messages. + - `logs`: A list of log entries to filter for L2-to-L1 messages. + + ## Returns + - A combined list of detailed message maps from both L1-to-L2 completions and + L2-to-L1 initiations, ready for database import. + """ + @spec parse(list(), list()) :: list() + def parse(transactions, logs) do + prev_metadata = Logger.metadata() + Logger.metadata(fetcher: :arbitrum_bridge_l2) + + l1_to_l2_completion_ops = + transactions + |> ArbitrumMessages.filter_l1_to_l2_messages() + + l2_to_l1_initiating_ops = + logs + |> ArbitrumMessages.filter_l2_to_l1_messages() + + Logger.reset_metadata(prev_metadata) + + l1_to_l2_completion_ops ++ l2_to_l1_initiating_ops + end +end diff --git a/apps/indexer/lib/indexer/transform/transaction_actions.ex b/apps/indexer/lib/indexer/transform/transaction_actions.ex index 44d782e77586..9d525469b6db 100644 --- a/apps/indexer/lib/indexer/transform/transaction_actions.ex +++ b/apps/indexer/lib/indexer/transform/transaction_actions.ex @@ -13,8 +13,7 @@ defmodule Indexer.Transform.TransactionActions do alias Explorer.Chain.Cache.{TransactionActionTokensData, TransactionActionUniswapPools} alias Explorer.Chain.{Address, Hash, Token, TransactionAction} alias Explorer.Repo - alias Explorer.SmartContract.Reader - alias 
Indexer.Helper + alias Indexer.Helper, as: IndexerHelper @mainnet 1 @goerli 5 @@ -198,7 +197,7 @@ defmodule Indexer.Transform.TransactionActions do @aave_v3_liquidation_call_event ], sanitize_first_topic(log.first_topic) - ) && Helper.address_hash_to_string(log.address_hash, true) == pool_address + ) && IndexerHelper.address_hash_to_string(log.address_hash, true) == pool_address end) end @@ -290,12 +289,12 @@ defmodule Indexer.Transform.TransactionActions do debt_address = log.third_topic - |> Helper.log_topic_to_string() + |> IndexerHelper.log_topic_to_string() |> truncate_address_hash() collateral_address = log.second_topic - |> Helper.log_topic_to_string() + |> IndexerHelper.log_topic_to_string() |> truncate_address_hash() case get_token_data([debt_address, collateral_address]) do @@ -330,7 +329,7 @@ defmodule Indexer.Transform.TransactionActions do when type in ["borrow", "supply", "withdraw", "repay", "flash_loan"] do address = address_topic - |> Helper.log_topic_to_string() + |> IndexerHelper.log_topic_to_string() |> truncate_address_hash() case get_token_data([address]) do @@ -360,7 +359,7 @@ defmodule Indexer.Transform.TransactionActions do defp aave_handle_event(type, log, address_topic, chain_id) when type in ["enable_collateral", "disable_collateral"] do address = address_topic - |> Helper.log_topic_to_string() + |> IndexerHelper.log_topic_to_string() |> truncate_address_hash() case get_token_data([address]) do @@ -415,7 +414,7 @@ defmodule Indexer.Transform.TransactionActions do first_topic ) || (first_topic == @uniswap_v3_transfer_nft_event && - Helper.address_hash_to_string(log.address_hash, true) == uniswap_v3_positions_nft) + IndexerHelper.address_hash_to_string(log.address_hash, true) == uniswap_v3_positions_nft) end) end @@ -424,7 +423,7 @@ defmodule Indexer.Transform.TransactionActions do with false <- first_topic == @uniswap_v3_transfer_nft_event, # check UniswapV3Pool contract is legitimate - pool_address <- 
Helper.address_hash_to_string(log.address_hash, true), + pool_address <- IndexerHelper.address_hash_to_string(log.address_hash, true), false <- is_nil(legitimate[pool_address]), false <- Enum.empty?(legitimate[pool_address]), # this is legitimate uniswap pool, so handle this event @@ -466,19 +465,19 @@ defmodule Indexer.Transform.TransactionActions do # This is Transfer event for NFT from = log.second_topic - |> Helper.log_topic_to_string() + |> IndexerHelper.log_topic_to_string() |> truncate_address_hash() # credo:disable-for-next-line if from == burn_address_hash_string() do to = log.third_topic - |> Helper.log_topic_to_string() + |> IndexerHelper.log_topic_to_string() |> truncate_address_hash() [token_id] = log.fourth_topic - |> Helper.log_topic_to_string() + |> IndexerHelper.log_topic_to_string() |> decode_data([{:uint, 256}]) mint_nft_ids = Map.put_new(acc, to, %{ids: [], log_index: log.index}) @@ -614,7 +613,7 @@ defmodule Indexer.Transform.TransactionActions do sanitize_first_topic(log.first_topic) != @uniswap_v3_transfer_nft_event end) |> Enum.reduce(addresses_acc, fn log, acc -> - pool_address = Helper.address_hash_to_string(log.address_hash, true) + pool_address = IndexerHelper.address_hash_to_string(log.address_hash, true) Map.put(acc, pool_address, true) end) end) @@ -680,10 +679,14 @@ defmodule Indexer.Transform.TransactionActions do end) |> Enum.map(fn {pool_address, pool} -> token0 = - if Helper.address_correct?(pool.token0), do: String.downcase(pool.token0), else: burn_address_hash_string() + if IndexerHelper.address_correct?(pool.token0), + do: String.downcase(pool.token0), + else: burn_address_hash_string() token1 = - if Helper.address_correct?(pool.token1), do: String.downcase(pool.token1), else: burn_address_hash_string() + if IndexerHelper.address_correct?(pool.token1), + do: String.downcase(pool.token1), + else: burn_address_hash_string() fee = if pool.fee == "", do: 0, else: pool.fee @@ -696,10 +699,7 @@ defmodule 
Indexer.Transform.TransactionActions do } end) - max_retries = Application.get_env(:explorer, :token_functions_reader_max_retries) - - {responses_get_pool, error_messages} = - read_contracts_with_retries(requests_get_pool, @uniswap_v3_factory_abi, max_retries) + {responses_get_pool, error_messages} = read_contracts(requests_get_pool, @uniswap_v3_factory_abi) if not Enum.empty?(error_messages) or Enum.count(requests_get_pool) != Enum.count(responses_get_pool) do Logger.error( @@ -727,9 +727,7 @@ defmodule Indexer.Transform.TransactionActions do end) |> List.flatten() - max_retries = Application.get_env(:explorer, :token_functions_reader_max_retries) - - {responses, error_messages} = read_contracts_with_retries(requests, @uniswap_v3_pool_abi, max_retries) + {responses, error_messages} = read_contracts(requests, @uniswap_v3_pool_abi) if not Enum.empty?(error_messages) do incorrect_pools = uniswap_get_incorrect_pools(requests, responses) @@ -959,8 +957,7 @@ defmodule Indexer.Transform.TransactionActions do end) |> List.flatten() - max_retries = Application.get_env(:explorer, :token_functions_reader_max_retries) - {responses, error_messages} = read_contracts_with_retries(requests, @erc20_abi, max_retries) + {responses, error_messages} = read_contracts(requests, @erc20_abi) if not Enum.empty?(error_messages) or Enum.count(requests) != Enum.count(responses) do Logger.warning( @@ -976,34 +973,15 @@ defmodule Indexer.Transform.TransactionActions do |> Enum.group_by(& &1.transaction_hash) end - defp read_contracts_with_retries(requests, abi, retries_left) when retries_left > 0 do - responses = Reader.query_contracts(requests, abi) - - error_messages = - Enum.reduce(responses, [], fn {status, error_message}, acc -> - acc ++ - if status == :error do - [error_message] - else - [] - end - end) - - if Enum.empty?(error_messages) do - {responses, []} - else - retries_left = retries_left - 1 + defp read_contracts(requests, abi) do + max_retries = Application.get_env(:explorer, 
:token_functions_reader_max_retries) + json_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) - if retries_left == 0 do - {responses, Enum.uniq(error_messages)} - else - read_contracts_with_retries(requests, abi, retries_left) - end - end + IndexerHelper.read_contracts_with_retries(requests, abi, json_rpc_named_arguments, max_retries) end defp sanitize_first_topic(first_topic) do - if is_nil(first_topic), do: "", else: String.downcase(Helper.log_topic_to_string(first_topic)) + if is_nil(first_topic), do: "", else: String.downcase(IndexerHelper.log_topic_to_string(first_topic)) end defp truncate_address_hash(nil), do: burn_address_hash_string() diff --git a/config/config_helper.exs b/config/config_helper.exs index 46f37b5ee7a5..bccd722b8b78 100644 --- a/config/config_helper.exs +++ b/config/config_helper.exs @@ -21,6 +21,7 @@ defmodule ConfigHelper do :filecoin -> base_repos ++ [Explorer.Repo.Filecoin] :stability -> base_repos ++ [Explorer.Repo.Stability] :zksync -> base_repos ++ [Explorer.Repo.ZkSync] + :arbitrum -> base_repos ++ [Explorer.Repo.Arbitrum] _ -> base_repos end diff --git a/config/runtime.exs b/config/runtime.exs index bd08ca5b8bfb..a9fb04c4dfba 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -249,7 +249,7 @@ precompiled_config_base_dir = precompiled_config_default_path = case ConfigHelper.chain_type() do - "arbitrum" -> "#{precompiled_config_base_dir}config/assets/precompiles-arbitrum.json" + :arbitrum -> "#{precompiled_config_base_dir}config/assets/precompiles-arbitrum.json" _ -> nil end @@ -852,6 +852,47 @@ config :indexer, Indexer.Fetcher.ZkSync.BatchesStatusTracker, config :indexer, Indexer.Fetcher.ZkSync.BatchesStatusTracker.Supervisor, enabled: ConfigHelper.parse_bool_env_var("INDEXER_ZKSYNC_BATCHES_ENABLED") +config :indexer, Indexer.Fetcher.Arbitrum.Messaging, + arbsys_contract: + ConfigHelper.safe_get_env("INDEXER_ARBITRUM_ARBSYS_CONTRACT", "0x0000000000000000000000000000000000000064") + +config 
:indexer, Indexer.Fetcher.Arbitrum, + l1_rpc: System.get_env("INDEXER_ARBITRUM_L1_RPC"), + l1_rpc_chunk_size: ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_L1_RPC_CHUNK_SIZE", 20), + l1_rpc_block_range: ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_L1_RPC_HISTORICAL_BLOCKS_RANGE", 1000), + l1_rollup_address: System.get_env("INDEXER_ARBITRUM_L1_ROLLUP_CONTRACT"), + l1_rollup_init_block: ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_L1_ROLLUP_INIT_BLOCK", 1), + l1_start_block: ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_L1_COMMON_START_BLOCK", 0), + rollup_chunk_size: ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_ROLLUP_CHUNK_SIZE", 20) + +config :indexer, Indexer.Fetcher.Arbitrum.TrackingMessagesOnL1, + recheck_interval: ConfigHelper.parse_time_env_var("INDEXER_ARBITRUM_TRACKING_MESSAGES_ON_L1_RECHECK_INTERVAL", "20s") + +config :indexer, Indexer.Fetcher.Arbitrum.TrackingMessagesOnL1.Supervisor, + enabled: ConfigHelper.parse_bool_env_var("INDEXER_ARBITRUM_BRIDGE_MESSAGES_TRACKING_ENABLED") + +config :indexer, Indexer.Fetcher.Arbitrum.TrackingBatchesStatuses, + recheck_interval: ConfigHelper.parse_time_env_var("INDEXER_ARBITRUM_BATCHES_TRACKING_RECHECK_INTERVAL", "20s"), + track_l1_tx_finalization: + ConfigHelper.parse_bool_env_var("INDEXER_ARBITRUM_BATCHES_TRACKING_L1_FINALIZATION_CHECK_ENABLED", "false"), + messages_to_blocks_shift: + ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_BATCHES_TRACKING_MESSAGES_TO_BLOCKS_SHIFT", 0), + finalized_confirmations: ConfigHelper.parse_bool_env_var("INDEXER_ARBITRUM_CONFIRMATIONS_TRACKING_FINALIZED", "true"), + new_batches_limit: ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_NEW_BATCHES_LIMIT", 10) + +config :indexer, Indexer.Fetcher.Arbitrum.TrackingBatchesStatuses.Supervisor, + enabled: ConfigHelper.parse_bool_env_var("INDEXER_ARBITRUM_BATCHES_TRACKING_ENABLED") + +config :indexer, Indexer.Fetcher.Arbitrum.RollupMessagesCatchup, + recheck_interval: 
ConfigHelper.parse_time_env_var("INDEXER_ARBITRUM_MISSED_MESSAGES_RECHECK_INTERVAL", "1h"), + messages_to_l2_blocks_depth: + ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_MISSED_MESSAGES_TO_L2_BLOCK_DEPTH", 50), + messages_to_l1_blocks_depth: + ConfigHelper.parse_integer_env_var("INDEXER_ARBITRUM_MISSED_MESSAGES_TO_L1_BLOCK_DEPTH", 1000) + +config :indexer, Indexer.Fetcher.Arbitrum.RollupMessagesCatchup.Supervisor, + enabled: ConfigHelper.parse_bool_env_var("INDEXER_ARBITRUM_BRIDGE_MESSAGES_TRACKING_ENABLED") + config :indexer, Indexer.Fetcher.RootstockData.Supervisor, disabled?: ConfigHelper.chain_type() != :rsk || ConfigHelper.parse_bool_env_var("INDEXER_DISABLE_ROOTSTOCK_DATA_FETCHER") diff --git a/config/runtime/dev.exs b/config/runtime/dev.exs index cb1aa7d3cbea..2c831a5f36e4 100644 --- a/config/runtime/dev.exs +++ b/config/runtime/dev.exs @@ -156,6 +156,15 @@ config :explorer, Explorer.Repo.Filecoin, url: System.get_env("DATABASE_URL"), pool_size: 1 +# Configure Arbitrum database +config :explorer, Explorer.Repo.Arbitrum, + database: database, + hostname: hostname, + url: System.get_env("DATABASE_URL"), + # actually this repo is not started, and its pool size remains unused. + # separating repos for different CHAIN_TYPE is implemented only for the sake of keeping DB schema update relevant to the current chain type + pool_size: 1 + # Configures Stability database config :explorer, Explorer.Repo.Stability, database: database, diff --git a/config/runtime/prod.exs b/config/runtime/prod.exs index 899a772783f1..eb54b61f33be 100644 --- a/config/runtime/prod.exs +++ b/config/runtime/prod.exs @@ -121,6 +121,14 @@ config :explorer, Explorer.Repo.Filecoin, pool_size: 1, ssl: ExplorerConfigHelper.ssl_enabled?() +# Configures Arbitrum database +config :explorer, Explorer.Repo.Arbitrum, + url: System.get_env("DATABASE_URL"), + # actually this repo is not started, and its pool size remains unused. 
+ # separating repos for different CHAIN_TYPE is implemented only for the sake of keeping DB schema update relevant to the current chain type + pool_size: 1, + ssl: ExplorerConfigHelper.ssl_enabled?() + # Configures Stability database config :explorer, Explorer.Repo.Stability, url: System.get_env("DATABASE_URL"), diff --git a/cspell.json b/cspell.json index 50353346b5a0..0f0cf285ed89 100644 --- a/cspell.json +++ b/cspell.json @@ -13,7 +13,6 @@ "AIRTABLE", "ARGMAX", "Aiubo", - "Arbitrum", "Asfpp", "Asfpp", "Autodetection", @@ -114,6 +113,10 @@ "alloc", "amzootyukbugmx", "apikey", + "APIV", + "Arbitrum", + "arbsys", + "ARGMAX", "arounds", "asda", "atoken", @@ -136,6 +139,7 @@ "bizbuz", "blockheight", "blockless", + "blocknum", "blockno", "blockreward", "blockscout", @@ -155,6 +159,7 @@ "cacerts", "callcode", "calltracer", + "callvalue", "capturelog", "cattributes", "cellspacing", @@ -192,6 +197,8 @@ "contractname", "cooldown", "cooltesthost", + "crosschain", + "crosslevel", "crossorigin", "CRYPTOCOMPARE", "ctbs", @@ -459,8 +466,10 @@ "reqs", "rerequest", "reshows", + "retcode", "retryable", "returnaddress", + "retval", "reuseaddr", "rollup", "rollups", @@ -554,6 +563,7 @@ "unclosable", "unfetched", "unfinalized", + "unindexed", "unknownc", "unknowne", "unmarshal", @@ -593,6 +603,7 @@ "xbaddress", "xdai", "xffff", + "xlevel", "xlink", "xmark", "xmlhttprequest", From 5a568f034e8bfcafd39ecbe3c8df5bfc675f71b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 May 2024 12:53:41 +0300 Subject: [PATCH 036/150] --- (#10096) updated-dependencies: - dependency-name: phoenix_ecto dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mix.lock b/mix.lock index d5db0e1a8b52..e6e73650f6dc 100644 --- a/mix.lock +++ b/mix.lock @@ -100,11 +100,11 @@ "parallel_stream": {:hex, :parallel_stream, "1.1.0", "f52f73eb344bc22de335992377413138405796e0d0ad99d995d9977ac29f1ca9", [:mix], [], "hexpm", "684fd19191aedfaf387bbabbeb8ff3c752f0220c8112eb907d797f4592d6e871"}, "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, "phoenix": {:hex, :phoenix, "1.5.14", "2d5db884be496eefa5157505ec0134e66187cb416c072272420c5509d67bf808", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.13 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.1.2 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "207f1aa5520320cbb7940d7ff2dde2342162cf513875848f88249ea0ba02fef7"}, - "phoenix_ecto": {:hex, :phoenix_ecto, "4.5.1", "6fdbc334ea53620e71655664df6f33f670747b3a7a6c4041cdda3e2c32df6257", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ebe43aa580db129e54408e719fb9659b7f9e0d52b965c5be26cdca416ecead28"}, + "phoenix_ecto": {:hex, :phoenix_ecto, "4.6.1", 
"96798325fab2fed5a824ca204e877b81f9afd2e480f581e81f7b4b64a5a477f2", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.17", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "0ae544ff99f3c482b0807c5cec2c8289e810ecacabc04959d82c3337f4703391"}, "phoenix_html": {:hex, :phoenix_html, "3.0.4", "232d41884fe6a9c42d09f48397c175cd6f0d443aaa34c7424da47604201df2e1", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "ce17fd3cf815b2ed874114073e743507704b1f5288bb03c304a77458485efc8b"}, "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.3.3", "3a53772a6118d5679bf50fc1670505a290e32a1d195df9e069d8c53ab040c054", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "766796676e5f558dbae5d1bdb066849673e956005e3730dfd5affd7a6da4abac"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, - "plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"}, + "plug": {:hex, :plug, "1.16.0", "1d07d50cb9bb05097fdf187b31cf087c7297aafc3fed8299aac79c128a707e47", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 
2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbf53aa1f5c4d758a7559c0bd6d59e286c2be0c6a1fac8cc3eee2f638243b93e"}, "plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"}, "plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"}, "poison": {:hex, :poison, "4.0.1", "bcb755a16fac91cad79bfe9fc3585bb07b9331e50cfe3420a24bcc2d735709ae", [:mix], [], "hexpm", "ba8836feea4b394bb718a161fc59a288fe0109b5006d6bdf97b6badfcf6f0f25"}, From 5c4cb10f4c1d81336e245ad4e459f0a93169776f Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Mon, 27 May 2024 21:49:14 +0300 Subject: [PATCH 037/150] hide chain specific fields behind Map.get (#10131) --- .../views/api/v2/arbitrum_view.ex | 28 +++++++++++-------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex index ffc9c745aa84..5a4458402b37 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex @@ -366,8 +366,8 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do }) :: map() defp extend_if_message(arbitrum_json, %Transaction{} = arbitrum_tx) do message_type = - case {APIV2Helper.specified?(arbitrum_tx.arbitrum_message_to_l2), - 
APIV2Helper.specified?(arbitrum_tx.arbitrum_message_from_l2)} do + case {APIV2Helper.specified?(Map.get(arbitrum_tx, :arbitrum_message_to_l2)), + APIV2Helper.specified?(Map.get(arbitrum_tx, :arbitrum_message_from_l2))} do {true, false} -> "incoming" {false, true} -> "outcoming" _ -> nil @@ -385,22 +385,28 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do optional(any()) => any() }) :: map() defp extend_with_transaction_info(out_json, %Transaction{} = arbitrum_tx) do + # These checks are only needed for the case when the module is compiled with + # chain_type different from "arbitrum" + gas_used_for_l1 = Map.get(arbitrum_tx, :gas_used_for_l1, 0) + gas_used = Map.get(arbitrum_tx, :gas_used, 0) + gas_price = Map.get(arbitrum_tx, :gas_price, 0) + gas_used_for_l2 = - arbitrum_tx.gas_used - |> Decimal.sub(arbitrum_tx.gas_used_for_l1) + gas_used + |> Decimal.sub(gas_used_for_l1) poster_fee = - arbitrum_tx.gas_price + gas_price |> Wei.to(:wei) - |> Decimal.mult(arbitrum_tx.gas_used_for_l1) + |> Decimal.mult(gas_used_for_l1) network_fee = - arbitrum_tx.gas_price + gas_price |> Wei.to(:wei) |> Decimal.mult(gas_used_for_l2) out_json - |> Map.put("gas_used_for_l1", arbitrum_tx.gas_used_for_l1) + |> Map.put("gas_used_for_l1", gas_used_for_l1) |> Map.put("gas_used_for_l2", gas_used_for_l2) |> Map.put("poster_fee", poster_fee) |> Map.put("network_fee", network_fee) @@ -418,8 +424,8 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do defp extend_with_block_info(out_json, %Block{} = arbitrum_block) do out_json |> Map.put("delayed_messages", Hash.to_integer(arbitrum_block.nonce)) - |> Map.put("l1_block_height", arbitrum_block.l1_block_number) - |> Map.put("send_count", arbitrum_block.send_count) - |> Map.put("send_root", arbitrum_block.send_root) + |> Map.put("l1_block_height", Map.get(arbitrum_block, :l1_block_number)) + |> Map.put("send_count", Map.get(arbitrum_block, :send_count)) + |> Map.put("send_root", Map.get(arbitrum_block, :send_root)) end end From 
dc44b18030ba9babb47286abce2c7966af4c10f1 Mon Sep 17 00:00:00 2001 From: varasev <33550681+varasev@users.noreply.github.com> Date: Tue, 28 May 2024 08:12:23 +0300 Subject: [PATCH 038/150] fix: Hotfix for Indexer.Fetcher.Optimism.WithdrawalEvent and EthereumJSONRPC.Receipt (#10130) * fix: Hotfix for Indexer.Fetcher.Optimism.WithdrawalEvent * Small refactoring * Small refactoring * Consider nil in game_index * Add empty handler for Optimism Fjord tx receipt fields --------- Co-authored-by: POA <33550681+poa@users.noreply.github.com> --- .../lib/ethereum_jsonrpc/receipt.ex | 3 ++- .../indexer/fetcher/optimism/withdrawal_event.ex | 14 +++++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex index caa06a212a63..5e1e5f09d68a 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex @@ -369,7 +369,8 @@ defmodule EthereumJSONRPC.Receipt do defp entry_to_elixir({key, quantity}) when key in ~w(blockNumber cumulativeGasUsed gasUsed transactionIndex blobGasUsed - blobGasPrice l1Fee l1GasPrice l1GasUsed effectiveGasPrice gasUsedForL1) do + blobGasPrice l1Fee l1GasPrice l1GasUsed effectiveGasPrice gasUsedForL1 + l1BlobBaseFeeScalar l1BlobBaseFee l1BaseFeeScalar) do result = if is_nil(quantity) do nil diff --git a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex index e2c107bbac29..85020312d712 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex @@ -207,7 +207,8 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do Map.put(acc, block_number, timestamp) end) - Enum.map(events, fn event -> + events + |> Enum.map(fn event -> tx_hash = event["transactionHash"] {l1_event_type, game_index} = @@ -233,6 +234,17 @@ defmodule 
Indexer.Fetcher.Optimism.WithdrawalEvent do game_index: game_index } end) + |> Enum.reduce(%{}, fn e, acc -> + key = {e.withdrawal_hash, e.l1_event_type} + prev_game_index = Map.get(acc, key, %{game_index: 0}).game_index + + if prev_game_index < e.game_index or is_nil(prev_game_index) do + Map.put(acc, key, e) + else + acc + end + end) + |> Map.values() end def get_last_l1_item do From f0f83d23d17a93aed94d66864d1dd40086ace001 Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Tue, 28 May 2024 11:53:51 +0200 Subject: [PATCH 039/150] fix: missing nil case for revert reason (#10136) --- apps/explorer/lib/explorer/chain/transaction.ex | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index d03c39db60c2..70355c66b8a7 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -654,16 +654,16 @@ defmodule Explorer.Chain.Transaction do end def decoded_revert_reason(transaction, revert_reason, options \\ []) do - hex = - case revert_reason do - "0x" <> hex_part -> - hex_part + case revert_reason do + nil -> + nil - hex -> - hex - end + "0x" <> hex_part -> + process_hex_revert_reason(hex_part, transaction, options) - process_hex_revert_reason(hex, transaction, options) + hex -> + process_hex_revert_reason(hex, transaction, options) + end end @default_error_abi [ From 67a966833df0d429e1c1d4b652b05ae2e689846b Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Wed, 29 May 2024 12:42:06 +0300 Subject: [PATCH 040/150] Explicit message on token balance update error (#10129) --- .../lib/indexer/fetcher/on_demand/token_balance.ex | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex b/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex index a63e82c18578..b9f7e625ee61 100644 --- 
a/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex +++ b/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex @@ -194,8 +194,14 @@ defmodule Indexer.Fetcher.OnDemand.TokenBalance do |> Map.put(:value_fetched_at, DateTime.utc_now()) end - defp prepare_updated_balance({{:error, error}, _ctb}, _block_number) do - Logger.warn(fn -> ["Error on updating current token balance: ", inspect(error)] end) + defp prepare_updated_balance({{:error, error}, ctb}, block_number) do + Logger.warn(fn -> + [ + "Error on updating current token #{to_string(ctb.token_contract_address_hash)} balance for address #{to_string(ctb.address_hash)} at block number #{block_number}: ", + inspect(error) + ] + end) + nil end From ba664fa27e96960a590636e59aca2547bb5cd428 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Wed, 29 May 2024 12:58:30 +0300 Subject: [PATCH 041/150] fix: Add missing preloads to tokens endpoints (#10072) * fix: Add healthcheck endpoints for indexer-only setup (#10076) * fix: Update Vyper inner compilers list to support all compilers (#10091) Increases the number of items retrieved for Vyper releases to 100. Allows to get the oldest vyper compilers when using internal verification (i.e., sc_verifier is disabled) When not set, the default number of items retrieved is 30, which is less than total number of currently existing releases (46). This makes the oldest compilers unavailable when using vyper verificaiton method. 
Should fix the failing `/api/v2/smart-contracts/{address_hash}/verification/via/vyper-code success verification` test * Fix certified flag in the search API v2 endpoint (#10094) * Update CHANGELOG for 6.6.0 * Remove custom release CI for Immutable * feat: implement fetch_first_trace for Geth (#10087) * feat: implement fetch_first_trace for Geth * chore: add missing doc & spec * Update CHANGELOG * Fix GA pre-release && release workflows * Improve response of address API to return multiple implementations for Diamond proxy (#10113) * Update CHANGELOG * Indexer/API separated images for Redstone * chore: Bump ecto_sql from 3.11.1 to 3.11.2 updated-dependencies: - dependency-name: ecto_sql dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: indexer for cross level messages on Arbitrum (#9312) * Initial version of x-level messages indexer * fixes for cspell and credo * new state of x-level messages * Monitoring of new L1-to-L2 messages on L1 * new batches discovery * fetcher workers in separate modules * proper name * Fix for responses without "id", e.g. 
"Too Many Requests" * update DB with new batches and corresponding data * update DB with confirmed blocks * fixes for cspell and credo * tracking commitments confirmations for L1 to L2 messages * Proper usign of max function * tracking completion of L2 to L1 messages * catchup historical messages to L2 * incorrect version of committed file * catchup historical messages from L2 and completion of L1-to-L2 messages * historical batches catchup * status for historical l2-to-l1 messages * address matching issue * catchup historical executions of L2-to-L1 messages * db query to find unconfirmed blocks gaps * first changes to catchup historical confirmations * finalized catchup of historical confirmations * 4844 blobs support * fix for the issue with multiple confirmations * limit amount of batches to handle at once * Use latest L1 block by fetchers if start block is not configured * merge issue fix * missed file * historical messages discovery * reduce logs severity * first iteration to improve documentation for new functionality * second iteration to improve documentation for new functionality * third iteration to improve documentation for new functionality * fourth iteration to improve documentation for new functionality * fifth iteration to improve documentation for new functionality * final iteration to improve documentation for new functionality * merge issues addressed * code review issues addressed * code review issues addressed * fix merge issue * raising exception in the case of DB inconsistency * fix formatting issue * termination case for RollupMessagesCatchup * code review comments addressed * code review comments addressed * consistency in primary keys * dialyzer fix * code review comments addressed * missed doc comment * code review comments addressed * updated indices creation as per code review comments * fix merge issue * configuration of intervals as time variables * TODO added to reflect improvement ability * database fields refactoring * association 
renaming * feat: APIv2 endpoints for Arbitrum messages and batches (#9963) * Arbitrum related info in Transaction and Block views * Views to get info about batches and messages * usage of committed for batches instead of confirmed * merge issues addressed * changes after merge * formatting issue fix * code review comment addressed * associations and fields in api response renamed * format issue addressed * feat: Arbitrum-specific fields in the block and transaction API endpoints (#10067) * Arbitrum related info in Transaction and Block views * Views to get info about batches and messages * usage of committed for batches instead of confirmed * merge issues addressed * changes after merge * formatting issue fix * block and transaction views extended * code review comment addressed * associations and fields in api response renamed * format issue addressed * fix credo issue * fix tests issues * ethereumjsonrpc test fail investigation * test issues fixes * --- (#10096) updated-dependencies: - dependency-name: phoenix_ecto dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * hide chain specific fields behind Map.get (#10131) * fix: Hotfix for Indexer.Fetcher.Optimism.WithdrawalEvent and EthereumJSONRPC.Receipt (#10130) * fix: Hotfix for Indexer.Fetcher.Optimism.WithdrawalEvent * Small refactoring * Small refactoring * Consider nil in game_index * Add empty handler for Optimism Fjord tx receipt fields --------- Co-authored-by: POA <33550681+poa@users.noreply.github.com> * fix: missing nil case for revert reason (#10136) * fix: Add missing preloads to tokens endpoints --------- Signed-off-by: dependabot[bot] Co-authored-by: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Co-authored-by: Rim Rakhimov Co-authored-by: Victor Baranov Co-authored-by: Kirill Fedoseev Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Alexander Kolotov Co-authored-by: varasev <33550681+varasev@users.noreply.github.com> Co-authored-by: POA <33550681+poa@users.noreply.github.com> --- .../lib/block_scout_web/views/api/v2/helper.ex | 5 +++-- .../lib/explorer/chain/address/current_token_balance.ex | 2 +- apps/explorer/lib/explorer/chain/token_transfer.ex | 8 +++++++- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index 2afc67df048a..d2157538a3a5 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -54,7 +54,8 @@ defmodule BlockScoutWeb.API.V2.Helper do """ @spec address_with_info(any(), any()) :: nil | %{optional(<<_::32, _::_*8>>) => any()} def address_with_info(%Address{} = address, _address_hash) do - implementation_names = Implementation.names(address) + smart_contract? 
= Address.smart_contract?(address) + implementation_names = if smart_contract?, do: Implementation.names(address), else: [] formatted_implementation_names = implementation_names @@ -71,7 +72,7 @@ defmodule BlockScoutWeb.API.V2.Helper do %{ "hash" => Address.checksum(address), - "is_contract" => Address.smart_contract?(address), + "is_contract" => smart_contract?, "name" => address_name(address), # todo: added for backward compatibility, remove when frontend unbound from these props "implementation_name" => implementation_name, diff --git a/apps/explorer/lib/explorer/chain/address/current_token_balance.ex b/apps/explorer/lib/explorer/chain/address/current_token_balance.ex index b248ee4479e2..e2fb6499509a 100644 --- a/apps/explorer/lib/explorer/chain/address/current_token_balance.ex +++ b/apps/explorer/lib/explorer/chain/address/current_token_balance.ex @@ -85,7 +85,7 @@ defmodule Explorer.Chain.Address.CurrentTokenBalance do def token_holders_ordered_by_value(token_contract_address_hash, options \\ []) do token_contract_address_hash |> token_holders_ordered_by_value_query_without_address_preload(options) - |> preload(:address) + |> preload(address: :smart_contract) end @doc """ diff --git a/apps/explorer/lib/explorer/chain/token_transfer.ex b/apps/explorer/lib/explorer/chain/token_transfer.ex index ab732f3a8270..d0a33a1d304b 100644 --- a/apps/explorer/lib/explorer/chain/token_transfer.ex +++ b/apps/explorer/lib/explorer/chain/token_transfer.ex @@ -163,7 +163,13 @@ defmodule Explorer.Chain.TokenTransfer do [] _ -> - preloads = DenormalizationHelper.extend_transaction_preload([:transaction, :token, :from_address, :to_address]) + preloads = + DenormalizationHelper.extend_transaction_preload([ + :transaction, + :token, + [from_address: :smart_contract], + [to_address: :smart_contract] + ]) only_consensus_transfers_query() |> where([tt], tt.token_contract_address_hash == ^token_address_hash and not is_nil(tt.block_number)) From e2692945fc29a97e8f7b4679797a929a86fd0561 
Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Wed, 29 May 2024 16:39:58 +0400 Subject: [PATCH 042/150] feat: Add window between balance fetch retries for missing balanceOf tokens (#10142) --- .../import/runner/address/token_balances.ex | 2 +- .../utility/missing_balance_of_token.ex | 40 ++++++++++++---- ...plemented_to_missing_balance_of_tokens.exs | 9 ++++ .../runner/address/token_balances_test.exs | 3 +- .../lib/indexer/fetcher/token_balance.ex | 47 +++++++++++++++---- .../indexer/fetcher/token_balance_test.exs | 35 +++++++++++++- config/runtime.exs | 3 ++ docker-compose/envs/common-blockscout.env | 1 + 8 files changed, 121 insertions(+), 19 deletions(-) create mode 100644 apps/explorer/priv/repo/migrations/20240527152734_add_currently_implemented_to_missing_balance_of_tokens.exs diff --git a/apps/explorer/lib/explorer/chain/import/runner/address/token_balances.ex b/apps/explorer/lib/explorer/chain/import/runner/address/token_balances.ex index e4e20d08fa6f..af07a27626d1 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/address/token_balances.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/address/token_balances.ex @@ -75,7 +75,7 @@ defmodule Explorer.Chain.Import.Runner.Address.TokenBalances do is_nil(Map.get(balance_params, :value_fetched_at)) or is_nil(Map.get(balance_params, :value)) end) - {:ok, filled_balances ++ MissingBalanceOfToken.filter_token_balances_params(placeholders)} + {:ok, filled_balances ++ MissingBalanceOfToken.filter_token_balances_params(placeholders, false)} end @spec insert(Repo.t(), [map()], %{ diff --git a/apps/explorer/lib/explorer/utility/missing_balance_of_token.ex b/apps/explorer/lib/explorer/utility/missing_balance_of_token.ex index 671de226badc..af1333cc9061 100644 --- a/apps/explorer/lib/explorer/utility/missing_balance_of_token.ex +++ b/apps/explorer/lib/explorer/utility/missing_balance_of_token.ex @@ -12,6 +12,7 @@ defmodule 
Explorer.Utility.MissingBalanceOfToken do @primary_key false typed_schema "missing_balance_of_tokens" do field(:block_number, :integer) + field(:currently_implemented, :boolean) belongs_to( :token, @@ -28,7 +29,7 @@ defmodule Explorer.Utility.MissingBalanceOfToken do @doc false def changeset(missing_balance_of_token \\ %__MODULE__{}, params) do - cast(missing_balance_of_token, params, [:token_contract_address_hash, :block_number]) + cast(missing_balance_of_token, params, [:token_contract_address_hash, :block_number, :currently_implemented]) end @doc """ @@ -41,23 +42,38 @@ defmodule Explorer.Utility.MissingBalanceOfToken do |> Repo.all() end + @doc """ + Set currently_implemented: true for all provided token contract address hashes + """ + @spec mark_as_implemented([Hash.Address.t()]) :: {non_neg_integer(), nil | [term()]} + def mark_as_implemented(token_contract_address_hashes) do + __MODULE__ + |> where([mbot], mbot.token_contract_address_hash in ^token_contract_address_hashes) + |> Repo.update_all(set: [currently_implemented: true]) + end + @doc """ Filters provided token balances params by presence of record with the same `token_contract_address_hash` and above or equal `block_number` in `missing_balance_of_tokens`. 
""" - @spec filter_token_balances_params([map()]) :: [map()] - def filter_token_balances_params(params) do + @spec filter_token_balances_params([map()], boolean(), [__MODULE__.t()] | nil) :: [map()] + def filter_token_balances_params(params, use_window?, missing_balance_of_tokens \\ nil) do + existing_missing_balance_of_tokens = missing_balance_of_tokens || fetch_from_params(params) + missing_balance_of_tokens_map = - params - |> Enum.map(& &1.token_contract_address_hash) - |> get_by_hashes() - |> Enum.map(&{to_string(&1.token_contract_address_hash), &1.block_number}) + existing_missing_balance_of_tokens + |> Enum.map( + &{to_string(&1.token_contract_address_hash), + %{block_number: &1.block_number, currently_implemented: &1.currently_implemented}} + ) |> Map.new() Enum.filter(params, fn %{token_contract_address_hash: token_contract_address_hash, block_number: block_number} -> case missing_balance_of_tokens_map[to_string(token_contract_address_hash)] do nil -> true - missing_balance_of_block_number -> block_number > missing_balance_of_block_number + %{block_number: bn, currently_implemented: true} -> block_number > bn + %{block_number: bn} when not use_window? 
-> block_number > bn + %{block_number: bn} -> block_number > bn + missing_balance_of_window() end end) end @@ -87,6 +103,14 @@ defmodule Explorer.Utility.MissingBalanceOfToken do Repo.insert_all(__MODULE__, params, on_conflict: on_conflict(), conflict_target: :token_contract_address_hash) end + defp fetch_from_params(params) do + params + |> Enum.map(& &1.token_contract_address_hash) + |> get_by_hashes() + end + + defp missing_balance_of_window, do: Application.get_env(:explorer, __MODULE__)[:window_size] + defp on_conflict do from( mbot in __MODULE__, diff --git a/apps/explorer/priv/repo/migrations/20240527152734_add_currently_implemented_to_missing_balance_of_tokens.exs b/apps/explorer/priv/repo/migrations/20240527152734_add_currently_implemented_to_missing_balance_of_tokens.exs new file mode 100644 index 000000000000..84b8e02cf431 --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20240527152734_add_currently_implemented_to_missing_balance_of_tokens.exs @@ -0,0 +1,9 @@ +defmodule Explorer.Repo.Migrations.AddCurrentlyImplementedToMissingBalanceOfTokens do + use Ecto.Migration + + def change do + alter table(:missing_balance_of_tokens) do + add(:currently_implemented, :boolean) + end + end +end diff --git a/apps/explorer/test/explorer/chain/import/runner/address/token_balances_test.exs b/apps/explorer/test/explorer/chain/import/runner/address/token_balances_test.exs index f391aaeb8076..4d7f24eec8d0 100644 --- a/apps/explorer/test/explorer/chain/import/runner/address/token_balances_test.exs +++ b/apps/explorer/test/explorer/chain/import/runner/address/token_balances_test.exs @@ -234,7 +234,8 @@ defmodule Explorer.Chain.Import.Runner.Address.TokenBalancesTest do insert(:missing_balance_of_token, token_contract_address_hash: token_contract_address_hash, - block_number: block_number + block_number: block_number, + currently_implemented: true ) address_hash = address.hash diff --git a/apps/indexer/lib/indexer/fetcher/token_balance.ex 
b/apps/indexer/lib/indexer/fetcher/token_balance.ex index bfabded15ad0..be3bfafb2eb9 100644 --- a/apps/indexer/lib/indexer/fetcher/token_balance.ex +++ b/apps/indexer/lib/indexer/fetcher/token_balance.ex @@ -96,12 +96,19 @@ defmodule Indexer.Fetcher.TokenBalance do @impl BufferedTask @decorate trace(name: "fetch", resource: "Indexer.Fetcher.TokenBalance.run/2", tracer: Tracer, service: :indexer) def run(entries, _json_rpc_named_arguments) do + params = Enum.map(entries, &format_params/1) + + missing_balance_of_tokens = + params + |> Enum.map(& &1.token_contract_address_hash) + |> Enum.uniq() + |> MissingBalanceOfToken.get_by_hashes() + result = - entries - |> Enum.map(&format_params/1) - |> MissingBalanceOfToken.filter_token_balances_params() + params + |> MissingBalanceOfToken.filter_token_balances_params(true, missing_balance_of_tokens) |> increase_retries_count() - |> fetch_from_blockchain() + |> fetch_from_blockchain(missing_balance_of_tokens) |> import_token_balances() if result == :ok do @@ -111,7 +118,7 @@ defmodule Indexer.Fetcher.TokenBalance do end end - def fetch_from_blockchain(params_list) do + def fetch_from_blockchain(params_list, missing_balance_of_tokens) do retryable_params_list = params_list |> Enum.filter(&(&1.retries_count <= @max_retries)) @@ -130,6 +137,8 @@ defmodule Indexer.Fetcher.TokenBalance do failed_token_balances: failed_token_balances } + handle_success_balances(fetched_token_balances, missing_balance_of_tokens) + if Enum.empty?(failed_token_balances) do {:halt, all_token_balances} else @@ -149,6 +158,23 @@ defmodule Indexer.Fetcher.TokenBalance do fetched_token_balances end + defp handle_success_balances(fetched_token_balances, missing_balance_of_tokens) do + successful_token_hashes = + fetched_token_balances + |> Enum.map(&to_string(&1.token_contract_address_hash)) + |> MapSet.new() + + missing_balance_of_token_hashes = + missing_balance_of_tokens + |> Enum.map(&to_string(&1.token_contract_address_hash)) + |> MapSet.new() + + 
successful_token_hashes + |> MapSet.intersection(missing_balance_of_token_hashes) + |> MapSet.to_list() + |> MissingBalanceOfToken.mark_as_implemented() + end + defp handle_failed_balances(failed_token_balances) do {missing_balance_of_balances, other_failed_balances} = Enum.split_with(failed_token_balances, fn @@ -158,9 +184,14 @@ defmodule Indexer.Fetcher.TokenBalance do MissingBalanceOfToken.insert_from_params(missing_balance_of_balances) - Enum.each(missing_balance_of_balances, fn balance -> - TokenBalance.delete_placeholders_below(balance.token_contract_address_hash, balance.block_number) - CurrentTokenBalance.delete_placeholders_below(balance.token_contract_address_hash, balance.block_number) + missing_balance_of_balances + |> Enum.group_by(& &1.token_contract_address_hash, & &1.block_number) + |> Enum.map(fn {token_contract_address_hash, block_numbers} -> + {token_contract_address_hash, Enum.max(block_numbers)} + end) + |> Enum.each(fn {token_contract_address_hash, block_number} -> + TokenBalance.delete_placeholders_below(token_contract_address_hash, block_number) + CurrentTokenBalance.delete_placeholders_below(token_contract_address_hash, block_number) end) other_failed_balances diff --git a/apps/indexer/test/indexer/fetcher/token_balance_test.exs b/apps/indexer/test/indexer/fetcher/token_balance_test.exs index 5d33543b75eb..e39cc6e98ae6 100644 --- a/apps/indexer/test/indexer/fetcher/token_balance_test.exs +++ b/apps/indexer/test/indexer/fetcher/token_balance_test.exs @@ -187,7 +187,7 @@ defmodule Indexer.Fetcher.TokenBalanceTest do test "filters out params with tokens that doesn't implement balanceOf function" do address = insert(:address) - missing_balance_of_token = insert(:missing_balance_of_token) + missing_balance_of_token = insert(:missing_balance_of_token, currently_implemented: true) assert TokenBalance.run( [ @@ -200,6 +200,39 @@ defmodule Indexer.Fetcher.TokenBalanceTest do assert Repo.all(Address.TokenBalance) == [] end + test "set 
currently_implemented: true for missing balanceOf token if balance was successfully fetched" do + address = insert(:address) + missing_balance_of_token = insert(:missing_balance_of_token) + window_size = Application.get_env(:explorer, MissingBalanceOfToken)[:window_size] + + expect( + EthereumJSONRPC.Mox, + :json_rpc, + fn [%{id: id, method: "eth_call", params: [%{data: _, to: _}, _]}], _options -> + {:ok, + [ + %{ + id: id, + jsonrpc: "2.0", + result: "0x00000000000000000000000000000000000000000000d3c21bcecceda1000000" + } + ]} + end + ) + + refute missing_balance_of_token.currently_implemented + + assert TokenBalance.run( + [ + {address.hash.bytes, missing_balance_of_token.token_contract_address_hash.bytes, + missing_balance_of_token.block_number + window_size + 1, "ERC-20", nil, 0} + ], + nil + ) == :ok + + assert %{currently_implemented: true} = Repo.one(MissingBalanceOfToken) + end + test "in case of error deletes token balance placeholders below the given number and inserts new missing balanceOf tokens" do address = insert(:address) %{contract_address_hash: token_contract_address_hash} = insert(:token) diff --git a/config/runtime.exs b/config/runtime.exs index a9fb04c4dfba..c9490d7bccd8 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -575,6 +575,9 @@ config :explorer, Explorer.Chain.BridgedToken, amb_bridge_mediators: System.get_env("BRIDGED_TOKENS_AMB_BRIDGE_MEDIATORS"), foreign_json_rpc: System.get_env("BRIDGED_TOKENS_FOREIGN_JSON_RPC", "") +config :explorer, Explorer.Utility.MissingBalanceOfToken, + window_size: ConfigHelper.parse_integer_env_var("MISSING_BALANCE_OF_TOKENS_WINDOW_SIZE", 100) + ############### ### Indexer ### ############### diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index 0f3c2a579143..66728d07b52d 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -263,6 +263,7 @@ INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER=false # 
TOKEN_ID_MIGRATION_FIRST_BLOCK= # TOKEN_ID_MIGRATION_CONCURRENCY= # TOKEN_ID_MIGRATION_BATCH_SIZE= +# MISSING_BALANCE_OF_TOKENS_WINDOW_SIZE= # INDEXER_INTERNAL_TRANSACTIONS_TRACER_TYPE= # WEBAPP_URL= # API_URL= From 016881a0130c858936414b15f9335ef97b0a7a06 Mon Sep 17 00:00:00 2001 From: sirawt <31649128+MASDXI@users.noreply.github.com> Date: Thu, 30 May 2024 19:46:09 +0700 Subject: [PATCH 043/150] refactor: Remove hardcoded numResults from fetch_pending_transactions_besu (#10117) * update remove fixed value * refactor fetch_pending_transactions_besu * add PR number and revised CHANGLOG.md * remove change in CHANGELOG.md --- .../lib/ethereum_jsonrpc/pending_transaction.ex | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex index cfce452c8647..b797585ec5d3 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex @@ -60,18 +60,8 @@ defmodule EthereumJSONRPC.PendingTransaction do @spec fetch_pending_transactions_besu(EthereumJSONRPC.json_rpc_named_arguments()) :: {:ok, [Transaction.params()]} | {:error, reason :: term} def fetch_pending_transactions_besu(json_rpc_named_arguments) do - # `txpool_besuPendingTransactions` required parameter `numResults` for number of maximum pending transaction to return. - # - # TODO: Remove fix value when hyperledger besu client change `numResults` from required to optional parameter. - # Current fix value set to `512` bonsai storage default value is 512. - # to handle pending transaction in Ethereum mainnet require more than 100000. 
- # reference: - # https://etherscan.io/chart/pendingtx - # https://besu.hyperledger.org/public-networks/reference/cli/options#bonsai-historical-block-limit - # - # https://besu.hyperledger.org/public-networks/reference/api#txpool_besupendingtransactions with {:ok, transactions} <- - %{id: 1, method: "txpool_besuPendingTransactions", params: [512]} + %{id: 1, method: "txpool_besuPendingTransactions", params: []} |> request() |> json_rpc(json_rpc_named_arguments) do transactions_params = From d3e15d337937a23553b1f20008b17d578d603f7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:01:08 +0300 Subject: [PATCH 044/150] chore(deps-dev): bump @babel/core in /apps/block_scout_web/assets (#10172) Bumps [@babel/core](https://github.com/babel/babel/tree/HEAD/packages/babel-core) from 7.24.5 to 7.24.6. - [Release notes](https://github.com/babel/babel/releases) - [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md) - [Commits](https://github.com/babel/babel/commits/v7.24.6/packages/babel-core) --- updated-dependencies: - dependency-name: "@babel/core" dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 420 +++++++++--------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 210 insertions(+), 212 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index b7f9b31f2aa3..eb83cec1bbe0 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -71,7 +71,7 @@ "xss": "^1.0.15" }, "devDependencies": { - "@babel/core": "^7.24.5", + "@babel/core": "^7.24.6", "@babel/preset-env": "^7.24.5", "autoprefixer": "^10.4.19", "babel-loader": "^9.1.3", @@ -212,11 +212,11 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.24.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.2.tgz", - "integrity": "sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.6.tgz", + "integrity": "sha512-ZJhac6FkEd1yhG2AHOmfcXG4ceoLltoCVJjN5XsWN9BifBQr+cHJbWi0h68HZuSORq+3WtJ2z0hwF2NG1b5kcA==", "dependencies": { - "@babel/highlight": "^7.24.2", + "@babel/highlight": "^7.24.6", "picocolors": "^1.0.0" }, "engines": { @@ -224,28 +224,28 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.4.tgz", - "integrity": "sha512-vg8Gih2MLK+kOkHJp4gBEIkyaIi00jgWot2D9QOmmfLC8jINSOzmCLta6Bvz/JSBCqnegV0L80jhxkol5GWNfQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.6.tgz", + "integrity": "sha512-aC2DGhBq5eEdyXWqrDInSqQjO0k8xtPRf5YylULqx8MCd6jBtzqfta/3ETMRpuKIc5hyswfO80ObyA1MvkCcUQ==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.24.5", - "resolved": 
"https://registry.npmjs.org/@babel/core/-/core-7.24.5.tgz", - "integrity": "sha512-tVQRucExLQ02Boi4vdPp49svNGcfL2GhdTCT9aldhXgCJVAI21EtRfBettiuLUwce/7r6bFdgs6JFkcdTiFttA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.6.tgz", + "integrity": "sha512-qAHSfAdVyFmIvl0VHELib8xar7ONuSHrE2hLnsaWkYNTI68dmi1x8GYDhJjMI/e7XWal9QBlZkwbOnkcw7Z8gQ==", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.24.2", - "@babel/generator": "^7.24.5", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-module-transforms": "^7.24.5", - "@babel/helpers": "^7.24.5", - "@babel/parser": "^7.24.5", - "@babel/template": "^7.24.0", - "@babel/traverse": "^7.24.5", - "@babel/types": "^7.24.5", + "@babel/code-frame": "^7.24.6", + "@babel/generator": "^7.24.6", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helpers": "^7.24.6", + "@babel/parser": "^7.24.6", + "@babel/template": "^7.24.6", + "@babel/traverse": "^7.24.6", + "@babel/types": "^7.24.6", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -266,11 +266,11 @@ "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, "node_modules/@babel/generator": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.5.tgz", - "integrity": "sha512-x32i4hEXvr+iI0NEoEfDKzlemF8AmtOP8CcrRaEcpzysWuoEb1KknpcvMsHKPONoKZiDuItklgWhB18xEhr9PA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.6.tgz", + "integrity": "sha512-S7m4eNa6YAPJRHmKsLHIDJhNAGNKoWNiWefz1MBbpnt8g9lvMDl1hir4P9bo/57bQEmuwEhnRU/AMWsD0G/Fbg==", "dependencies": { - "@babel/types": "^7.24.5", + "@babel/types": "^7.24.6", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" @@ -303,12 +303,12 @@ } }, 
"node_modules/@babel/helper-compilation-targets": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz", - "integrity": "sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.6.tgz", + "integrity": "sha512-VZQ57UsDGlX/5fFA7GkVPplZhHsVc+vuErWgdOiysI9Ksnw0Pbbd6pnPiR/mmJyKHgyIW0c7KT32gmhiF+cirg==", "dependencies": { - "@babel/compat-data": "^7.23.5", - "@babel/helper-validator-option": "^7.23.5", + "@babel/compat-data": "^7.24.6", + "@babel/helper-validator-option": "^7.24.6", "browserslist": "^4.22.2", "lru-cache": "^5.1.1", "semver": "^6.3.1" @@ -387,31 +387,31 @@ } }, "node_modules/@babel/helper-environment-visitor": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", - "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.6.tgz", + "integrity": "sha512-Y50Cg3k0LKLMjxdPjIl40SdJgMB85iXn27Vk/qbHZCFx/o5XO3PSnpi675h1KEmmDb6OFArfd5SCQEQ5Q4H88g==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-function-name": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", - "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.6.tgz", + "integrity": "sha512-xpeLqeeRkbxhnYimfr2PC+iA0Q7ljX/d1eZ9/inYbmfG2jpl8Lu3DyXvpOAnrS5kxkfOWJjioIMQsaMBXFI05w==", "dependencies": { - 
"@babel/template": "^7.22.15", - "@babel/types": "^7.23.0" + "@babel/template": "^7.24.6", + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-hoist-variables": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", - "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.6.tgz", + "integrity": "sha512-SF/EMrC3OD7dSta1bLJIlrsVxwtd0UpjRJqLno6125epQMJ/kyFmpTT4pbvPbdQHzCHg+biQ7Syo8lnDtbR+uA==", "dependencies": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -430,26 +430,26 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": "7.24.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.3.tgz", - "integrity": "sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.6.tgz", + "integrity": "sha512-a26dmxFJBF62rRO9mmpgrfTLsAuyHk4e1hKTUkD/fcMfynt8gvEKwQPQDVxWhca8dHoDck+55DFt42zV0QMw5g==", "dependencies": { - "@babel/types": "^7.24.0" + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.5.tgz", - "integrity": "sha512-9GxeY8c2d2mdQUP1Dye0ks3VDyIMS98kt/llQ2nUId8IsWqTF0l1LkSX0/uP7l7MCDrzXS009Hyhe2gzTiGW8A==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.6.tgz", + "integrity": 
"sha512-Y/YMPm83mV2HJTbX1Qh2sjgjqcacvOlhbzdCCsSlblOKjSYmQqEbO6rUniWQyRo9ncyfjT8hnUjlG06RXDEmcA==", "dependencies": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-module-imports": "^7.24.3", - "@babel/helper-simple-access": "^7.24.5", - "@babel/helper-split-export-declaration": "^7.24.5", - "@babel/helper-validator-identifier": "^7.24.5" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-module-imports": "^7.24.6", + "@babel/helper-simple-access": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", + "@babel/helper-validator-identifier": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -513,11 +513,11 @@ } }, "node_modules/@babel/helper-simple-access": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.5.tgz", - "integrity": "sha512-uH3Hmf5q5n7n8mz7arjUlDOCbttY/DW4DYhE6FUsjKJ/oYC1kQQUvwEQWxRwUpX9qQKRXeqLwWxrqilMrf32sQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.6.tgz", + "integrity": "sha512-nZzcMMD4ZhmB35MOOzQuiGO5RzL6tJbsT37Zx8M5L/i9KSrukGXWTjLe1knIbb/RmxoJE9GON9soq0c0VEMM5g==", "dependencies": { - "@babel/types": "^7.24.5" + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -536,36 +536,36 @@ } }, "node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.5.tgz", - "integrity": "sha512-5CHncttXohrHk8GWOFCcCl4oRD9fKosWlIRgWm4ql9VYioKm52Mk2xsmoohvm7f3JoiLSM5ZgJuRaf5QZZYd3Q==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.6.tgz", + "integrity": "sha512-CvLSkwXGWnYlF9+J3iZUvwgAxKiYzK3BWuo+mLzD/MDGOZDj7Gq8+hqaOkMxmJwmlv0iu86uH5fdADd9Hxkymw==", "dependencies": { - "@babel/types": "^7.24.5" + "@babel/types": "^7.24.6" }, 
"engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.1.tgz", - "integrity": "sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.6.tgz", + "integrity": "sha512-WdJjwMEkmBicq5T9fm/cHND3+UlFa2Yj8ALLgmoSQAJZysYbBjw+azChSGPN4DSPLXOcooGRvDwZWMcF/mLO2Q==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz", - "integrity": "sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.6.tgz", + "integrity": "sha512-4yA7s865JHaqUdRbnaxarZREuPTHrjpDT+pXoAZ1yhyo6uFnIEpS8VMu16siFOHDpZNKYv5BObhsB//ycbICyw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz", - "integrity": "sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.6.tgz", + "integrity": "sha512-Jktc8KkF3zIkePb48QO+IapbXlSapOW9S+ogZZkcO6bABgYAxtZcjZ/O005111YLf+j4M84uEgwYoidDkXbCkQ==", "engines": { "node": ">=6.9.0" } @@ -585,24 +585,23 @@ } }, "node_modules/@babel/helpers": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.5.tgz", - "integrity": 
"sha512-CiQmBMMpMQHwM5m01YnrM6imUG1ebgYJ+fAIW4FZe6m4qHTPaRHti+R8cggAwkdz4oXhtO4/K9JWlh+8hIfR2Q==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.6.tgz", + "integrity": "sha512-V2PI+NqnyFu1i0GyTd/O/cTpxzQCYioSkUIRmgo7gFEHKKCg5w46+r/A6WeUR1+P3TeQ49dspGPNd/E3n9AnnA==", "dependencies": { - "@babel/template": "^7.24.0", - "@babel/traverse": "^7.24.5", - "@babel/types": "^7.24.5" + "@babel/template": "^7.24.6", + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.24.2", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.2.tgz", - "integrity": "sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.6.tgz", + "integrity": "sha512-2YnuOp4HAk2BsBrJJvYCbItHx0zWscI1C3zgWkz+wDyD9I7GIVrfnLyrR4Y1VR+7p+chAEcrgRQYZAGIKMV7vQ==", "dependencies": { - "@babel/helper-validator-identifier": "^7.22.20", + "@babel/helper-validator-identifier": "^7.24.6", "chalk": "^2.4.2", "js-tokens": "^4.0.0", "picocolors": "^1.0.0" @@ -612,9 +611,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.5.tgz", - "integrity": "sha512-EOv5IK8arwh3LI47dz1b0tKUb/1uhHAnHJOrjgtQMIpu1uXd9mlFrJg9IUgGUgZ41Ch0K8REPTYpO7B76b4vJg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.6.tgz", + "integrity": "sha512-eNZXdfU35nJC2h24RznROuOpO94h6x8sg9ju0tT9biNtLZ2vuP8SduLqqV+/8+cebSLV9SJEAN5Z3zQbJG/M+Q==", "bin": { "parser": "bin/babel-parser.js" }, @@ -1946,31 +1945,31 @@ } }, "node_modules/@babel/template": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", - "integrity": 
"sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.6.tgz", + "integrity": "sha512-3vgazJlLwNXi9jhrR1ef8qiB65L1RK90+lEQwv4OxveHnqC3BfmnHdgySwRLzf6akhlOYenT+b7AfWq+a//AHw==", "dependencies": { - "@babel/code-frame": "^7.23.5", - "@babel/parser": "^7.24.0", - "@babel/types": "^7.24.0" + "@babel/code-frame": "^7.24.6", + "@babel/parser": "^7.24.6", + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.5.tgz", - "integrity": "sha512-7aaBLeDQ4zYcUFDUD41lJc1fG8+5IU9DaNSJAgal866FGvmD5EbWQgnEC6kO1gGLsX0esNkfnJSndbTXA3r7UA==", - "dependencies": { - "@babel/code-frame": "^7.24.2", - "@babel/generator": "^7.24.5", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.24.5", - "@babel/parser": "^7.24.5", - "@babel/types": "^7.24.5", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.6.tgz", + "integrity": "sha512-OsNjaJwT9Zn8ozxcfoBc+RaHdj3gFmCmYoQLUII1o6ZrUwku0BMg80FoOTPx+Gi6XhcQxAYE4xyjPTo4SxEQqw==", + "dependencies": { + "@babel/code-frame": "^7.24.6", + "@babel/generator": "^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-function-name": "^7.24.6", + "@babel/helper-hoist-variables": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", + "@babel/parser": "^7.24.6", + "@babel/types": "^7.24.6", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -1979,12 +1978,12 @@ } }, "node_modules/@babel/types": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.5.tgz", - "integrity": 
"sha512-6mQNsaLeXTw0nxYUYu+NSa4Hx4BlF1x1x8/PMFbiR+GBSr+2DkECc69b8hgy2frEodNcvPffeH8YfWd3LI6jhQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.6.tgz", + "integrity": "sha512-WaMsgi6Q8zMgMth93GvWPXkhAIEobfsIkLTacoVZoK1J0CevIPGYY2Vo5YvJGqyHqXM6P4ppOYGsIRU8MM9pFQ==", "dependencies": { - "@babel/helper-string-parser": "^7.24.1", - "@babel/helper-validator-identifier": "^7.24.5", + "@babel/helper-string-parser": "^7.24.6", + "@babel/helper-validator-identifier": "^7.24.6", "to-fast-properties": "^2.0.0" }, "engines": { @@ -18066,34 +18065,34 @@ } }, "@babel/code-frame": { - "version": "7.24.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.2.tgz", - "integrity": "sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.6.tgz", + "integrity": "sha512-ZJhac6FkEd1yhG2AHOmfcXG4ceoLltoCVJjN5XsWN9BifBQr+cHJbWi0h68HZuSORq+3WtJ2z0hwF2NG1b5kcA==", "requires": { - "@babel/highlight": "^7.24.2", + "@babel/highlight": "^7.24.6", "picocolors": "^1.0.0" } }, "@babel/compat-data": { - "version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.4.tgz", - "integrity": "sha512-vg8Gih2MLK+kOkHJp4gBEIkyaIi00jgWot2D9QOmmfLC8jINSOzmCLta6Bvz/JSBCqnegV0L80jhxkol5GWNfQ==" + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.6.tgz", + "integrity": "sha512-aC2DGhBq5eEdyXWqrDInSqQjO0k8xtPRf5YylULqx8MCd6jBtzqfta/3ETMRpuKIc5hyswfO80ObyA1MvkCcUQ==" }, "@babel/core": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.5.tgz", - "integrity": "sha512-tVQRucExLQ02Boi4vdPp49svNGcfL2GhdTCT9aldhXgCJVAI21EtRfBettiuLUwce/7r6bFdgs6JFkcdTiFttA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.6.tgz", + "integrity": 
"sha512-qAHSfAdVyFmIvl0VHELib8xar7ONuSHrE2hLnsaWkYNTI68dmi1x8GYDhJjMI/e7XWal9QBlZkwbOnkcw7Z8gQ==", "requires": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.24.2", - "@babel/generator": "^7.24.5", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-module-transforms": "^7.24.5", - "@babel/helpers": "^7.24.5", - "@babel/parser": "^7.24.5", - "@babel/template": "^7.24.0", - "@babel/traverse": "^7.24.5", - "@babel/types": "^7.24.5", + "@babel/code-frame": "^7.24.6", + "@babel/generator": "^7.24.6", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helpers": "^7.24.6", + "@babel/parser": "^7.24.6", + "@babel/template": "^7.24.6", + "@babel/traverse": "^7.24.6", + "@babel/types": "^7.24.6", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -18109,11 +18108,11 @@ } }, "@babel/generator": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.5.tgz", - "integrity": "sha512-x32i4hEXvr+iI0NEoEfDKzlemF8AmtOP8CcrRaEcpzysWuoEb1KknpcvMsHKPONoKZiDuItklgWhB18xEhr9PA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.6.tgz", + "integrity": "sha512-S7m4eNa6YAPJRHmKsLHIDJhNAGNKoWNiWefz1MBbpnt8g9lvMDl1hir4P9bo/57bQEmuwEhnRU/AMWsD0G/Fbg==", "requires": { - "@babel/types": "^7.24.5", + "@babel/types": "^7.24.6", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" @@ -18137,12 +18136,12 @@ } }, "@babel/helper-compilation-targets": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz", - "integrity": "sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.6.tgz", + 
"integrity": "sha512-VZQ57UsDGlX/5fFA7GkVPplZhHsVc+vuErWgdOiysI9Ksnw0Pbbd6pnPiR/mmJyKHgyIW0c7KT32gmhiF+cirg==", "requires": { - "@babel/compat-data": "^7.23.5", - "@babel/helper-validator-option": "^7.23.5", + "@babel/compat-data": "^7.24.6", + "@babel/helper-validator-option": "^7.24.6", "browserslist": "^4.22.2", "lru-cache": "^5.1.1", "semver": "^6.3.1" @@ -18205,25 +18204,25 @@ } }, "@babel/helper-environment-visitor": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", - "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==" + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.6.tgz", + "integrity": "sha512-Y50Cg3k0LKLMjxdPjIl40SdJgMB85iXn27Vk/qbHZCFx/o5XO3PSnpi675h1KEmmDb6OFArfd5SCQEQ5Q4H88g==" }, "@babel/helper-function-name": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", - "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.6.tgz", + "integrity": "sha512-xpeLqeeRkbxhnYimfr2PC+iA0Q7ljX/d1eZ9/inYbmfG2jpl8Lu3DyXvpOAnrS5kxkfOWJjioIMQsaMBXFI05w==", "requires": { - "@babel/template": "^7.22.15", - "@babel/types": "^7.23.0" + "@babel/template": "^7.24.6", + "@babel/types": "^7.24.6" } }, "@babel/helper-hoist-variables": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", - "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.6.tgz", + "integrity": "sha512-SF/EMrC3OD7dSta1bLJIlrsVxwtd0UpjRJqLno6125epQMJ/kyFmpTT4pbvPbdQHzCHg+biQ7Syo8lnDtbR+uA==", "requires": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" } }, "@babel/helper-member-expression-to-functions": { @@ -18236,23 +18235,23 @@ } }, "@babel/helper-module-imports": { - "version": "7.24.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.3.tgz", - "integrity": "sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.6.tgz", + "integrity": "sha512-a26dmxFJBF62rRO9mmpgrfTLsAuyHk4e1hKTUkD/fcMfynt8gvEKwQPQDVxWhca8dHoDck+55DFt42zV0QMw5g==", "requires": { - "@babel/types": "^7.24.0" + "@babel/types": "^7.24.6" } }, "@babel/helper-module-transforms": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.5.tgz", - "integrity": "sha512-9GxeY8c2d2mdQUP1Dye0ks3VDyIMS98kt/llQ2nUId8IsWqTF0l1LkSX0/uP7l7MCDrzXS009Hyhe2gzTiGW8A==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.6.tgz", + "integrity": "sha512-Y/YMPm83mV2HJTbX1Qh2sjgjqcacvOlhbzdCCsSlblOKjSYmQqEbO6rUniWQyRo9ncyfjT8hnUjlG06RXDEmcA==", "requires": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-module-imports": "^7.24.3", - "@babel/helper-simple-access": "^7.24.5", - "@babel/helper-split-export-declaration": "^7.24.5", - "@babel/helper-validator-identifier": "^7.24.5" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-module-imports": "^7.24.6", + "@babel/helper-simple-access": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", + "@babel/helper-validator-identifier": 
"^7.24.6" } }, "@babel/helper-optimise-call-expression": { @@ -18292,11 +18291,11 @@ } }, "@babel/helper-simple-access": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.5.tgz", - "integrity": "sha512-uH3Hmf5q5n7n8mz7arjUlDOCbttY/DW4DYhE6FUsjKJ/oYC1kQQUvwEQWxRwUpX9qQKRXeqLwWxrqilMrf32sQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.6.tgz", + "integrity": "sha512-nZzcMMD4ZhmB35MOOzQuiGO5RzL6tJbsT37Zx8M5L/i9KSrukGXWTjLe1knIbb/RmxoJE9GON9soq0c0VEMM5g==", "requires": { - "@babel/types": "^7.24.5" + "@babel/types": "^7.24.6" } }, "@babel/helper-skip-transparent-expression-wrappers": { @@ -18309,27 +18308,27 @@ } }, "@babel/helper-split-export-declaration": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.5.tgz", - "integrity": "sha512-5CHncttXohrHk8GWOFCcCl4oRD9fKosWlIRgWm4ql9VYioKm52Mk2xsmoohvm7f3JoiLSM5ZgJuRaf5QZZYd3Q==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.6.tgz", + "integrity": "sha512-CvLSkwXGWnYlF9+J3iZUvwgAxKiYzK3BWuo+mLzD/MDGOZDj7Gq8+hqaOkMxmJwmlv0iu86uH5fdADd9Hxkymw==", "requires": { - "@babel/types": "^7.24.5" + "@babel/types": "^7.24.6" } }, "@babel/helper-string-parser": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.1.tgz", - "integrity": "sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==" + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.6.tgz", + "integrity": "sha512-WdJjwMEkmBicq5T9fm/cHND3+UlFa2Yj8ALLgmoSQAJZysYbBjw+azChSGPN4DSPLXOcooGRvDwZWMcF/mLO2Q==" }, "@babel/helper-validator-identifier": { - "version": 
"7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz", - "integrity": "sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA==" + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.6.tgz", + "integrity": "sha512-4yA7s865JHaqUdRbnaxarZREuPTHrjpDT+pXoAZ1yhyo6uFnIEpS8VMu16siFOHDpZNKYv5BObhsB//ycbICyw==" }, "@babel/helper-validator-option": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz", - "integrity": "sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==" + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.6.tgz", + "integrity": "sha512-Jktc8KkF3zIkePb48QO+IapbXlSapOW9S+ogZZkcO6bABgYAxtZcjZ/O005111YLf+j4M84uEgwYoidDkXbCkQ==" }, "@babel/helper-wrap-function": { "version": "7.22.20", @@ -18343,30 +18342,29 @@ } }, "@babel/helpers": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.5.tgz", - "integrity": "sha512-CiQmBMMpMQHwM5m01YnrM6imUG1ebgYJ+fAIW4FZe6m4qHTPaRHti+R8cggAwkdz4oXhtO4/K9JWlh+8hIfR2Q==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.6.tgz", + "integrity": "sha512-V2PI+NqnyFu1i0GyTd/O/cTpxzQCYioSkUIRmgo7gFEHKKCg5w46+r/A6WeUR1+P3TeQ49dspGPNd/E3n9AnnA==", "requires": { - "@babel/template": "^7.24.0", - "@babel/traverse": "^7.24.5", - "@babel/types": "^7.24.5" + "@babel/template": "^7.24.6", + "@babel/types": "^7.24.6" } }, "@babel/highlight": { - "version": "7.24.2", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.2.tgz", - "integrity": "sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA==", + 
"version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.6.tgz", + "integrity": "sha512-2YnuOp4HAk2BsBrJJvYCbItHx0zWscI1C3zgWkz+wDyD9I7GIVrfnLyrR4Y1VR+7p+chAEcrgRQYZAGIKMV7vQ==", "requires": { - "@babel/helper-validator-identifier": "^7.22.20", + "@babel/helper-validator-identifier": "^7.24.6", "chalk": "^2.4.2", "js-tokens": "^4.0.0", "picocolors": "^1.0.0" } }, "@babel/parser": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.5.tgz", - "integrity": "sha512-EOv5IK8arwh3LI47dz1b0tKUb/1uhHAnHJOrjgtQMIpu1uXd9mlFrJg9IUgGUgZ41Ch0K8REPTYpO7B76b4vJg==" + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.6.tgz", + "integrity": "sha512-eNZXdfU35nJC2h24RznROuOpO94h6x8sg9ju0tT9biNtLZ2vuP8SduLqqV+/8+cebSLV9SJEAN5Z3zQbJG/M+Q==" }, "@babel/plugin-bugfix-firefox-class-in-computed-class-key": { "version": "7.24.5", @@ -19262,39 +19260,39 @@ } }, "@babel/template": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", - "integrity": "sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.6.tgz", + "integrity": "sha512-3vgazJlLwNXi9jhrR1ef8qiB65L1RK90+lEQwv4OxveHnqC3BfmnHdgySwRLzf6akhlOYenT+b7AfWq+a//AHw==", "requires": { - "@babel/code-frame": "^7.23.5", - "@babel/parser": "^7.24.0", - "@babel/types": "^7.24.0" + "@babel/code-frame": "^7.24.6", + "@babel/parser": "^7.24.6", + "@babel/types": "^7.24.6" } }, "@babel/traverse": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.5.tgz", - "integrity": "sha512-7aaBLeDQ4zYcUFDUD41lJc1fG8+5IU9DaNSJAgal866FGvmD5EbWQgnEC6kO1gGLsX0esNkfnJSndbTXA3r7UA==", - "requires": { - "@babel/code-frame": "^7.24.2", - "@babel/generator": "^7.24.5", - "@babel/helper-environment-visitor": 
"^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.24.5", - "@babel/parser": "^7.24.5", - "@babel/types": "^7.24.5", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.6.tgz", + "integrity": "sha512-OsNjaJwT9Zn8ozxcfoBc+RaHdj3gFmCmYoQLUII1o6ZrUwku0BMg80FoOTPx+Gi6XhcQxAYE4xyjPTo4SxEQqw==", + "requires": { + "@babel/code-frame": "^7.24.6", + "@babel/generator": "^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-function-name": "^7.24.6", + "@babel/helper-hoist-variables": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", + "@babel/parser": "^7.24.6", + "@babel/types": "^7.24.6", "debug": "^4.3.1", "globals": "^11.1.0" } }, "@babel/types": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.5.tgz", - "integrity": "sha512-6mQNsaLeXTw0nxYUYu+NSa4Hx4BlF1x1x8/PMFbiR+GBSr+2DkECc69b8hgy2frEodNcvPffeH8YfWd3LI6jhQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.6.tgz", + "integrity": "sha512-WaMsgi6Q8zMgMth93GvWPXkhAIEobfsIkLTacoVZoK1J0CevIPGYY2Vo5YvJGqyHqXM6P4ppOYGsIRU8MM9pFQ==", "requires": { - "@babel/helper-string-parser": "^7.24.1", - "@babel/helper-validator-identifier": "^7.24.5", + "@babel/helper-string-parser": "^7.24.6", + "@babel/helper-validator-identifier": "^7.24.6", "to-fast-properties": "^2.0.0" } }, diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index 3dacea19ff78..599282e9a82e 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -83,7 +83,7 @@ "xss": "^1.0.15" }, "devDependencies": { - "@babel/core": "^7.24.5", + "@babel/core": "^7.24.6", "@babel/preset-env": "^7.24.5", "autoprefixer": "^10.4.19", "babel-loader": "^9.1.3", From 034bc181b70895ab21a94c516a28f57e0100d7aa Mon Sep 17 00:00:00 
2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:01:37 +0300 Subject: [PATCH 045/150] fix: Fix Retry NFT fetcher (#10146) --- apps/explorer/lib/explorer/chain.ex | 8 +++-- .../fetcher/token_instance/helper_test.exs | 31 +++++++++++++++++++ 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 34f8cd5ca392..8f283b4d792e 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -3707,7 +3707,7 @@ defmodule Explorer.Chain do Instance |> where([instance], not is_nil(instance.error)) - |> where([instance], is_nil(instance.refetch_after) or instance.refetch_after > ^DateTime.utc_now()) + |> where([instance], is_nil(instance.refetch_after) or instance.refetch_after < ^DateTime.utc_now()) |> select([instance], %{ contract_address_hash: instance.token_contract_address_hash, token_id: instance.token_id @@ -3918,6 +3918,8 @@ defmodule Explorer.Chain do base = config[:exp_timeout_base] max_refetch_interval = config[:max_refetch_interval] + max_retry_count = :math.log(max_refetch_interval / 1000 / coef) / :math.log(base) + from( token_instance in Instance, update: [ @@ -3934,7 +3936,7 @@ defmodule Explorer.Chain do fragment( """ CASE WHEN EXCLUDED.metadata IS NULL THEN - NOW() AT TIME ZONE 'UTC' + LEAST(interval '1 seconds' * (? * ? ^ (? + 1)), interval '1 milliseconds' * ?) + NOW() AT TIME ZONE 'UTC' + interval '1 seconds' * (? * ? ^ LEAST(? 
+ 1.0, ?)) ELSE NULL END @@ -3942,7 +3944,7 @@ defmodule Explorer.Chain do ^coef, ^base, token_instance.retries_count, - ^max_refetch_interval + ^max_retry_count ) ] ], diff --git a/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs b/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs index 3c13ce2abea3..552ea312090b 100644 --- a/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs +++ b/apps/indexer/test/indexer/fetcher/token_instance/helper_test.exs @@ -520,5 +520,36 @@ defmodule Indexer.Fetcher.TokenInstance.HelperTest do "https://ipfs.io/ipfs/QmU6DGXciSZXTH1fUKkEqj74P8FeXPRKxSTjgRsVKUQa95/base/300067000000000000.JPG" } end + + test "Don't fail on high retries count" do + config = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Retry) + + coef = config[:exp_timeout_coeff] + base = config[:exp_timeout_base] + max_refetch_interval = config[:max_refetch_interval] + + erc_721_token = insert(:token, type: "ERC-721") + + token_instance = + insert(:token_instance, + token_contract_address_hash: erc_721_token.contract_address_hash, + error: "error", + metadata: nil, + retries_count: 50 + ) + + Helper.batch_fetch_instances([ + %{contract_address_hash: token_instance.token_contract_address_hash, token_id: token_instance.token_id} + ]) + + now = DateTime.utc_now() + refetch_after = DateTime.add(now, max_refetch_interval, :millisecond) + + [instance] = Repo.all(Instance) + + assert instance.retries_count == 51 + assert DateTime.diff(refetch_after, instance.refetch_after) < 1 + assert !is_nil(instance.error) + end end end From 13c027f90ca67c4337011ff89a83d12cd43251f8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:02:06 +0300 Subject: [PATCH 046/150] chore(deps-dev): bump eslint-plugin-promise (#10168) Bumps [eslint-plugin-promise](https://github.com/eslint-community/eslint-plugin-promise) from 6.1.1 to 6.2.0. 
- [Release notes](https://github.com/eslint-community/eslint-plugin-promise/releases) - [Changelog](https://github.com/eslint-community/eslint-plugin-promise/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint-community/eslint-plugin-promise/compare/v6.1.1...v6.2.0) --- updated-dependencies: - dependency-name: eslint-plugin-promise dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 19 +++++++++++-------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index eb83cec1bbe0..846611dafcc0 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -82,7 +82,7 @@ "eslint-config-standard": "^17.1.0", "eslint-plugin-import": "^2.29.1", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^6.1.1", + "eslint-plugin-promise": "^6.2.0", "file-loader": "^6.2.0", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0", @@ -7670,15 +7670,18 @@ } }, "node_modules/eslint-plugin-promise": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", - "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.2.0.tgz", + "integrity": "sha512-QmAqwizauvnKOlifxyDj2ObfULpHQawlg/zQdgEixur9vl0CvZGv/LCJV2rtj3210QCoeGBzVMfMXqGAOr/4fA==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, + "funding": { + "url": "https://opencollective.com/eslint" + }, "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" + "eslint": "^7.0.0 || 
^8.0.0 || ^9.0.0" } }, "node_modules/eslint-scope": { @@ -23743,9 +23746,9 @@ } }, "eslint-plugin-promise": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", - "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.2.0.tgz", + "integrity": "sha512-QmAqwizauvnKOlifxyDj2ObfULpHQawlg/zQdgEixur9vl0CvZGv/LCJV2rtj3210QCoeGBzVMfMXqGAOr/4fA==", "dev": true, "requires": {} }, diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index 599282e9a82e..cc68c8b9cf4e 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -94,7 +94,7 @@ "eslint-config-standard": "^17.1.0", "eslint-plugin-import": "^2.29.1", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^6.1.1", + "eslint-plugin-promise": "^6.2.0", "file-loader": "^6.2.0", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0", From 80cb860df8f58d6fa1b6c63c5d0af0d2cf5c7962 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:03:46 +0300 Subject: [PATCH 047/150] chore(deps): bump core-js in /apps/block_scout_web/assets (#10175) Bumps [core-js](https://github.com/zloirock/core-js/tree/HEAD/packages/core-js) from 3.36.1 to 3.37.1. - [Release notes](https://github.com/zloirock/core-js/releases) - [Changelog](https://github.com/zloirock/core-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/zloirock/core-js/commits/v3.37.1/packages/core-js) --- updated-dependencies: - dependency-name: core-js dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 14 +++++++------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index 846611dafcc0..ce271adf162c 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -17,7 +17,7 @@ "chart.js": "^4.4.2", "chartjs-adapter-luxon": "^1.3.1", "clipboard": "^2.0.11", - "core-js": "^3.36.1", + "core-js": "^3.37.1", "crypto-browserify": "^3.12.0", "dropzone": "^5.9.3", "eth-net-props": "^1.0.41", @@ -5974,9 +5974,9 @@ } }, "node_modules/core-js": { - "version": "3.36.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.36.1.tgz", - "integrity": "sha512-BTvUrwxVBezj5SZ3f10ImnX2oRByMxql3EimVqMysepbC9EeMUOpLwdy6Eoili2x6E4kf+ZUB5k/+Jv55alPfA==", + "version": "3.37.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.37.1.tgz", + "integrity": "sha512-Xn6qmxrQZyB0FFY8E3bgRXei3lWDJHhvI+u0q9TKIYM49G8pAr0FgnnrFRAmsbptZL1yxRADVXn+x5AGsbBfyw==", "hasInstallScript": true, "funding": { "type": "opencollective", @@ -22353,9 +22353,9 @@ } }, "core-js": { - "version": "3.36.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.36.1.tgz", - "integrity": "sha512-BTvUrwxVBezj5SZ3f10ImnX2oRByMxql3EimVqMysepbC9EeMUOpLwdy6Eoili2x6E4kf+ZUB5k/+Jv55alPfA==" + "version": "3.37.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.37.1.tgz", + "integrity": "sha512-Xn6qmxrQZyB0FFY8E3bgRXei3lWDJHhvI+u0q9TKIYM49G8pAr0FgnnrFRAmsbptZL1yxRADVXn+x5AGsbBfyw==" }, "core-js-compat": { "version": "3.36.1", diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index cc68c8b9cf4e..fa4e28c8a096 100644 --- a/apps/block_scout_web/assets/package.json +++ 
b/apps/block_scout_web/assets/package.json @@ -29,7 +29,7 @@ "chart.js": "^4.4.2", "chartjs-adapter-luxon": "^1.3.1", "clipboard": "^2.0.11", - "core-js": "^3.36.1", + "core-js": "^3.37.1", "crypto-browserify": "^3.12.0", "dropzone": "^5.9.3", "eth-net-props": "^1.0.41", From 064761b9aa24bce3c38e22325547f2a2d85be4d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:04:04 +0300 Subject: [PATCH 048/150] chore(deps): bump sweetalert2 in /apps/block_scout_web/assets (#10171) Bumps [sweetalert2](https://github.com/sweetalert2/sweetalert2) from 11.10.8 to 11.11.0. - [Release notes](https://github.com/sweetalert2/sweetalert2/releases) - [Changelog](https://github.com/sweetalert2/sweetalert2/blob/main/CHANGELOG.md) - [Commits](https://github.com/sweetalert2/sweetalert2/compare/v11.10.8...v11.11.0) --- updated-dependencies: - dependency-name: sweetalert2 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 14 +++++++------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index ce271adf162c..ffdeee81cd8d 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -61,7 +61,7 @@ "redux": "^5.0.1", "stream-browserify": "^3.0.0", "stream-http": "^3.1.1", - "sweetalert2": "^11.10.8", + "sweetalert2": "^11.11.0", "urijs": "^1.19.11", "url": "^0.11.3", "util": "^0.12.5", @@ -16188,9 +16188,9 @@ } }, "node_modules/sweetalert2": { - "version": "11.10.8", - "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.10.8.tgz", - "integrity": "sha512-oAkYROBfXBY+4sVbQEIcN+ZxAx69lsmz5WEBwdEpyS4m59vOBNlRU5/fJpAI1MVfiDwFZiGwVzB/KBpOyfLNtg==", + "version": "11.11.0", + "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.11.0.tgz", + "integrity": "sha512-wKCTtoE6lQVDKaJ5FFq+znk/YykJmJlD8RnLZps8C7DyivctCoRlVeeOwnKfgwKS+QJYon7s++3dmNi3/am1tw==", "funding": { "type": "individual", "url": "https://github.com/sponsors/limonte" @@ -30061,9 +30061,9 @@ } }, "sweetalert2": { - "version": "11.10.8", - "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.10.8.tgz", - "integrity": "sha512-oAkYROBfXBY+4sVbQEIcN+ZxAx69lsmz5WEBwdEpyS4m59vOBNlRU5/fJpAI1MVfiDwFZiGwVzB/KBpOyfLNtg==" + "version": "11.11.0", + "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.11.0.tgz", + "integrity": "sha512-wKCTtoE6lQVDKaJ5FFq+znk/YykJmJlD8RnLZps8C7DyivctCoRlVeeOwnKfgwKS+QJYon7s++3dmNi3/am1tw==" }, "symbol-tree": { "version": "3.2.4", diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index fa4e28c8a096..a795340ac9d3 100644 --- 
a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -73,7 +73,7 @@ "redux": "^5.0.1", "stream-browserify": "^3.0.0", "stream-http": "^3.1.1", - "sweetalert2": "^11.10.8", + "sweetalert2": "^11.11.0", "urijs": "^1.19.11", "url": "^0.11.3", "util": "^0.12.5", From f2737ec09ead4808bc6e7f454424b8370377cf75 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:04:32 +0300 Subject: [PATCH 049/150] chore(deps): bump mixpanel-browser in /apps/block_scout_web/assets (#10170) Bumps [mixpanel-browser](https://github.com/mixpanel/mixpanel-js) from 2.50.0 to 2.51.0. - [Release notes](https://github.com/mixpanel/mixpanel-js/releases) - [Changelog](https://github.com/mixpanel/mixpanel-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/mixpanel/mixpanel-js/compare/v2.50.0...v2.51.0) --- updated-dependencies: - dependency-name: mixpanel-browser dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 86 +++++++++---------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 44 insertions(+), 44 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index ffdeee81cd8d..7a6b859daf07 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -46,7 +46,7 @@ "lodash.reduce": "^4.6.0", "luxon": "^3.4.4", "malihu-custom-scrollbar-plugin": "3.1.5", - "mixpanel-browser": "^2.50.0", + "mixpanel-browser": "^2.51.0", "moment": "^2.30.1", "nanomorph": "^5.4.0", "numeral": "^2.0.6", @@ -3417,11 +3417,11 @@ } }, "node_modules/@rrweb/types": { - "version": "2.0.0-alpha.13", - "resolved": "https://registry.npmjs.org/@rrweb/types/-/types-2.0.0-alpha.13.tgz", - "integrity": "sha512-ytq+MeVm/vP2ybw+gTAN3Xvt7HN2yS+wlbfnwHpQMftxrwzq0kEZHdw+Jp5WUvvpONWzXriNAUU9dW0qLGkzNg==", + "version": "2.0.0-alpha.14", + "resolved": "https://registry.npmjs.org/@rrweb/types/-/types-2.0.0-alpha.14.tgz", + "integrity": "sha512-H0qKW75SdsZM4/4116fQDDC3QkUxbP7A9AY5PK2nyUV56KReAQ1sH8ZHu9tomvn0kFJUXhtvjv2H6G6xxSJNqA==", "dependencies": { - "rrweb-snapshot": "^2.0.0-alpha.13" + "rrweb-snapshot": "^2.0.0-alpha.14" } }, "node_modules/@scure/base": { @@ -12955,11 +12955,11 @@ "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==" }, "node_modules/mixpanel-browser": { - "version": "2.50.0", - "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.50.0.tgz", - "integrity": "sha512-iP4sbSRMemjWbnH+KQZRxZ360bcXtFpoQuUiWjjdw9AsURn0MrR9/2RnPOJ8J8tt1dMm7kTKwOjGV8pkbWbmAA==", + "version": "2.51.0", + "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.51.0.tgz", + "integrity": 
"sha512-cNjvvhlRkSlqW1w4nxRpK5y4R3QcfhY7H/ZfvZ4jLeiBUNeeuwyQTgOAk/mYiBDOonWEdeN7h3EkL2BdqKd2Sw==", "dependencies": { - "rrweb": "2.0.0-alpha.4" + "rrweb": "2.0.0-alpha.13" } }, "node_modules/mkdirp": { @@ -15172,32 +15172,32 @@ } }, "node_modules/rrdom": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/rrdom/-/rrdom-0.1.7.tgz", - "integrity": "sha512-ZLd8f14z9pUy2Hk9y636cNv5Y2BMnNEY99wxzW9tD2BLDfe1xFxtLjB4q/xCBYo6HRe0wofzKzjm4JojmpBfFw==", + "version": "2.0.0-alpha.14", + "resolved": "https://registry.npmjs.org/rrdom/-/rrdom-2.0.0-alpha.14.tgz", + "integrity": "sha512-aEDi8MNfKWRnWHM1Mhfx535EtHHYHg6L17PC3rvMUJMgHLcyMQsmI+OMeWt5RzXw1J2fdwhMV0grpp5VDTqRaA==", "dependencies": { - "rrweb-snapshot": "^2.0.0-alpha.4" + "rrweb-snapshot": "^2.0.0-alpha.14" } }, "node_modules/rrweb": { - "version": "2.0.0-alpha.4", - "resolved": "https://registry.npmjs.org/rrweb/-/rrweb-2.0.0-alpha.4.tgz", - "integrity": "sha512-wEHUILbxDPcNwkM3m4qgPgXAiBJyqCbbOHyVoNEVBJzHszWEFYyTbrZqUdeb1EfmTRC2PsumCIkVcomJ/xcOzA==", + "version": "2.0.0-alpha.13", + "resolved": "https://registry.npmjs.org/rrweb/-/rrweb-2.0.0-alpha.13.tgz", + "integrity": "sha512-a8GXOCnzWHNaVZPa7hsrLZtNZ3CGjiL+YrkpLo0TfmxGLhjNZbWY2r7pE06p+FcjFNlgUVTmFrSJbK3kO7yxvw==", "dependencies": { - "@rrweb/types": "^2.0.0-alpha.4", + "@rrweb/types": "^2.0.0-alpha.13", "@types/css-font-loading-module": "0.0.7", "@xstate/fsm": "^1.4.0", "base64-arraybuffer": "^1.0.1", "fflate": "^0.4.4", "mitt": "^3.0.0", - "rrdom": "^0.1.7", - "rrweb-snapshot": "^2.0.0-alpha.4" + "rrdom": "^2.0.0-alpha.13", + "rrweb-snapshot": "^2.0.0-alpha.13" } }, "node_modules/rrweb-snapshot": { - "version": "2.0.0-alpha.13", - "resolved": "https://registry.npmjs.org/rrweb-snapshot/-/rrweb-snapshot-2.0.0-alpha.13.tgz", - "integrity": "sha512-slbhNBCYjxLGCeH95a67ECCy5a22nloXp1F5wF7DCzUNw80FN7tF9Lef1sRGLNo32g3mNqTc2sWLATlKejMxYw==" + "version": "2.0.0-alpha.14", + "resolved": 
"https://registry.npmjs.org/rrweb-snapshot/-/rrweb-snapshot-2.0.0-alpha.14.tgz", + "integrity": "sha512-HuJd7iZauzhf7XI5FFtCGeUXkHk1mgc8yvH9Km9zB09Yk2cr0bW4eKx9fWQhRFiry9yXf/vOMkUy403xTLPIrQ==" }, "node_modules/run-parallel": { "version": "1.2.0", @@ -20297,11 +20297,11 @@ } }, "@rrweb/types": { - "version": "2.0.0-alpha.13", - "resolved": "https://registry.npmjs.org/@rrweb/types/-/types-2.0.0-alpha.13.tgz", - "integrity": "sha512-ytq+MeVm/vP2ybw+gTAN3Xvt7HN2yS+wlbfnwHpQMftxrwzq0kEZHdw+Jp5WUvvpONWzXriNAUU9dW0qLGkzNg==", + "version": "2.0.0-alpha.14", + "resolved": "https://registry.npmjs.org/@rrweb/types/-/types-2.0.0-alpha.14.tgz", + "integrity": "sha512-H0qKW75SdsZM4/4116fQDDC3QkUxbP7A9AY5PK2nyUV56KReAQ1sH8ZHu9tomvn0kFJUXhtvjv2H6G6xxSJNqA==", "requires": { - "rrweb-snapshot": "^2.0.0-alpha.13" + "rrweb-snapshot": "^2.0.0-alpha.14" } }, "@scure/base": { @@ -27739,11 +27739,11 @@ "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==" }, "mixpanel-browser": { - "version": "2.50.0", - "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.50.0.tgz", - "integrity": "sha512-iP4sbSRMemjWbnH+KQZRxZ360bcXtFpoQuUiWjjdw9AsURn0MrR9/2RnPOJ8J8tt1dMm7kTKwOjGV8pkbWbmAA==", + "version": "2.51.0", + "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.51.0.tgz", + "integrity": "sha512-cNjvvhlRkSlqW1w4nxRpK5y4R3QcfhY7H/ZfvZ4jLeiBUNeeuwyQTgOAk/mYiBDOonWEdeN7h3EkL2BdqKd2Sw==", "requires": { - "rrweb": "2.0.0-alpha.4" + "rrweb": "2.0.0-alpha.13" } }, "mkdirp": { @@ -29317,32 +29317,32 @@ } }, "rrdom": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/rrdom/-/rrdom-0.1.7.tgz", - "integrity": "sha512-ZLd8f14z9pUy2Hk9y636cNv5Y2BMnNEY99wxzW9tD2BLDfe1xFxtLjB4q/xCBYo6HRe0wofzKzjm4JojmpBfFw==", + "version": "2.0.0-alpha.14", + "resolved": "https://registry.npmjs.org/rrdom/-/rrdom-2.0.0-alpha.14.tgz", + "integrity": 
"sha512-aEDi8MNfKWRnWHM1Mhfx535EtHHYHg6L17PC3rvMUJMgHLcyMQsmI+OMeWt5RzXw1J2fdwhMV0grpp5VDTqRaA==", "requires": { - "rrweb-snapshot": "^2.0.0-alpha.4" + "rrweb-snapshot": "^2.0.0-alpha.14" } }, "rrweb": { - "version": "2.0.0-alpha.4", - "resolved": "https://registry.npmjs.org/rrweb/-/rrweb-2.0.0-alpha.4.tgz", - "integrity": "sha512-wEHUILbxDPcNwkM3m4qgPgXAiBJyqCbbOHyVoNEVBJzHszWEFYyTbrZqUdeb1EfmTRC2PsumCIkVcomJ/xcOzA==", + "version": "2.0.0-alpha.13", + "resolved": "https://registry.npmjs.org/rrweb/-/rrweb-2.0.0-alpha.13.tgz", + "integrity": "sha512-a8GXOCnzWHNaVZPa7hsrLZtNZ3CGjiL+YrkpLo0TfmxGLhjNZbWY2r7pE06p+FcjFNlgUVTmFrSJbK3kO7yxvw==", "requires": { - "@rrweb/types": "^2.0.0-alpha.4", + "@rrweb/types": "^2.0.0-alpha.13", "@types/css-font-loading-module": "0.0.7", "@xstate/fsm": "^1.4.0", "base64-arraybuffer": "^1.0.1", "fflate": "^0.4.4", "mitt": "^3.0.0", - "rrdom": "^0.1.7", - "rrweb-snapshot": "^2.0.0-alpha.4" + "rrdom": "^2.0.0-alpha.13", + "rrweb-snapshot": "^2.0.0-alpha.13" } }, "rrweb-snapshot": { - "version": "2.0.0-alpha.13", - "resolved": "https://registry.npmjs.org/rrweb-snapshot/-/rrweb-snapshot-2.0.0-alpha.13.tgz", - "integrity": "sha512-slbhNBCYjxLGCeH95a67ECCy5a22nloXp1F5wF7DCzUNw80FN7tF9Lef1sRGLNo32g3mNqTc2sWLATlKejMxYw==" + "version": "2.0.0-alpha.14", + "resolved": "https://registry.npmjs.org/rrweb-snapshot/-/rrweb-snapshot-2.0.0-alpha.14.tgz", + "integrity": "sha512-HuJd7iZauzhf7XI5FFtCGeUXkHk1mgc8yvH9Km9zB09Yk2cr0bW4eKx9fWQhRFiry9yXf/vOMkUy403xTLPIrQ==" }, "run-parallel": { "version": "1.2.0", diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index a795340ac9d3..a795b72d4a40 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -58,7 +58,7 @@ "lodash.reduce": "^4.6.0", "luxon": "^3.4.4", "malihu-custom-scrollbar-plugin": "3.1.5", - "mixpanel-browser": "^2.50.0", + "mixpanel-browser": "^2.51.0", "moment": "^2.30.1", "nanomorph": "^5.4.0", "numeral": 
"^2.0.6", From bf32aab579b93fc9d829fc0b0b5669d2463b948b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:09:00 +0300 Subject: [PATCH 050/150] chore(deps-dev): bump css-minimizer-webpack-plugin (#10173) Bumps [css-minimizer-webpack-plugin](https://github.com/webpack-contrib/css-minimizer-webpack-plugin) from 6.0.0 to 7.0.0. - [Release notes](https://github.com/webpack-contrib/css-minimizer-webpack-plugin/releases) - [Changelog](https://github.com/webpack-contrib/css-minimizer-webpack-plugin/blob/master/CHANGELOG.md) - [Commits](https://github.com/webpack-contrib/css-minimizer-webpack-plugin/compare/v6.0.0...v7.0.0) --- updated-dependencies: - dependency-name: css-minimizer-webpack-plugin dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 727 +++++++++--------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 367 insertions(+), 362 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index 7a6b859daf07..97b1d2d21be2 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -77,7 +77,7 @@ "babel-loader": "^9.1.3", "copy-webpack-plugin": "^12.0.2", "css-loader": "^7.1.1", - "css-minimizer-webpack-plugin": "^6.0.0", + "css-minimizer-webpack-plugin": "^7.0.0", "eslint": "^8.57.0", "eslint-config-standard": "^17.1.0", "eslint-plugin-import": "^2.29.1", @@ -6250,9 +6250,9 @@ } }, "node_modules/css-declaration-sorter": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.1.1.tgz", - "integrity": "sha512-dZ3bVTEEc1vxr3Bek9vGwfB5Z6ESPULhcRvO472mfjVnj8jRcTnKO8/JTczlvxM10Myb+wBM++1MtdO76eWcaQ==", + "version": 
"7.2.0", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz", + "integrity": "sha512-h70rUM+3PNFuaBDTLe8wF/cdWu+dOZmb7pJt8Z2sedYbAcQVQV/tEchueg3GWxwqS0cxtbxmaHEdkNACqcvsow==", "dev": true, "engines": { "node": "^14 || ^16 || >=18" @@ -6312,15 +6312,15 @@ } }, "node_modules/css-minimizer-webpack-plugin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-6.0.0.tgz", - "integrity": "sha512-BLpR9CCDkKvhO3i0oZQgad6v9pCxUuhSc5RT6iUEy9M8hBXi4TJb5vqF2GQ2deqYHmRi3O6IR9hgAZQWg0EBwA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-7.0.0.tgz", + "integrity": "sha512-niy66jxsQHqO+EYbhPuIhqRQ1mNcNVUHrMnkzzir9kFOERJUaQDDRhh7dKDz33kBpkWMF9M8Vx0QlDbc5AHOsw==", "dev": true, "dependencies": { - "@jridgewell/trace-mapping": "^0.3.21", - "cssnano": "^6.0.3", + "@jridgewell/trace-mapping": "^0.3.25", + "cssnano": "^7.0.1", "jest-worker": "^29.7.0", - "postcss": "^8.4.33", + "postcss": "^8.4.38", "schema-utils": "^4.2.0", "serialize-javascript": "^6.0.2" }, @@ -6463,16 +6463,16 @@ "integrity": "sha1-xtJnJjKi5cg+AT5oZKQs6N79IK4=" }, "node_modules/cssnano": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-6.0.3.tgz", - "integrity": "sha512-MRq4CIj8pnyZpcI2qs6wswoYoDD1t0aL28n+41c1Ukcpm56m1h6mCexIHBGjfZfnTqtGSSCP4/fB1ovxgjBOiw==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-7.0.1.tgz", + "integrity": "sha512-917Mej/4SdI7b55atsli3sU4MOJ9XDoKgnlCtQtXYj8XUFcM3riTuYHyqBBnnskawW+zWwp0KxJzpEUodlpqUg==", "dev": true, "dependencies": { - "cssnano-preset-default": "^6.0.3", - "lilconfig": "^3.0.0" + "cssnano-preset-default": "^7.0.1", + "lilconfig": "^3.1.1" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "funding": { "type": "opencollective", @@ -6483,55 +6483,56 @@ } }, 
"node_modules/cssnano-preset-default": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-6.0.3.tgz", - "integrity": "sha512-4y3H370aZCkT9Ev8P4SO4bZbt+AExeKhh8wTbms/X7OLDo5E7AYUUy6YPxa/uF5Grf+AJwNcCnxKhZynJ6luBA==", - "dev": true, - "dependencies": { - "css-declaration-sorter": "^7.1.1", - "cssnano-utils": "^4.0.1", - "postcss-calc": "^9.0.1", - "postcss-colormin": "^6.0.2", - "postcss-convert-values": "^6.0.2", - "postcss-discard-comments": "^6.0.1", - "postcss-discard-duplicates": "^6.0.1", - "postcss-discard-empty": "^6.0.1", - "postcss-discard-overridden": "^6.0.1", - "postcss-merge-longhand": "^6.0.2", - "postcss-merge-rules": "^6.0.3", - "postcss-minify-font-values": "^6.0.1", - "postcss-minify-gradients": "^6.0.1", - "postcss-minify-params": "^6.0.2", - "postcss-minify-selectors": "^6.0.2", - "postcss-normalize-charset": "^6.0.1", - "postcss-normalize-display-values": "^6.0.1", - "postcss-normalize-positions": "^6.0.1", - "postcss-normalize-repeat-style": "^6.0.1", - "postcss-normalize-string": "^6.0.1", - "postcss-normalize-timing-functions": "^6.0.1", - "postcss-normalize-unicode": "^6.0.2", - "postcss-normalize-url": "^6.0.1", - "postcss-normalize-whitespace": "^6.0.1", - "postcss-ordered-values": "^6.0.1", - "postcss-reduce-initial": "^6.0.2", - "postcss-reduce-transforms": "^6.0.1", - "postcss-svgo": "^6.0.2", - "postcss-unique-selectors": "^6.0.2" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-7.0.1.tgz", + "integrity": "sha512-Fumyr+uZMcjYQeuHssAZxn0cKj3cdQc5GcxkBcmEzISGB+UW9CLNlU4tBOJbJGcPukFDlicG32eFbrc8K9V5pw==", + "dev": true, + "dependencies": { + "browserslist": "^4.23.0", + "css-declaration-sorter": "^7.2.0", + "cssnano-utils": "^5.0.0", + "postcss-calc": "^10.0.0", + "postcss-colormin": "^7.0.0", + "postcss-convert-values": "^7.0.0", + "postcss-discard-comments": 
"^7.0.0", + "postcss-discard-duplicates": "^7.0.0", + "postcss-discard-empty": "^7.0.0", + "postcss-discard-overridden": "^7.0.0", + "postcss-merge-longhand": "^7.0.0", + "postcss-merge-rules": "^7.0.0", + "postcss-minify-font-values": "^7.0.0", + "postcss-minify-gradients": "^7.0.0", + "postcss-minify-params": "^7.0.0", + "postcss-minify-selectors": "^7.0.0", + "postcss-normalize-charset": "^7.0.0", + "postcss-normalize-display-values": "^7.0.0", + "postcss-normalize-positions": "^7.0.0", + "postcss-normalize-repeat-style": "^7.0.0", + "postcss-normalize-string": "^7.0.0", + "postcss-normalize-timing-functions": "^7.0.0", + "postcss-normalize-unicode": "^7.0.0", + "postcss-normalize-url": "^7.0.0", + "postcss-normalize-whitespace": "^7.0.0", + "postcss-ordered-values": "^7.0.0", + "postcss-reduce-initial": "^7.0.0", + "postcss-reduce-transforms": "^7.0.0", + "postcss-svgo": "^7.0.0", + "postcss-unique-selectors": "^7.0.0" + }, + "engines": { + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/cssnano-utils": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.1.tgz", - "integrity": "sha512-6qQuYDqsGoiXssZ3zct6dcMxiqfT6epy7x4R0TQJadd4LWO3sPR6JH6ZByOvVLoZ6EdwPGgd7+DR1EmX3tiXQQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-5.0.0.tgz", + "integrity": "sha512-Uij0Xdxc24L6SirFr25MlwC2rCFX6scyUmuKpzI+JQ7cyqDEwD42fJ0xfB3yLfOnRDU5LKGgjQ9FA6LYh76GWQ==", "dev": true, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" @@ -12406,12 +12407,15 @@ } }, "node_modules/lilconfig": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz", - "integrity": "sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==", + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", "dev": true, "engines": { "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" } }, "node_modules/lines-and-columns": { @@ -13826,98 +13830,98 @@ } }, "node_modules/postcss-calc": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-9.0.1.tgz", - "integrity": "sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-10.0.0.tgz", + "integrity": "sha512-OmjhudoNTP0QleZCwl1i6NeBwN+5MZbY5ersLZz69mjJiDVv/p57RjRuKDkHeDWr4T+S97wQfsqRTNoDHB2e3g==", "dev": true, "dependencies": { - "postcss-selector-parser": "^6.0.11", + "postcss-selector-parser": "^6.0.16", "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12 || ^20.9 || >=22.0" }, "peerDependencies": { - "postcss": "^8.2.2" + "postcss": "^8.4.38" } }, "node_modules/postcss-colormin": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-6.0.2.tgz", - "integrity": "sha512-TXKOxs9LWcdYo5cgmcSHPkyrLAh86hX1ijmyy6J8SbOhyv6ua053M3ZAM/0j44UsnQNIWdl8gb5L7xX2htKeLw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-7.0.0.tgz", + "integrity": "sha512-5CN6fqtsEtEtwf3mFV3B4UaZnlYljPpzmGeDB4yCK067PnAtfLe9uX2aFZaEwxHE7HopG5rUkW8gyHrNAesHEg==", "dev": true, "dependencies": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "caniuse-api": "^3.0.0", - "colord": "^2.9.1", + "colord": "^2.9.3", "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-convert-values": { - 
"version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-6.0.2.tgz", - "integrity": "sha512-aeBmaTnGQ+NUSVQT8aY0sKyAD/BaLJenEKZ03YK0JnDE1w1Rr8XShoxdal2V2H26xTJKr3v5haByOhJuyT4UYw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-7.0.0.tgz", + "integrity": "sha512-bMuzDgXBbFbByPgj+/r6va8zNuIDUaIIbvAFgdO1t3zdgJZ77BZvu6dfWyd6gHEJnYzmeVr9ayUsAQL3/qLJ0w==", "dev": true, "dependencies": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-discard-comments": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-6.0.1.tgz", - "integrity": "sha512-f1KYNPtqYLUeZGCHQPKzzFtsHaRuECe6jLakf/RjSRqvF5XHLZnM2+fXLhb8Qh/HBFHs3M4cSLb1k3B899RYIg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-7.0.0.tgz", + "integrity": "sha512-xpSdzRqYmy4YIVmjfGyYXKaI1SRnK6CTr+4Zmvyof8ANwvgfZgGdVtmgAvzh59gJm808mJCWQC9tFN0KF5dEXA==", "dev": true, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-discard-duplicates": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.1.tgz", - "integrity": "sha512-1hvUs76HLYR8zkScbwyJ8oJEugfPV+WchpnA+26fpJ7Smzs51CzGBHC32RS03psuX/2l0l0UKh2StzNxOrKCYg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-7.0.0.tgz", + "integrity": "sha512-bAnSuBop5LpAIUmmOSsuvtKAAKREB6BBIYStWUTGq8oG5q9fClDMMuY8i4UPI/cEcDx2TN+7PMnXYIId20UVDw==", "dev": true, "engines": { - "node": "^14 || 
^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-discard-empty": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-6.0.1.tgz", - "integrity": "sha512-yitcmKwmVWtNsrrRqGJ7/C0YRy53i0mjexBDQ9zYxDwTWVBgbU4+C9jIZLmQlTDT9zhml+u0OMFJh8+31krmOg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-7.0.0.tgz", + "integrity": "sha512-e+QzoReTZ8IAwhnSdp/++7gBZ/F+nBq9y6PomfwORfP7q9nBpK5AMP64kOt0bA+lShBFbBDcgpJ3X4etHg4lzA==", "dev": true, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-discard-overridden": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-6.0.1.tgz", - "integrity": "sha512-qs0ehZMMZpSESbRkw1+inkf51kak6OOzNRaoLd/U7Fatp0aN2HQ1rxGOrJvYcRAN9VpX8kUF13R2ofn8OlvFVA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-7.0.0.tgz", + "integrity": "sha512-GmNAzx88u3k2+sBTZrJSDauR0ccpE24omTQCVmaTTZFz1du6AasspjaUPMJ2ud4RslZpoFKyf+6MSPETLojc6w==", "dev": true, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" @@ -13970,98 +13974,98 @@ } }, "node_modules/postcss-merge-longhand": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-6.0.2.tgz", - "integrity": "sha512-+yfVB7gEM8SrCo9w2lCApKIEzrTKl5yS1F4yGhV3kSim6JzbfLGJyhR1B6X+6vOT0U33Mgx7iv4X9MVWuaSAfw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-7.0.0.tgz", + "integrity": 
"sha512-0X8I4/9+G03X5/5NnrfopG/YEln2XU8heDh7YqBaiq2SeaKIG3n66ShZPjIolmVuLBQ0BEm3yS8o1mlCLHdW7A==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0", - "stylehacks": "^6.0.2" + "stylehacks": "^7.0.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-merge-rules": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-6.0.3.tgz", - "integrity": "sha512-yfkDqSHGohy8sGYIJwBmIGDv4K4/WrJPX355XrxQb/CSsT4Kc/RxDi6akqn5s9bap85AWgv21ArcUWwWdGNSHA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-7.0.0.tgz", + "integrity": "sha512-Zty3VlOsD6VSjBMu6PiHCVpLegtBT/qtZRVBcSeyEZ6q1iU5qTYT0WtEoLRV+YubZZguS5/ycfP+NRiKfjv6aw==", "dev": true, "dependencies": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "caniuse-api": "^3.0.0", - "cssnano-utils": "^4.0.1", - "postcss-selector-parser": "^6.0.15" + "cssnano-utils": "^5.0.0", + "postcss-selector-parser": "^6.0.16" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-minify-font-values": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-6.0.1.tgz", - "integrity": "sha512-tIwmF1zUPoN6xOtA/2FgVk1ZKrLcCvE0dpZLtzyyte0j9zUeB8RTbCqrHZGjJlxOvNWKMYtunLrrl7HPOiR46w==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-7.0.0.tgz", + "integrity": "sha512-2ckkZtgT0zG8SMc5aoNwtm5234eUx1GGFJKf2b1bSp8UflqaeFzR50lid4PfqVI9NtGqJ2J4Y7fwvnP/u1cQog==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, 
"node_modules/postcss-minify-gradients": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-6.0.1.tgz", - "integrity": "sha512-M1RJWVjd6IOLPl1hYiOd5HQHgpp6cvJVLrieQYS9y07Yo8itAr6jaekzJphaJFR0tcg4kRewCk3kna9uHBxn/w==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-7.0.0.tgz", + "integrity": "sha512-pdUIIdj/C93ryCHew0UgBnL2DtUS3hfFa5XtERrs4x+hmpMYGhbzo6l/Ir5de41O0GaKVpK1ZbDNXSY6GkXvtg==", "dev": true, "dependencies": { - "colord": "^2.9.1", - "cssnano-utils": "^4.0.1", + "colord": "^2.9.3", + "cssnano-utils": "^5.0.0", "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-minify-params": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-6.0.2.tgz", - "integrity": "sha512-zwQtbrPEBDj+ApELZ6QylLf2/c5zmASoOuA4DzolyVGdV38iR2I5QRMsZcHkcdkZzxpN8RS4cN7LPskOkTwTZw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-7.0.0.tgz", + "integrity": "sha512-XOJAuX8Q/9GT1sGxlUvaFEe2H9n50bniLZblXXsAT/BwSfFYvzSZeFG7uupwc0KbKpTnflnQ7aMwGzX6JUWliQ==", "dev": true, "dependencies": { - "browserslist": "^4.22.2", - "cssnano-utils": "^4.0.1", + "browserslist": "^4.23.0", + "cssnano-utils": "^5.0.0", "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-minify-selectors": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-6.0.2.tgz", - "integrity": "sha512-0b+m+w7OAvZejPQdN2GjsXLv5o0jqYHX3aoV0e7RBKPCsB7TYG5KKWBFhGnB/iP3213Ts8c5H4wLPLMm7z28Sg==", + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-7.0.0.tgz", + "integrity": "sha512-f00CExZhD6lNw2vTZbcnmfxVgaVKzUw6IRsIFX3JTT8GdsoABc1WnhhGwL1i8YPJ3sSWw39fv7XPtvLb+3Uitw==", "dev": true, "dependencies": { - "postcss-selector-parser": "^6.0.15" + "postcss-selector-parser": "^6.0.16" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" @@ -14127,189 +14131,189 @@ } }, "node_modules/postcss-normalize-charset": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-6.0.1.tgz", - "integrity": "sha512-aW5LbMNRZ+oDV57PF9K+WI1Z8MPnF+A8qbajg/T8PP126YrGX1f9IQx21GI2OlGz7XFJi/fNi0GTbY948XJtXg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-7.0.0.tgz", + "integrity": "sha512-ABisNUXMeZeDNzCQxPxBCkXexvBrUHV+p7/BXOY+ulxkcjUZO0cp8ekGBwvIh2LbCwnWbyMPNJVtBSdyhM2zYQ==", "dev": true, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-display-values": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.1.tgz", - "integrity": "sha512-mc3vxp2bEuCb4LgCcmG1y6lKJu1Co8T+rKHrcbShJwUmKJiEl761qb/QQCfFwlrvSeET3jksolCR/RZuMURudw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-7.0.0.tgz", + "integrity": "sha512-lnFZzNPeDf5uGMPYgGOw7v0BfB45+irSRz9gHQStdkkhiM0gTfvWkWB5BMxpn0OqgOQuZG/mRlZyJxp0EImr2Q==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-positions": { - 
"version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-6.0.1.tgz", - "integrity": "sha512-HRsq8u/0unKNvm0cvwxcOUEcakFXqZ41fv3FOdPn916XFUrympjr+03oaLkuZENz3HE9RrQE9yU0Xv43ThWjQg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-7.0.0.tgz", + "integrity": "sha512-I0yt8wX529UKIGs2y/9Ybs2CelSvItfmvg/DBIjTnoUSrPxSV7Z0yZ8ShSVtKNaV/wAY+m7bgtyVQLhB00A1NQ==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-repeat-style": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.1.tgz", - "integrity": "sha512-Gbb2nmCy6tTiA7Sh2MBs3fj9W8swonk6lw+dFFeQT68B0Pzwp1kvisJQkdV6rbbMSd9brMlS8I8ts52tAGWmGQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-7.0.0.tgz", + "integrity": "sha512-o3uSGYH+2q30ieM3ppu9GTjSXIzOrRdCUn8UOMGNw7Af61bmurHTWI87hRybrP6xDHvOe5WlAj3XzN6vEO8jLw==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-string": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-6.0.1.tgz", - "integrity": "sha512-5Fhx/+xzALJD9EI26Aq23hXwmv97Zfy2VFrt5PLT8lAhnBIZvmaT5pQk+NuJ/GWj/QWaKSKbnoKDGLbV6qnhXg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-7.0.0.tgz", + "integrity": "sha512-w/qzL212DFVOpMy3UGyxrND+Kb0fvCiBBujiaONIihq7VvtC7bswjWgKQU/w4VcRyDD8gpfqUiBQ4DUOwEJ6Qg==", "dev": 
true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-timing-functions": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.1.tgz", - "integrity": "sha512-4zcczzHqmCU7L5dqTB9rzeqPWRMc0K2HoR+Bfl+FSMbqGBUcP5LRfgcH4BdRtLuzVQK1/FHdFoGT3F7rkEnY+g==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-7.0.0.tgz", + "integrity": "sha512-tNgw3YV0LYoRwg43N3lTe3AEWZ66W7Dh7lVEpJbHoKOuHc1sLrzMLMFjP8SNULHaykzsonUEDbKedv8C+7ej6g==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-unicode": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-6.0.2.tgz", - "integrity": "sha512-Ff2VdAYCTGyMUwpevTZPZ4w0+mPjbZzLLyoLh/RMpqUqeQKZ+xMm31hkxBavDcGKcxm6ACzGk0nBfZ8LZkStKA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-7.0.0.tgz", + "integrity": "sha512-OnKV52/VFFDAim4n0pdI+JAhsolLBdnCKxE6VV5lW5Q/JeVGFN8UM8ur6/A3EAMLsT1ZRm3fDHh/rBoBQpqi2w==", "dev": true, "dependencies": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-url": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-6.0.1.tgz", - "integrity": 
"sha512-jEXL15tXSvbjm0yzUV7FBiEXwhIa9H88JOXDGQzmcWoB4mSjZIsmtto066s2iW9FYuIrIF4k04HA2BKAOpbsaQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-7.0.0.tgz", + "integrity": "sha512-+d7+PpE+jyPX1hDQZYG+NaFD+Nd2ris6r8fPTBAjE8z/U41n/bib3vze8x7rKs5H1uEw5ppe9IojewouHk0klQ==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-normalize-whitespace": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.1.tgz", - "integrity": "sha512-76i3NpWf6bB8UHlVuLRxG4zW2YykF9CTEcq/9LGAiz2qBuX5cBStadkk0jSkg9a9TCIXbMQz7yzrygKoCW9JuA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-7.0.0.tgz", + "integrity": "sha512-37/toN4wwZErqohedXYqWgvcHUGlT8O/m2jVkAfAe9Bd4MzRqlBmXrJRePH0e9Wgnz2X7KymTgTOaaFizQe3AQ==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-ordered-values": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-6.0.1.tgz", - "integrity": "sha512-XXbb1O/MW9HdEhnBxitZpPFbIvDgbo9NK4c/5bOfiKpnIGZDoL2xd7/e6jW5DYLsWxBbs+1nZEnVgnjnlFViaA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-7.0.0.tgz", + "integrity": "sha512-KROvC63A8UQW1eYDljQe1dtwc1E/M+mMwDT6z7khV/weHYLWTghaLRLunU7x1xw85lWFwVZOAGakxekYvKV+0w==", "dev": true, "dependencies": { - "cssnano-utils": "^4.0.1", + "cssnano-utils": "^5.0.0", "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": 
"^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-reduce-initial": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-6.0.2.tgz", - "integrity": "sha512-YGKalhNlCLcjcLvjU5nF8FyeCTkCO5UtvJEt0hrPZVCTtRLSOH4z00T1UntQPj4dUmIYZgMj8qK77JbSX95hSw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-7.0.0.tgz", + "integrity": "sha512-iqGgmBxY9LrblZ0BKLjmrA1mC/cf9A/wYCCqSmD6tMi+xAyVl0+DfixZIHSVDMbCPRPjNmVF0DFGth/IDGelFQ==", "dev": true, "dependencies": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "caniuse-api": "^3.0.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-reduce-transforms": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.1.tgz", - "integrity": "sha512-fUbV81OkUe75JM+VYO1gr/IoA2b/dRiH6HvMwhrIBSUrxq3jNZQZitSnugcTLDi1KkQh1eR/zi+iyxviUNBkcQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-7.0.0.tgz", + "integrity": "sha512-pnt1HKKZ07/idH8cpATX/ujMbtOGhUfE+m8gbqwJE05aTaNw8gbo34a2e3if0xc0dlu75sUOiqvwCGY3fzOHew==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-selector-parser": { - "version": "6.0.15", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz", - "integrity": "sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw==", + "version": "6.1.0", + "resolved": 
"https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.0.tgz", + "integrity": "sha512-UMz42UD0UY0EApS0ZL9o1XnLhSTtvvvLe5Dc2H2O56fvRZi+KulDyf5ctDhhtYJBGKStV2FL1fy6253cmLgqVQ==", "dev": true, "dependencies": { "cssesc": "^3.0.0", @@ -14320,31 +14324,31 @@ } }, "node_modules/postcss-svgo": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-6.0.2.tgz", - "integrity": "sha512-IH5R9SjkTkh0kfFOQDImyy1+mTCb+E830+9SV1O+AaDcoHTvfsvt6WwJeo7KwcHbFnevZVCsXhDmjFiGVuwqFQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-7.0.0.tgz", + "integrity": "sha512-Xj5DRdvA97yRy3wjbCH2NKXtDUwEnph6EHr5ZXszsBVKCNrKXYBjzAXqav7/Afz5WwJ/1peZoTguCEJIg7ytmA==", "dev": true, "dependencies": { "postcss-value-parser": "^4.2.0", "svgo": "^3.2.0" }, "engines": { - "node": "^14 || ^16 || >= 18" + "node": "^18.12.0 || ^20.9.0 || >= 18" }, "peerDependencies": { "postcss": "^8.4.31" } }, "node_modules/postcss-unique-selectors": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-6.0.2.tgz", - "integrity": "sha512-8IZGQ94nechdG7Y9Sh9FlIY2b4uS8/k8kdKRX040XHsS3B6d1HrJAkXrBSsSu4SuARruSsUjW3nlSw8BHkaAYQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-7.0.0.tgz", + "integrity": "sha512-NYFqcft7vVQMZlQPsMdMPy+qU/zDpy95Malpw4GeA9ZZjM6dVXDshXtDmLc0m4WCD6XeZCJqjTfPT1USsdt+rA==", "dev": true, "dependencies": { - "postcss-selector-parser": "^6.0.15" + "postcss-selector-parser": "^6.0.16" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" @@ -16036,16 +16040,16 @@ } }, "node_modules/stylehacks": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-6.0.2.tgz", - "integrity": 
"sha512-00zvJGnCu64EpMjX8b5iCZ3us2Ptyw8+toEkb92VdmkEaRaSGBNKAoK6aWZckhXxmQP8zWiTaFaiMGIU8Ve8sg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-7.0.0.tgz", + "integrity": "sha512-47Nw4pQ6QJb4CA6dzF2m9810sjQik4dfk4UwAm5wlwhrW3syzZKF8AR4/cfO3Cr6lsFgAoznQq0Wg57qhjTA2A==", "dev": true, "dependencies": { - "browserslist": "^4.22.2", - "postcss-selector-parser": "^6.0.15" + "browserslist": "^4.23.0", + "postcss-selector-parser": "^6.0.16" }, "engines": { - "node": "^14 || ^16 || >=18.0" + "node": "^18.12.0 || ^20.9.0 || >=22.0" }, "peerDependencies": { "postcss": "^8.4.31" @@ -16074,9 +16078,9 @@ } }, "node_modules/svgo": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.2.0.tgz", - "integrity": "sha512-4PP6CMW/V7l/GmKRKzsLR8xxjdHTV4IMvhTnpuHwwBazSIlw5W/5SmPjN8Dwyt7lKbSJrRDgp4t9ph0HgChFBQ==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.3.2.tgz", + "integrity": "sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw==", "dev": true, "dependencies": { "@trysound/sax": "0.2.0", @@ -22564,9 +22568,9 @@ "integrity": "sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU=" }, "css-declaration-sorter": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.1.1.tgz", - "integrity": "sha512-dZ3bVTEEc1vxr3Bek9vGwfB5Z6ESPULhcRvO472mfjVnj8jRcTnKO8/JTczlvxM10Myb+wBM++1MtdO76eWcaQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz", + "integrity": "sha512-h70rUM+3PNFuaBDTLe8wF/cdWu+dOZmb7pJt8Z2sedYbAcQVQV/tEchueg3GWxwqS0cxtbxmaHEdkNACqcvsow==", "dev": true, "requires": {} }, @@ -22598,15 +22602,15 @@ } }, "css-minimizer-webpack-plugin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-6.0.0.tgz", - "integrity": 
"sha512-BLpR9CCDkKvhO3i0oZQgad6v9pCxUuhSc5RT6iUEy9M8hBXi4TJb5vqF2GQ2deqYHmRi3O6IR9hgAZQWg0EBwA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-7.0.0.tgz", + "integrity": "sha512-niy66jxsQHqO+EYbhPuIhqRQ1mNcNVUHrMnkzzir9kFOERJUaQDDRhh7dKDz33kBpkWMF9M8Vx0QlDbc5AHOsw==", "dev": true, "requires": { - "@jridgewell/trace-mapping": "^0.3.21", - "cssnano": "^6.0.3", + "@jridgewell/trace-mapping": "^0.3.25", + "cssnano": "^7.0.1", "jest-worker": "^29.7.0", - "postcss": "^8.4.33", + "postcss": "^8.4.38", "schema-utils": "^4.2.0", "serialize-javascript": "^6.0.2" }, @@ -22691,56 +22695,57 @@ "integrity": "sha1-xtJnJjKi5cg+AT5oZKQs6N79IK4=" }, "cssnano": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-6.0.3.tgz", - "integrity": "sha512-MRq4CIj8pnyZpcI2qs6wswoYoDD1t0aL28n+41c1Ukcpm56m1h6mCexIHBGjfZfnTqtGSSCP4/fB1ovxgjBOiw==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-7.0.1.tgz", + "integrity": "sha512-917Mej/4SdI7b55atsli3sU4MOJ9XDoKgnlCtQtXYj8XUFcM3riTuYHyqBBnnskawW+zWwp0KxJzpEUodlpqUg==", "dev": true, "requires": { - "cssnano-preset-default": "^6.0.3", - "lilconfig": "^3.0.0" + "cssnano-preset-default": "^7.0.1", + "lilconfig": "^3.1.1" } }, "cssnano-preset-default": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-6.0.3.tgz", - "integrity": "sha512-4y3H370aZCkT9Ev8P4SO4bZbt+AExeKhh8wTbms/X7OLDo5E7AYUUy6YPxa/uF5Grf+AJwNcCnxKhZynJ6luBA==", - "dev": true, - "requires": { - "css-declaration-sorter": "^7.1.1", - "cssnano-utils": "^4.0.1", - "postcss-calc": "^9.0.1", - "postcss-colormin": "^6.0.2", - "postcss-convert-values": "^6.0.2", - "postcss-discard-comments": "^6.0.1", - "postcss-discard-duplicates": "^6.0.1", - "postcss-discard-empty": "^6.0.1", - "postcss-discard-overridden": "^6.0.1", - "postcss-merge-longhand": "^6.0.2", - "postcss-merge-rules": 
"^6.0.3", - "postcss-minify-font-values": "^6.0.1", - "postcss-minify-gradients": "^6.0.1", - "postcss-minify-params": "^6.0.2", - "postcss-minify-selectors": "^6.0.2", - "postcss-normalize-charset": "^6.0.1", - "postcss-normalize-display-values": "^6.0.1", - "postcss-normalize-positions": "^6.0.1", - "postcss-normalize-repeat-style": "^6.0.1", - "postcss-normalize-string": "^6.0.1", - "postcss-normalize-timing-functions": "^6.0.1", - "postcss-normalize-unicode": "^6.0.2", - "postcss-normalize-url": "^6.0.1", - "postcss-normalize-whitespace": "^6.0.1", - "postcss-ordered-values": "^6.0.1", - "postcss-reduce-initial": "^6.0.2", - "postcss-reduce-transforms": "^6.0.1", - "postcss-svgo": "^6.0.2", - "postcss-unique-selectors": "^6.0.2" + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-7.0.1.tgz", + "integrity": "sha512-Fumyr+uZMcjYQeuHssAZxn0cKj3cdQc5GcxkBcmEzISGB+UW9CLNlU4tBOJbJGcPukFDlicG32eFbrc8K9V5pw==", + "dev": true, + "requires": { + "browserslist": "^4.23.0", + "css-declaration-sorter": "^7.2.0", + "cssnano-utils": "^5.0.0", + "postcss-calc": "^10.0.0", + "postcss-colormin": "^7.0.0", + "postcss-convert-values": "^7.0.0", + "postcss-discard-comments": "^7.0.0", + "postcss-discard-duplicates": "^7.0.0", + "postcss-discard-empty": "^7.0.0", + "postcss-discard-overridden": "^7.0.0", + "postcss-merge-longhand": "^7.0.0", + "postcss-merge-rules": "^7.0.0", + "postcss-minify-font-values": "^7.0.0", + "postcss-minify-gradients": "^7.0.0", + "postcss-minify-params": "^7.0.0", + "postcss-minify-selectors": "^7.0.0", + "postcss-normalize-charset": "^7.0.0", + "postcss-normalize-display-values": "^7.0.0", + "postcss-normalize-positions": "^7.0.0", + "postcss-normalize-repeat-style": "^7.0.0", + "postcss-normalize-string": "^7.0.0", + "postcss-normalize-timing-functions": "^7.0.0", + "postcss-normalize-unicode": "^7.0.0", + "postcss-normalize-url": "^7.0.0", + "postcss-normalize-whitespace": "^7.0.0", + 
"postcss-ordered-values": "^7.0.0", + "postcss-reduce-initial": "^7.0.0", + "postcss-reduce-transforms": "^7.0.0", + "postcss-svgo": "^7.0.0", + "postcss-unique-selectors": "^7.0.0" } }, "cssnano-utils": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.1.tgz", - "integrity": "sha512-6qQuYDqsGoiXssZ3zct6dcMxiqfT6epy7x4R0TQJadd4LWO3sPR6JH6ZByOvVLoZ6EdwPGgd7+DR1EmX3tiXQQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-5.0.0.tgz", + "integrity": "sha512-Uij0Xdxc24L6SirFr25MlwC2rCFX6scyUmuKpzI+JQ7cyqDEwD42fJ0xfB3yLfOnRDU5LKGgjQ9FA6LYh76GWQ==", "dev": true, "requires": {} }, @@ -27265,9 +27270,9 @@ } }, "lilconfig": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz", - "integrity": "sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", "dev": true }, "lines-and-columns": { @@ -28365,62 +28370,62 @@ } }, "postcss-calc": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-9.0.1.tgz", - "integrity": "sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-10.0.0.tgz", + "integrity": "sha512-OmjhudoNTP0QleZCwl1i6NeBwN+5MZbY5ersLZz69mjJiDVv/p57RjRuKDkHeDWr4T+S97wQfsqRTNoDHB2e3g==", "dev": true, "requires": { - "postcss-selector-parser": "^6.0.11", + "postcss-selector-parser": "^6.0.16", "postcss-value-parser": "^4.2.0" } }, "postcss-colormin": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-6.0.2.tgz", - "integrity": 
"sha512-TXKOxs9LWcdYo5cgmcSHPkyrLAh86hX1ijmyy6J8SbOhyv6ua053M3ZAM/0j44UsnQNIWdl8gb5L7xX2htKeLw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-7.0.0.tgz", + "integrity": "sha512-5CN6fqtsEtEtwf3mFV3B4UaZnlYljPpzmGeDB4yCK067PnAtfLe9uX2aFZaEwxHE7HopG5rUkW8gyHrNAesHEg==", "dev": true, "requires": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "caniuse-api": "^3.0.0", - "colord": "^2.9.1", + "colord": "^2.9.3", "postcss-value-parser": "^4.2.0" } }, "postcss-convert-values": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-6.0.2.tgz", - "integrity": "sha512-aeBmaTnGQ+NUSVQT8aY0sKyAD/BaLJenEKZ03YK0JnDE1w1Rr8XShoxdal2V2H26xTJKr3v5haByOhJuyT4UYw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-7.0.0.tgz", + "integrity": "sha512-bMuzDgXBbFbByPgj+/r6va8zNuIDUaIIbvAFgdO1t3zdgJZ77BZvu6dfWyd6gHEJnYzmeVr9ayUsAQL3/qLJ0w==", "dev": true, "requires": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "postcss-value-parser": "^4.2.0" } }, "postcss-discard-comments": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-6.0.1.tgz", - "integrity": "sha512-f1KYNPtqYLUeZGCHQPKzzFtsHaRuECe6jLakf/RjSRqvF5XHLZnM2+fXLhb8Qh/HBFHs3M4cSLb1k3B899RYIg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-7.0.0.tgz", + "integrity": "sha512-xpSdzRqYmy4YIVmjfGyYXKaI1SRnK6CTr+4Zmvyof8ANwvgfZgGdVtmgAvzh59gJm808mJCWQC9tFN0KF5dEXA==", "dev": true, "requires": {} }, "postcss-discard-duplicates": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.1.tgz", - "integrity": "sha512-1hvUs76HLYR8zkScbwyJ8oJEugfPV+WchpnA+26fpJ7Smzs51CzGBHC32RS03psuX/2l0l0UKh2StzNxOrKCYg==", + "version": "7.0.0", + 
"resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-7.0.0.tgz", + "integrity": "sha512-bAnSuBop5LpAIUmmOSsuvtKAAKREB6BBIYStWUTGq8oG5q9fClDMMuY8i4UPI/cEcDx2TN+7PMnXYIId20UVDw==", "dev": true, "requires": {} }, "postcss-discard-empty": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-6.0.1.tgz", - "integrity": "sha512-yitcmKwmVWtNsrrRqGJ7/C0YRy53i0mjexBDQ9zYxDwTWVBgbU4+C9jIZLmQlTDT9zhml+u0OMFJh8+31krmOg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-7.0.0.tgz", + "integrity": "sha512-e+QzoReTZ8IAwhnSdp/++7gBZ/F+nBq9y6PomfwORfP7q9nBpK5AMP64kOt0bA+lShBFbBDcgpJ3X4etHg4lzA==", "dev": true, "requires": {} }, "postcss-discard-overridden": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-6.0.1.tgz", - "integrity": "sha512-qs0ehZMMZpSESbRkw1+inkf51kak6OOzNRaoLd/U7Fatp0aN2HQ1rxGOrJvYcRAN9VpX8kUF13R2ofn8OlvFVA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-7.0.0.tgz", + "integrity": "sha512-GmNAzx88u3k2+sBTZrJSDauR0ccpE24omTQCVmaTTZFz1du6AasspjaUPMJ2ud4RslZpoFKyf+6MSPETLojc6w==", "dev": true, "requires": {} }, @@ -28447,65 +28452,65 @@ } }, "postcss-merge-longhand": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-6.0.2.tgz", - "integrity": "sha512-+yfVB7gEM8SrCo9w2lCApKIEzrTKl5yS1F4yGhV3kSim6JzbfLGJyhR1B6X+6vOT0U33Mgx7iv4X9MVWuaSAfw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-7.0.0.tgz", + "integrity": "sha512-0X8I4/9+G03X5/5NnrfopG/YEln2XU8heDh7YqBaiq2SeaKIG3n66ShZPjIolmVuLBQ0BEm3yS8o1mlCLHdW7A==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0", - "stylehacks": "^6.0.2" + "stylehacks": "^7.0.0" } }, 
"postcss-merge-rules": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-6.0.3.tgz", - "integrity": "sha512-yfkDqSHGohy8sGYIJwBmIGDv4K4/WrJPX355XrxQb/CSsT4Kc/RxDi6akqn5s9bap85AWgv21ArcUWwWdGNSHA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-7.0.0.tgz", + "integrity": "sha512-Zty3VlOsD6VSjBMu6PiHCVpLegtBT/qtZRVBcSeyEZ6q1iU5qTYT0WtEoLRV+YubZZguS5/ycfP+NRiKfjv6aw==", "dev": true, "requires": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "caniuse-api": "^3.0.0", - "cssnano-utils": "^4.0.1", - "postcss-selector-parser": "^6.0.15" + "cssnano-utils": "^5.0.0", + "postcss-selector-parser": "^6.0.16" } }, "postcss-minify-font-values": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-6.0.1.tgz", - "integrity": "sha512-tIwmF1zUPoN6xOtA/2FgVk1ZKrLcCvE0dpZLtzyyte0j9zUeB8RTbCqrHZGjJlxOvNWKMYtunLrrl7HPOiR46w==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-7.0.0.tgz", + "integrity": "sha512-2ckkZtgT0zG8SMc5aoNwtm5234eUx1GGFJKf2b1bSp8UflqaeFzR50lid4PfqVI9NtGqJ2J4Y7fwvnP/u1cQog==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-minify-gradients": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-6.0.1.tgz", - "integrity": "sha512-M1RJWVjd6IOLPl1hYiOd5HQHgpp6cvJVLrieQYS9y07Yo8itAr6jaekzJphaJFR0tcg4kRewCk3kna9uHBxn/w==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-7.0.0.tgz", + "integrity": "sha512-pdUIIdj/C93ryCHew0UgBnL2DtUS3hfFa5XtERrs4x+hmpMYGhbzo6l/Ir5de41O0GaKVpK1ZbDNXSY6GkXvtg==", "dev": true, "requires": { - "colord": "^2.9.1", - "cssnano-utils": "^4.0.1", + "colord": "^2.9.3", + "cssnano-utils": "^5.0.0", "postcss-value-parser": 
"^4.2.0" } }, "postcss-minify-params": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-6.0.2.tgz", - "integrity": "sha512-zwQtbrPEBDj+ApELZ6QylLf2/c5zmASoOuA4DzolyVGdV38iR2I5QRMsZcHkcdkZzxpN8RS4cN7LPskOkTwTZw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-7.0.0.tgz", + "integrity": "sha512-XOJAuX8Q/9GT1sGxlUvaFEe2H9n50bniLZblXXsAT/BwSfFYvzSZeFG7uupwc0KbKpTnflnQ7aMwGzX6JUWliQ==", "dev": true, "requires": { - "browserslist": "^4.22.2", - "cssnano-utils": "^4.0.1", + "browserslist": "^4.23.0", + "cssnano-utils": "^5.0.0", "postcss-value-parser": "^4.2.0" } }, "postcss-minify-selectors": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-6.0.2.tgz", - "integrity": "sha512-0b+m+w7OAvZejPQdN2GjsXLv5o0jqYHX3aoV0e7RBKPCsB7TYG5KKWBFhGnB/iP3213Ts8c5H4wLPLMm7z28Sg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-7.0.0.tgz", + "integrity": "sha512-f00CExZhD6lNw2vTZbcnmfxVgaVKzUw6IRsIFX3JTT8GdsoABc1WnhhGwL1i8YPJ3sSWw39fv7XPtvLb+3Uitw==", "dev": true, "requires": { - "postcss-selector-parser": "^6.0.15" + "postcss-selector-parser": "^6.0.16" } }, "postcss-modules-extract-imports": { @@ -28545,118 +28550,118 @@ } }, "postcss-normalize-charset": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-6.0.1.tgz", - "integrity": "sha512-aW5LbMNRZ+oDV57PF9K+WI1Z8MPnF+A8qbajg/T8PP126YrGX1f9IQx21GI2OlGz7XFJi/fNi0GTbY948XJtXg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-7.0.0.tgz", + "integrity": "sha512-ABisNUXMeZeDNzCQxPxBCkXexvBrUHV+p7/BXOY+ulxkcjUZO0cp8ekGBwvIh2LbCwnWbyMPNJVtBSdyhM2zYQ==", "dev": true, "requires": {} }, "postcss-normalize-display-values": { - "version": "6.0.1", - 
"resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.1.tgz", - "integrity": "sha512-mc3vxp2bEuCb4LgCcmG1y6lKJu1Co8T+rKHrcbShJwUmKJiEl761qb/QQCfFwlrvSeET3jksolCR/RZuMURudw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-7.0.0.tgz", + "integrity": "sha512-lnFZzNPeDf5uGMPYgGOw7v0BfB45+irSRz9gHQStdkkhiM0gTfvWkWB5BMxpn0OqgOQuZG/mRlZyJxp0EImr2Q==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-positions": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-6.0.1.tgz", - "integrity": "sha512-HRsq8u/0unKNvm0cvwxcOUEcakFXqZ41fv3FOdPn916XFUrympjr+03oaLkuZENz3HE9RrQE9yU0Xv43ThWjQg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-7.0.0.tgz", + "integrity": "sha512-I0yt8wX529UKIGs2y/9Ybs2CelSvItfmvg/DBIjTnoUSrPxSV7Z0yZ8ShSVtKNaV/wAY+m7bgtyVQLhB00A1NQ==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-repeat-style": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.1.tgz", - "integrity": "sha512-Gbb2nmCy6tTiA7Sh2MBs3fj9W8swonk6lw+dFFeQT68B0Pzwp1kvisJQkdV6rbbMSd9brMlS8I8ts52tAGWmGQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-7.0.0.tgz", + "integrity": "sha512-o3uSGYH+2q30ieM3ppu9GTjSXIzOrRdCUn8UOMGNw7Af61bmurHTWI87hRybrP6xDHvOe5WlAj3XzN6vEO8jLw==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-string": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-6.0.1.tgz", - "integrity": 
"sha512-5Fhx/+xzALJD9EI26Aq23hXwmv97Zfy2VFrt5PLT8lAhnBIZvmaT5pQk+NuJ/GWj/QWaKSKbnoKDGLbV6qnhXg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-7.0.0.tgz", + "integrity": "sha512-w/qzL212DFVOpMy3UGyxrND+Kb0fvCiBBujiaONIihq7VvtC7bswjWgKQU/w4VcRyDD8gpfqUiBQ4DUOwEJ6Qg==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-timing-functions": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.1.tgz", - "integrity": "sha512-4zcczzHqmCU7L5dqTB9rzeqPWRMc0K2HoR+Bfl+FSMbqGBUcP5LRfgcH4BdRtLuzVQK1/FHdFoGT3F7rkEnY+g==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-7.0.0.tgz", + "integrity": "sha512-tNgw3YV0LYoRwg43N3lTe3AEWZ66W7Dh7lVEpJbHoKOuHc1sLrzMLMFjP8SNULHaykzsonUEDbKedv8C+7ej6g==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-unicode": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-6.0.2.tgz", - "integrity": "sha512-Ff2VdAYCTGyMUwpevTZPZ4w0+mPjbZzLLyoLh/RMpqUqeQKZ+xMm31hkxBavDcGKcxm6ACzGk0nBfZ8LZkStKA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-7.0.0.tgz", + "integrity": "sha512-OnKV52/VFFDAim4n0pdI+JAhsolLBdnCKxE6VV5lW5Q/JeVGFN8UM8ur6/A3EAMLsT1ZRm3fDHh/rBoBQpqi2w==", "dev": true, "requires": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-url": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-6.0.1.tgz", - "integrity": "sha512-jEXL15tXSvbjm0yzUV7FBiEXwhIa9H88JOXDGQzmcWoB4mSjZIsmtto066s2iW9FYuIrIF4k04HA2BKAOpbsaQ==", + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-7.0.0.tgz", + "integrity": "sha512-+d7+PpE+jyPX1hDQZYG+NaFD+Nd2ris6r8fPTBAjE8z/U41n/bib3vze8x7rKs5H1uEw5ppe9IojewouHk0klQ==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-whitespace": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.1.tgz", - "integrity": "sha512-76i3NpWf6bB8UHlVuLRxG4zW2YykF9CTEcq/9LGAiz2qBuX5cBStadkk0jSkg9a9TCIXbMQz7yzrygKoCW9JuA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-7.0.0.tgz", + "integrity": "sha512-37/toN4wwZErqohedXYqWgvcHUGlT8O/m2jVkAfAe9Bd4MzRqlBmXrJRePH0e9Wgnz2X7KymTgTOaaFizQe3AQ==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-ordered-values": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-6.0.1.tgz", - "integrity": "sha512-XXbb1O/MW9HdEhnBxitZpPFbIvDgbo9NK4c/5bOfiKpnIGZDoL2xd7/e6jW5DYLsWxBbs+1nZEnVgnjnlFViaA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-7.0.0.tgz", + "integrity": "sha512-KROvC63A8UQW1eYDljQe1dtwc1E/M+mMwDT6z7khV/weHYLWTghaLRLunU7x1xw85lWFwVZOAGakxekYvKV+0w==", "dev": true, "requires": { - "cssnano-utils": "^4.0.1", + "cssnano-utils": "^5.0.0", "postcss-value-parser": "^4.2.0" } }, "postcss-reduce-initial": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-6.0.2.tgz", - "integrity": "sha512-YGKalhNlCLcjcLvjU5nF8FyeCTkCO5UtvJEt0hrPZVCTtRLSOH4z00T1UntQPj4dUmIYZgMj8qK77JbSX95hSw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-7.0.0.tgz", + "integrity": "sha512-iqGgmBxY9LrblZ0BKLjmrA1mC/cf9A/wYCCqSmD6tMi+xAyVl0+DfixZIHSVDMbCPRPjNmVF0DFGth/IDGelFQ==", 
"dev": true, "requires": { - "browserslist": "^4.22.2", + "browserslist": "^4.23.0", "caniuse-api": "^3.0.0" } }, "postcss-reduce-transforms": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.1.tgz", - "integrity": "sha512-fUbV81OkUe75JM+VYO1gr/IoA2b/dRiH6HvMwhrIBSUrxq3jNZQZitSnugcTLDi1KkQh1eR/zi+iyxviUNBkcQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-7.0.0.tgz", + "integrity": "sha512-pnt1HKKZ07/idH8cpATX/ujMbtOGhUfE+m8gbqwJE05aTaNw8gbo34a2e3if0xc0dlu75sUOiqvwCGY3fzOHew==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-selector-parser": { - "version": "6.0.15", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz", - "integrity": "sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.0.tgz", + "integrity": "sha512-UMz42UD0UY0EApS0ZL9o1XnLhSTtvvvLe5Dc2H2O56fvRZi+KulDyf5ctDhhtYJBGKStV2FL1fy6253cmLgqVQ==", "dev": true, "requires": { "cssesc": "^3.0.0", @@ -28664,9 +28669,9 @@ } }, "postcss-svgo": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-6.0.2.tgz", - "integrity": "sha512-IH5R9SjkTkh0kfFOQDImyy1+mTCb+E830+9SV1O+AaDcoHTvfsvt6WwJeo7KwcHbFnevZVCsXhDmjFiGVuwqFQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-7.0.0.tgz", + "integrity": "sha512-Xj5DRdvA97yRy3wjbCH2NKXtDUwEnph6EHr5ZXszsBVKCNrKXYBjzAXqav7/Afz5WwJ/1peZoTguCEJIg7ytmA==", "dev": true, "requires": { "postcss-value-parser": "^4.2.0", @@ -28674,12 +28679,12 @@ } }, "postcss-unique-selectors": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-6.0.2.tgz", - 
"integrity": "sha512-8IZGQ94nechdG7Y9Sh9FlIY2b4uS8/k8kdKRX040XHsS3B6d1HrJAkXrBSsSu4SuARruSsUjW3nlSw8BHkaAYQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-7.0.0.tgz", + "integrity": "sha512-NYFqcft7vVQMZlQPsMdMPy+qU/zDpy95Malpw4GeA9ZZjM6dVXDshXtDmLc0m4WCD6XeZCJqjTfPT1USsdt+rA==", "dev": true, "requires": { - "postcss-selector-parser": "^6.0.15" + "postcss-selector-parser": "^6.0.16" } }, "postcss-value-parser": { @@ -29953,13 +29958,13 @@ } }, "stylehacks": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-6.0.2.tgz", - "integrity": "sha512-00zvJGnCu64EpMjX8b5iCZ3us2Ptyw8+toEkb92VdmkEaRaSGBNKAoK6aWZckhXxmQP8zWiTaFaiMGIU8Ve8sg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-7.0.0.tgz", + "integrity": "sha512-47Nw4pQ6QJb4CA6dzF2m9810sjQik4dfk4UwAm5wlwhrW3syzZKF8AR4/cfO3Cr6lsFgAoznQq0Wg57qhjTA2A==", "dev": true, "requires": { - "browserslist": "^4.22.2", - "postcss-selector-parser": "^6.0.15" + "browserslist": "^4.23.0", + "postcss-selector-parser": "^6.0.16" } }, "supports-color": { @@ -29976,9 +29981,9 @@ "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" }, "svgo": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.2.0.tgz", - "integrity": "sha512-4PP6CMW/V7l/GmKRKzsLR8xxjdHTV4IMvhTnpuHwwBazSIlw5W/5SmPjN8Dwyt7lKbSJrRDgp4t9ph0HgChFBQ==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.3.2.tgz", + "integrity": "sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw==", "dev": true, "requires": { "@trysound/sax": "0.2.0", diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index a795b72d4a40..081bf04dad21 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -89,7 
+89,7 @@ "babel-loader": "^9.1.3", "copy-webpack-plugin": "^12.0.2", "css-loader": "^7.1.1", - "css-minimizer-webpack-plugin": "^6.0.0", + "css-minimizer-webpack-plugin": "^7.0.0", "eslint": "^8.57.0", "eslint-config-standard": "^17.1.0", "eslint-plugin-import": "^2.29.1", From ba402c1e998777f88a4c4525f7f656fb6c464936 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 12:09:59 +0300 Subject: [PATCH 051/150] chore(deps): bump solc from 0.8.25 to 0.8.26 in /apps/explorer (#10177) Bumps [solc](https://github.com/ethereum/solc-js) from 0.8.25 to 0.8.26. - [Commits](https://github.com/ethereum/solc-js/compare/v0.8.25...v0.8.26) --- updated-dependencies: - dependency-name: solc dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/explorer/package-lock.json | 14 +++++++------- apps/explorer/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/apps/explorer/package-lock.json b/apps/explorer/package-lock.json index 372f46363c8a..e62ea298b99f 100644 --- a/apps/explorer/package-lock.json +++ b/apps/explorer/package-lock.json @@ -7,7 +7,7 @@ "name": "blockscout", "license": "GPL-3.0", "dependencies": { - "solc": "0.8.25" + "solc": "0.8.26" }, "engines": { "node": "18.x", @@ -76,9 +76,9 @@ } }, "node_modules/solc": { - "version": "0.8.25", - "resolved": "https://registry.npmjs.org/solc/-/solc-0.8.25.tgz", - "integrity": "sha512-7P0TF8gPeudl1Ko3RGkyY6XVCxe2SdD/qQhtns1vl3yAbK/PDifKDLHGtx1t7mX3LgR7ojV7Fg/Kc6Q9D2T8UQ==", + "version": "0.8.26", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.8.26.tgz", + "integrity": "sha512-yiPQNVf5rBFHwN6SIf3TUUvVAFKcQqmSUFeq+fb6pNRCo0ZCgpYOZDi3BVoezCPIAcKrVYd/qXlBLUP9wVrZ9g==", "dependencies": { "command-exists": "^1.2.8", "commander": "^8.1.0", @@ -144,9 +144,9 @@ "integrity": 
"sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==" }, "solc": { - "version": "0.8.25", - "resolved": "https://registry.npmjs.org/solc/-/solc-0.8.25.tgz", - "integrity": "sha512-7P0TF8gPeudl1Ko3RGkyY6XVCxe2SdD/qQhtns1vl3yAbK/PDifKDLHGtx1t7mX3LgR7ojV7Fg/Kc6Q9D2T8UQ==", + "version": "0.8.26", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.8.26.tgz", + "integrity": "sha512-yiPQNVf5rBFHwN6SIf3TUUvVAFKcQqmSUFeq+fb6pNRCo0ZCgpYOZDi3BVoezCPIAcKrVYd/qXlBLUP9wVrZ9g==", "requires": { "command-exists": "^1.2.8", "commander": "^8.1.0", diff --git a/apps/explorer/package.json b/apps/explorer/package.json index c17256626e88..effecd6187bd 100644 --- a/apps/explorer/package.json +++ b/apps/explorer/package.json @@ -13,6 +13,6 @@ }, "scripts": {}, "dependencies": { - "solc": "0.8.25" + "solc": "0.8.26" } } From f3092afa96355f523c6baa2e9fb11f793c682763 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 12:38:00 +0300 Subject: [PATCH 052/150] chore(deps): bump @amplitude/analytics-browser (#10176) Bumps [@amplitude/analytics-browser](https://github.com/amplitude/Amplitude-TypeScript) from 2.7.0 to 2.8.1. - [Release notes](https://github.com/amplitude/Amplitude-TypeScript/releases) - [Commits](https://github.com/amplitude/Amplitude-TypeScript/compare/@amplitude/analytics-browser@2.7.0...@amplitude/analytics-browser@2.8.1) --- updated-dependencies: - dependency-name: "@amplitude/analytics-browser" dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 102 +++++++++--------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 52 insertions(+), 52 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index 97b1d2d21be2..d3f418dcb6c2 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -7,7 +7,7 @@ "name": "blockscout", "license": "GPL-3.0", "dependencies": { - "@amplitude/analytics-browser": "^2.7.0", + "@amplitude/analytics-browser": "^2.8.1", "@fortawesome/fontawesome-free": "^6.5.2", "@tarekraafat/autocomplete.js": "^10.2.7", "@walletconnect/web3-provider": "^1.8.0", @@ -116,14 +116,14 @@ } }, "node_modules/@amplitude/analytics-browser": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.7.0.tgz", - "integrity": "sha512-dUsNo7tIVIylfKIdhkLu6gViG9sOIxWDkiMnxhd/gf8faUK6J5JW+rtHnlyfifKMcH65yXvN1Hg6Q9B2BRs87A==", - "dependencies": { - "@amplitude/analytics-client-common": "^2.1.4", - "@amplitude/analytics-core": "^2.2.5", - "@amplitude/analytics-types": "^2.5.0", - "@amplitude/plugin-page-view-tracking-browser": "^2.2.7", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.8.1.tgz", + "integrity": "sha512-gvCvGQUbHjrmQQma5qBDQo25TD/p+TCoWX20XfGJC0rR51MCWacbYQZ6CQrECXte7c6XNzc11hC6Kf50bO0Sxw==", + "dependencies": { + "@amplitude/analytics-client-common": "^2.2.1", + "@amplitude/analytics-core": "^2.2.8", + "@amplitude/analytics-types": "^2.5.1", + "@amplitude/plugin-page-view-tracking-browser": "^2.2.13", "tslib": "^2.4.1" } }, @@ -133,13 +133,13 @@ "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==" }, 
"node_modules/@amplitude/analytics-client-common": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.1.4.tgz", - "integrity": "sha512-v1XhIkX23/qu49F8xqQMJSN9uBdV0JnoYV1M9TP3TERicYK5dHIPX2q5vFnnPwGl6D7fxm/80MgpA6hwVf4MrA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.2.1.tgz", + "integrity": "sha512-JXpLjKgP7LiBMZF5eMjtiPiSPW8FxvF4bHubDg26Ck2buBUQvrBq6D4VyOky0m1zG7WA4vOoLANu3sWfF02/7Q==", "dependencies": { "@amplitude/analytics-connector": "^1.4.8", - "@amplitude/analytics-core": "^2.2.5", - "@amplitude/analytics-types": "^2.5.0", + "@amplitude/analytics-core": "^2.2.8", + "@amplitude/analytics-types": "^2.5.1", "tslib": "^2.4.1" } }, @@ -154,11 +154,11 @@ "integrity": "sha512-T8mOYzB9RRxckzhL0NTHwdge9xuFxXEOplC8B1Y3UX3NHa3BLh7DlBUZlCOwQgMc2nxDfnSweDL5S3bhC+W90g==" }, "node_modules/@amplitude/analytics-core": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.2.5.tgz", - "integrity": "sha512-GNgZoY8515TODx+Pc+IN/g/71umwjsZcCkM0isMV8dZZfh0gCd2IBhg7GLeQoAVnFzQh0+vr4Z3Agdm2RytA6w==", + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.2.8.tgz", + "integrity": "sha512-T4ZFk1LUD+4z02XMSWK4qPvMREUTgNtEoYQc6BvNyfOx1Ih21qyJLFcdSw05gL26JuuuYZWFkpHxxKm8IoZh8Q==", "dependencies": { - "@amplitude/analytics-types": "^2.5.0", + "@amplitude/analytics-types": "^2.5.1", "tslib": "^2.4.1" } }, @@ -168,17 +168,17 @@ "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, "node_modules/@amplitude/analytics-types": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-types/-/analytics-types-2.5.0.tgz", - "integrity": "sha512-aY69WxUvVlaCU+9geShjTsAYdUTvegEXH9i4WK/97kNbNLl4/7qUuIPe4hNireDeKLuQA9SA3H7TKynuNomDxw==" + 
"version": "2.5.1", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-types/-/analytics-types-2.5.1.tgz", + "integrity": "sha512-xGuLiHRLv5z3RSAHiICjzHK4TjUZJ7aHr/jM0XhSgD/V1YdmG/s8y1UAWGI2Stsxm2x0DWNOyEpjYkGohlnXtA==" }, "node_modules/@amplitude/plugin-page-view-tracking-browser": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.2.7.tgz", - "integrity": "sha512-UF/Oyqxk29ZAW2VlguSL4oIPbtDqsO7GZsfnnPpBXGwB/CWegB7FtAreK4n8HHMDA/mgt+YS9I9ZJmfq26rIBA==", + "version": "2.2.13", + "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.2.13.tgz", + "integrity": "sha512-EYvG4RTyNqIGlq3J+UnrTRRj75xp3nOSETeInPQbnHhiPaCzxTjWrb9URkzy2IYSyBSn4KEQkZegmzhWXxlvvw==", "dependencies": { - "@amplitude/analytics-client-common": "^2.1.4", - "@amplitude/analytics-types": "^2.5.0", + "@amplitude/analytics-client-common": "^2.2.1", + "@amplitude/analytics-types": "^2.5.1", "tslib": "^2.4.1" } }, @@ -17972,14 +17972,14 @@ "dev": true }, "@amplitude/analytics-browser": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.7.0.tgz", - "integrity": "sha512-dUsNo7tIVIylfKIdhkLu6gViG9sOIxWDkiMnxhd/gf8faUK6J5JW+rtHnlyfifKMcH65yXvN1Hg6Q9B2BRs87A==", - "requires": { - "@amplitude/analytics-client-common": "^2.1.4", - "@amplitude/analytics-core": "^2.2.5", - "@amplitude/analytics-types": "^2.5.0", - "@amplitude/plugin-page-view-tracking-browser": "^2.2.7", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.8.1.tgz", + "integrity": "sha512-gvCvGQUbHjrmQQma5qBDQo25TD/p+TCoWX20XfGJC0rR51MCWacbYQZ6CQrECXte7c6XNzc11hC6Kf50bO0Sxw==", + "requires": { + "@amplitude/analytics-client-common": "^2.2.1", + "@amplitude/analytics-core": "^2.2.8", + "@amplitude/analytics-types": "^2.5.1", + 
"@amplitude/plugin-page-view-tracking-browser": "^2.2.13", "tslib": "^2.4.1" }, "dependencies": { @@ -17991,13 +17991,13 @@ } }, "@amplitude/analytics-client-common": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.1.4.tgz", - "integrity": "sha512-v1XhIkX23/qu49F8xqQMJSN9uBdV0JnoYV1M9TP3TERicYK5dHIPX2q5vFnnPwGl6D7fxm/80MgpA6hwVf4MrA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.2.1.tgz", + "integrity": "sha512-JXpLjKgP7LiBMZF5eMjtiPiSPW8FxvF4bHubDg26Ck2buBUQvrBq6D4VyOky0m1zG7WA4vOoLANu3sWfF02/7Q==", "requires": { "@amplitude/analytics-connector": "^1.4.8", - "@amplitude/analytics-core": "^2.2.5", - "@amplitude/analytics-types": "^2.5.0", + "@amplitude/analytics-core": "^2.2.8", + "@amplitude/analytics-types": "^2.5.1", "tslib": "^2.4.1" }, "dependencies": { @@ -18014,11 +18014,11 @@ "integrity": "sha512-T8mOYzB9RRxckzhL0NTHwdge9xuFxXEOplC8B1Y3UX3NHa3BLh7DlBUZlCOwQgMc2nxDfnSweDL5S3bhC+W90g==" }, "@amplitude/analytics-core": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.2.5.tgz", - "integrity": "sha512-GNgZoY8515TODx+Pc+IN/g/71umwjsZcCkM0isMV8dZZfh0gCd2IBhg7GLeQoAVnFzQh0+vr4Z3Agdm2RytA6w==", + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.2.8.tgz", + "integrity": "sha512-T4ZFk1LUD+4z02XMSWK4qPvMREUTgNtEoYQc6BvNyfOx1Ih21qyJLFcdSw05gL26JuuuYZWFkpHxxKm8IoZh8Q==", "requires": { - "@amplitude/analytics-types": "^2.5.0", + "@amplitude/analytics-types": "^2.5.1", "tslib": "^2.4.1" }, "dependencies": { @@ -18030,17 +18030,17 @@ } }, "@amplitude/analytics-types": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-types/-/analytics-types-2.5.0.tgz", - "integrity": 
"sha512-aY69WxUvVlaCU+9geShjTsAYdUTvegEXH9i4WK/97kNbNLl4/7qUuIPe4hNireDeKLuQA9SA3H7TKynuNomDxw==" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-types/-/analytics-types-2.5.1.tgz", + "integrity": "sha512-xGuLiHRLv5z3RSAHiICjzHK4TjUZJ7aHr/jM0XhSgD/V1YdmG/s8y1UAWGI2Stsxm2x0DWNOyEpjYkGohlnXtA==" }, "@amplitude/plugin-page-view-tracking-browser": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.2.7.tgz", - "integrity": "sha512-UF/Oyqxk29ZAW2VlguSL4oIPbtDqsO7GZsfnnPpBXGwB/CWegB7FtAreK4n8HHMDA/mgt+YS9I9ZJmfq26rIBA==", + "version": "2.2.13", + "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.2.13.tgz", + "integrity": "sha512-EYvG4RTyNqIGlq3J+UnrTRRj75xp3nOSETeInPQbnHhiPaCzxTjWrb9URkzy2IYSyBSn4KEQkZegmzhWXxlvvw==", "requires": { - "@amplitude/analytics-client-common": "^2.1.4", - "@amplitude/analytics-types": "^2.5.0", + "@amplitude/analytics-client-common": "^2.2.1", + "@amplitude/analytics-types": "^2.5.1", "tslib": "^2.4.1" }, "dependencies": { diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index 081bf04dad21..92cf81054397 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -20,7 +20,7 @@ }, "dependencies": { "@fortawesome/fontawesome-free": "^6.5.2", - "@amplitude/analytics-browser": "^2.7.0", + "@amplitude/analytics-browser": "^2.8.1", "@tarekraafat/autocomplete.js": "^10.2.7", "@walletconnect/web3-provider": "^1.8.0", "assert": "^2.1.0", From 2dc1dbe67808d223a93cfa7190e8584d1589cf43 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 12:38:37 +0300 Subject: [PATCH 053/150] chore(deps-dev): bump @babel/preset-env in /apps/block_scout_web/assets (#10174) Bumps 
[@babel/preset-env](https://github.com/babel/babel/tree/HEAD/packages/babel-preset-env) from 7.24.5 to 7.24.6. - [Release notes](https://github.com/babel/babel/releases) - [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md) - [Commits](https://github.com/babel/babel/commits/v7.24.6/packages/babel-preset-env) --- updated-dependencies: - dependency-name: "@babel/preset-env" dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 1522 ++++++++--------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 762 insertions(+), 762 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index d3f418dcb6c2..72b6a74fcbb6 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -72,7 +72,7 @@ }, "devDependencies": { "@babel/core": "^7.24.6", - "@babel/preset-env": "^7.24.5", + "@babel/preset-env": "^7.24.6", "autoprefixer": "^10.4.19", "babel-loader": "^9.1.3", "copy-webpack-plugin": "^12.0.2", @@ -280,23 +280,23 @@ } }, "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz", - "integrity": "sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.6.tgz", + "integrity": "sha512-DitEzDfOMnd13kZnDqns1ccmftwJTS9DMkyn9pYTxulS7bZxUxpMly3Nf23QQ6NwA4UB8lAqjbqWtyvElEMAkg==", "dependencies": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { 
- "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz", - "integrity": "sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.6.tgz", + "integrity": "sha512-+wnfqc5uHiMYtvRX7qu80Toef8BXeh4HHR1SPeonGb1SKPniNEd4a/nlaJJMv/OIEYvIVavvo0yR7u10Gqz0Iw==", "dev": true, "dependencies": { - "@babel/types": "^7.22.15" + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -331,19 +331,19 @@ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.5.tgz", - "integrity": "sha512-uRc4Cv8UQWnE4NXlYTIIdM7wfFkOqlFztcC/gVXDKohKoVB3OyonfelUBaJzSwpBntZ2KYGF/9S7asCHsXwW6g==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-member-expression-to-functions": "^7.24.5", - "@babel/helper-optimise-call-expression": "^7.22.5", - "@babel/helper-replace-supers": "^7.24.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.24.5", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.6.tgz", + "integrity": "sha512-djsosdPJVZE6Vsw3kk7IPRWethP94WHGOhQTc67SNXE0ZzMhHgALw8iGmYS0TD1bbMM0VDROy43od7/hN6WYcA==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": 
"^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-function-name": "^7.24.6", + "@babel/helper-member-expression-to-functions": "^7.24.6", + "@babel/helper-optimise-call-expression": "^7.24.6", + "@babel/helper-replace-supers": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", "semver": "^6.3.1" }, "engines": { @@ -354,12 +354,12 @@ } }, "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz", - "integrity": "sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.24.6.tgz", + "integrity": "sha512-C875lFBIWWwyv6MHZUG9HmRrlTDgOsLWZfYR0nW69gaKJNe0/Mpxx5r0EID2ZdHQkdUmQo2t0uNckTL08/1BgA==", "dev": true, "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-annotate-as-pure": "^7.24.6", "regexpu-core": "^5.3.1", "semver": "^6.3.1" }, @@ -418,12 +418,12 @@ } }, "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.5.tgz", - "integrity": "sha512-4owRteeihKWKamtqg4JmWSsEZU445xpFRXPEwp44HbgbxdWlUV1b4Agg4lkA806Lil5XM/e+FJyS0vj5T6vmcA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.6.tgz", + "integrity": "sha512-OTsCufZTxDUsv2/eDXanw/mUZHWOxSbEmC3pP8cgjcy5rgeVPWWMStnv274DV60JtHxTk0adT0QrCzC4M9NWGg==", "dev": true, "dependencies": { - "@babel/types": "^7.24.5" + "@babel/types": "^7.24.6" }, "engines": { "node": 
">=6.9.0" @@ -459,34 +459,34 @@ } }, "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz", - "integrity": "sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.6.tgz", + "integrity": "sha512-3SFDJRbx7KuPRl8XDUr8O7GAEB8iGyWPjLKJh/ywP/Iy9WOmEfMrsWbaZpvBu2HSYn4KQygIsz0O7m8y10ncMA==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.5.tgz", - "integrity": "sha512-xjNLDopRzW2o6ba0gKbkZq5YWEBaK3PCyTOY1K2P/O07LGMhMqlMXPxwN4S5/RhWuCobT8z0jrlKGlYmeR1OhQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.6.tgz", + "integrity": "sha512-MZG/JcWfxybKwsA9N9PmtF2lOSFSEMVCpIRrbxccZFLJPrJciJdG/UhSh5W96GEteJI2ARqm5UAHxISwRDLSNg==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.20.tgz", - "integrity": "sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.24.6.tgz", + "integrity": "sha512-1Qursq9ArRZPAMOZf/nuzVW8HgJLkTB9y9LfP4lW2MVp4e9WkLJDovfKBxoDcCk6VuzIxyqWHyBoaCtSRP10yg==", "dev": true, "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - 
"@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-wrap-function": "^7.22.20" + "@babel/helper-annotate-as-pure": "^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-wrap-function": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -496,14 +496,14 @@ } }, "node_modules/@babel/helper-replace-supers": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.1.tgz", - "integrity": "sha512-QCR1UqC9BzG5vZl8BMicmZ28RuUBnHhAMddD8yHFHDRH9lLTZ9uUPehX8ctVPT8l0TKblJidqcgUUKGVrePleQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.6.tgz", + "integrity": "sha512-mRhfPwDqDpba8o1F8ESxsEkJMQkUF8ZIWrAc0FtWhxnjfextxMWxr22RtFizxxSYLjVHDeMgVsRq8BBZR2ikJQ==", "dev": true, "dependencies": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-member-expression-to-functions": "^7.23.0", - "@babel/helper-optimise-call-expression": "^7.22.5" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-member-expression-to-functions": "^7.24.6", + "@babel/helper-optimise-call-expression": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -524,12 +524,12 @@ } }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz", - "integrity": "sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.6.tgz", + "integrity": "sha512-jhbbkK3IUKc4T43WadP96a27oYti9gEf1LdyGSP2rHGH77kwLwfhO7TgwnWvxxQVmke0ImmCSS47vcuxEMGD3Q==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" }, 
"engines": { "node": ">=6.9.0" @@ -571,14 +571,14 @@ } }, "node_modules/@babel/helper-wrap-function": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz", - "integrity": "sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.24.6.tgz", + "integrity": "sha512-f1JLrlw/jbiNfxvdrfBgio/gRBk3yTAEJWirpAkiJG2Hb22E7cEYKHWo0dFPTv/niPovzIdPdEDetrv6tC6gPQ==", "dev": true, "dependencies": { - "@babel/helper-function-name": "^7.22.5", - "@babel/template": "^7.22.15", - "@babel/types": "^7.22.19" + "@babel/helper-function-name": "^7.24.6", + "@babel/template": "^7.24.6", + "@babel/types": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -622,13 +622,13 @@ } }, "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.24.5.tgz", - "integrity": "sha512-LdXRi1wEMTrHVR4Zc9F8OewC3vdm5h4QB6L71zy6StmYeqGi1b3ttIO8UC+BfZKcH9jdr4aI249rBkm+3+YvHw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.24.6.tgz", + "integrity": "sha512-bYndrJ6Ph6Ar+GaB5VAc0JPoP80bQCm4qon6JEzXfRl5QZyQ8Ur1K6k7htxWmPA5z+k7JQvaMUrtXlqclWYzKw==", "dev": true, "dependencies": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -638,12 +638,12 @@ } }, "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.24.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.1.tgz", - "integrity": "sha512-y4HqEnkelJIOQGd+3g1bTeKsA5c6qM7eOn7VggGVbBc0y8MLSKHacwcIE2PplNlQSj0PqS9rrXL/nkPVK+kUNg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.6.tgz", + "integrity": "sha512-iVuhb6poq5ikqRq2XWU6OQ+R5o9wF+r/or9CeUyovgptz0UlnK4/seOQ1Istu/XybYjAhQv1FRSSfHHufIku5Q==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -653,14 +653,14 @@ } }, "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.1.tgz", - "integrity": "sha512-Hj791Ii4ci8HqnaKHAlLNs+zaLXb0EzSDhiAWp5VNlyvCNymYfacs64pxTxbH1znW/NcArSmwpmG9IKE/TUVVQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.6.tgz", + "integrity": "sha512-c8TER5xMDYzzFcGqOEp9l4hvB7dcbhcGjcLVwxWfe4P5DOafdwjsBJZKsmv+o3aXh7NhopvayQIovHrh2zSRUQ==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", - "@babel/plugin-transform-optional-chaining": "^7.24.1" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6", + "@babel/plugin-transform-optional-chaining": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -670,13 +670,13 @@ } }, 
"node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.1.tgz", - "integrity": "sha512-m9m/fXsXLiHfwdgydIFnpk+7jlVbnvlK5B2EKiPdLUb6WX654ZaaEWJUjk8TftRbZpK0XibovlLWX4KIZhV6jw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.6.tgz", + "integrity": "sha512-z8zEjYmwBUHN/pCF3NuWBhHQjJCrd33qAi8MgANfMrAvn72k2cImT8VjK9LJFu4ysOLJqhfkYYb3MvwANRUNZQ==", "dev": true, "dependencies": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -773,12 +773,12 @@ } }, "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.1.tgz", - "integrity": "sha512-IuwnI5XnuF189t91XbxmXeCDz3qs6iDRO7GJ++wcfgeXNs/8FmIlKcpDSXNVyuLQxlwvskmI3Ct73wUODkJBlQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.6.tgz", + "integrity": "sha512-BE6o2BogJKJImTmGpkmOic4V0hlRRxVtzqxiSPa8TIFxyhi4EFjHm08nq1M4STK4RytuLMgnSz0/wfflvGFNOg==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -788,12 +788,12 @@ } }, "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.1.tgz", - "integrity": 
"sha512-zhQTMH0X2nVLnb04tz+s7AMuasX8U0FnpE+nHTOhSOINjWMnopoZTxtIKsd45n4GQ/HIZLyfIpoul8e2m0DnRA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.6.tgz", + "integrity": "sha512-D+CfsVZousPXIdudSII7RGy52+dYRtbyKAZcvtQKq/NpsivyMVduepzcLqG5pMBugtMdedxdC8Ramdpcne9ZWQ==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -975,12 +975,12 @@ } }, "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.1.tgz", - "integrity": "sha512-ngT/3NkRhsaep9ck9uj2Xhv9+xB1zShY3tM3g6om4xxCELwCDN4g4Aq5dRn48+0hasAql7s2hdBOysCfNpr4fw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.6.tgz", + "integrity": "sha512-jSSSDt4ZidNMggcLx8SaKsbGNEfIl0PHx/4mFEulorE7bpYLbN0d3pDW3eJ7Y5Z3yPhy3L3NaPCYyTUY7TuugQ==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -990,14 +990,14 @@ } }, "node_modules/@babel/plugin-transform-async-generator-functions": { - "version": "7.24.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.3.tgz", - "integrity": "sha512-Qe26CMYVjpQxJ8zxM1340JFNjZaF+ISWpr1Kt/jGo+ZTUzKkfw/pphEWbRCb+lmSM6k/TOgfYLvmbHkUQ0asIg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.6.tgz", + "integrity": "sha512-VEP2o4iR2DqQU6KPgizTW2mnMx6BG5b5O9iQdrW9HesLkv8GIA8x2daXBQxw1MrsIkFQGA/iJ204CKoQ8UcnAA==", "dev": true, "dependencies": { - 
"@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-remap-async-to-generator": "^7.22.20", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-remap-async-to-generator": "^7.24.6", "@babel/plugin-syntax-async-generators": "^7.8.4" }, "engines": { @@ -1008,14 +1008,14 @@ } }, "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.1.tgz", - "integrity": "sha512-AawPptitRXp1y0n4ilKcGbRYWfbbzFWz2NqNu7dacYDtFtz0CMjG64b3LQsb3KIgnf4/obcUL78hfaOS7iCUfw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.6.tgz", + "integrity": "sha512-NTBA2SioI3OsHeIn6sQmhvXleSl9T70YY/hostQLveWs0ic+qvbA3fa0kwAwQ0OA/XGaAerNZRQGJyRfhbJK4g==", "dev": true, "dependencies": { - "@babel/helper-module-imports": "^7.24.1", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-remap-async-to-generator": "^7.22.20" + "@babel/helper-module-imports": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-remap-async-to-generator": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1025,12 +1025,12 @@ } }, "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.1.tgz", - "integrity": "sha512-TWWC18OShZutrv9C6mye1xwtam+uNi2bnTOCBUd5sZxyHOiWbU6ztSROofIMrK84uweEZC219POICK/sTYwfgg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.6.tgz", + "integrity": 
"sha512-XNW7jolYHW9CwORrZgA/97tL/k05qe/HL0z/qqJq1mdWhwwCM6D4BJBV7wAz9HgFziN5dTOG31znkVIzwxv+vw==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1040,12 +1040,12 @@ } }, "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.5.tgz", - "integrity": "sha512-sMfBc3OxghjC95BkYrYocHL3NaOplrcaunblzwXhGmlPwpmfsxr4vK+mBBt49r+S240vahmv+kUxkeKgs+haCw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.6.tgz", + "integrity": "sha512-S/t1Xh4ehW7sGA7c1j/hiOBLnEYCp/c2sEG4ZkL8kI1xX9tW2pqJTCHKtdhe/jHKt8nG0pFCrDHUXd4DvjHS9w==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1055,13 +1055,13 @@ } }, "node_modules/@babel/plugin-transform-class-properties": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.1.tgz", - "integrity": "sha512-OMLCXi0NqvJfORTaPQBwqLXHhb93wkBKZ4aNwMl6WtehO7ar+cmp+89iPEQPqxAnxsOKTaMcs3POz3rKayJ72g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.6.tgz", + "integrity": "sha512-j6dZ0Z2Z2slWLR3kt9aOmSIrBvnntWjMDN/TVcMPxhXMLmJVqX605CBRlcGI4b32GMbfifTEsdEjGjiE+j/c3A==", "dev": true, "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.24.1", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-class-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1071,13 +1071,13 @@ } }, "node_modules/@babel/plugin-transform-class-static-block": { - 
"version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.4.tgz", - "integrity": "sha512-B8q7Pz870Hz/q9UgP8InNpY01CSLDSCyqX7zcRuv3FcPl87A2G17lASroHWaCtbdIcbYzOZ7kWmXFKbijMSmFg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.6.tgz", + "integrity": "sha512-1QSRfoPI9RoLRa8Mnakc6v3e0gJxiZQTYrMfLn+mD0sz5+ndSzwymp2hDcYJTyT0MOn0yuWzj8phlIvO72gTHA==", "dev": true, "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.24.4", - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-create-class-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-class-static-block": "^7.14.5" }, "engines": { @@ -1088,18 +1088,18 @@ } }, "node_modules/@babel/plugin-transform-classes": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.5.tgz", - "integrity": "sha512-gWkLP25DFj2dwe9Ck8uwMOpko4YsqyfZJrOmqqcegeDYEbp7rmn4U6UQZNj08UF6MaX39XenSpKRCvpDRBtZ7Q==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.24.5", - "@babel/helper-replace-supers": "^7.24.1", - "@babel/helper-split-export-declaration": "^7.24.5", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.6.tgz", + "integrity": "sha512-+fN+NO2gh8JtRmDSOB6gaCVo36ha8kfCW1nMq2Gc0DABln0VcHN4PrALDvF5/diLzIRKptC7z/d7Lp64zk92Fg==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.6", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + 
"@babel/helper-function-name": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-replace-supers": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", "globals": "^11.1.0" }, "engines": { @@ -1110,13 +1110,13 @@ } }, "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.1.tgz", - "integrity": "sha512-5pJGVIUfJpOS+pAqBQd+QMaTD2vCL/HcePooON6pDpHgRp4gNRmzyHTPIkXntwKsq3ayUFVfJaIKPw2pOkOcTw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.6.tgz", + "integrity": "sha512-cRzPobcfRP0ZtuIEkA8QzghoUpSB3X3qSH5W2+FzG+VjWbJXExtx0nbRqwumdBN1x/ot2SlTNQLfBCnPdzp6kg==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/template": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/template": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1126,12 +1126,12 @@ } }, "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.5.tgz", - "integrity": "sha512-SZuuLyfxvsm+Ah57I/i1HVjveBENYK9ue8MJ7qkc7ndoNjqquJiElzA7f5yaAXjyW2hKojosOTAQQRX50bPSVg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.6.tgz", + "integrity": "sha512-YLW6AE5LQpk5npNXL7i/O+U9CE4XsBCuRPgyjl1EICZYKmcitV+ayuuUGMJm2lC1WWjXYszeTnIxF/dq/GhIZQ==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1141,13 +1141,13 @@ } }, "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.24.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.1.tgz", - "integrity": "sha512-p7uUxgSoZwZ2lPNMzUkqCts3xlp8n+o05ikjy7gbtFJSt9gdU88jAmtfmOxHM14noQXBxfgzf2yRWECiNVhTCw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.6.tgz", + "integrity": "sha512-rCXPnSEKvkm/EjzOtLoGvKseK+dS4kZwx1HexO3BtRtgL0fQ34awHn34aeSHuXtZY2F8a1X8xqBBPRtOxDVmcA==", "dev": true, "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1157,12 +1157,12 @@ } }, "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.1.tgz", - "integrity": "sha512-msyzuUnvsjsaSaocV6L7ErfNsa5nDWL1XKNnDePLgmz+WdU4w/J8+AxBMrWfi9m4IxfL5sZQKUPQKDQeeAT6lA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.6.tgz", + "integrity": "sha512-/8Odwp/aVkZwPFJMllSbawhDAO3UJi65foB00HYnK/uXvvCPm0TAXSByjz1mpRmp0q6oX2SIxpkUOpPFHk7FLA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1172,12 +1172,12 @@ } }, "node_modules/@babel/plugin-transform-dynamic-import": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.1.tgz", - "integrity": "sha512-av2gdSTyXcJVdI+8aFZsCAtR29xJt0S5tas+Ef8NvBNmD1a+N/3ecMLeMBgfcK+xzsjdLDT6oHt+DFPyeqUbDA==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.6.tgz", + "integrity": "sha512-vpq8SSLRTBLOHUZHSnBqVo0AKX3PBaoPs2vVzYVWslXDTDIpwAcCDtfhUcHSQQoYoUvcFPTdC8TZYXu9ZnLT/w==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-dynamic-import": "^7.8.3" }, "engines": { @@ -1188,13 +1188,13 @@ } }, "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.1.tgz", - "integrity": "sha512-U1yX13dVBSwS23DEAqU+Z/PkwE9/m7QQy8Y9/+Tdb8UWYaGNDYwTLi19wqIAiROr8sXVum9A/rtiH5H0boUcTw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.6.tgz", + "integrity": "sha512-EemYpHtmz0lHE7hxxxYEuTYOOBZ43WkDgZ4arQ4r+VX9QHuNZC+WH3wUWmRNvR8ECpTRne29aZV6XO22qpOtdA==", "dev": true, "dependencies": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1204,12 +1204,12 @@ } }, "node_modules/@babel/plugin-transform-export-namespace-from": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.1.tgz", - "integrity": "sha512-Ft38m/KFOyzKw2UaJFkWG9QnHPG/Q/2SkOrRk4pNBPg5IPZ+dOxcmkK5IyuBcxiNPyyYowPGUReyBvrvZs7IlQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.6.tgz", + "integrity": 
"sha512-inXaTM1SVrIxCkIJ5gqWiozHfFMStuGbGJAxZFBoHcRRdDP0ySLb3jH6JOwmfiinPwyMZqMBX+7NBDCO4z0NSA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" }, "engines": { @@ -1220,13 +1220,13 @@ } }, "node_modules/@babel/plugin-transform-for-of": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.1.tgz", - "integrity": "sha512-OxBdcnF04bpdQdR3i4giHZNZQn7cm8RQKcSwA17wAAqEELo1ZOwp5FFgeptWUQXFyT9kwHo10aqqauYkRZPCAg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.6.tgz", + "integrity": "sha512-n3Sf72TnqK4nw/jziSqEl1qaWPbCRw2CziHH+jdRYvw4J6yeCzsj4jdw8hIntOEeDGTmHVe2w4MVL44PN0GMzg==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1236,14 +1236,14 @@ } }, "node_modules/@babel/plugin-transform-function-name": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.1.tgz", - "integrity": "sha512-BXmDZpPlh7jwicKArQASrj8n22/w6iymRnvHYYd2zO30DbE277JO20/7yXJT3QxDPtiQiOxQBbZH4TpivNXIxA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.6.tgz", + "integrity": "sha512-sOajCu6V0P1KPljWHKiDq6ymgqB+vfo3isUS4McqW1DZtvSVU2v/wuMhmRmkg3sFoq6GMaUUf8W4WtoSLkOV/Q==", "dev": true, "dependencies": { - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-compilation-targets": "^7.24.6", + 
"@babel/helper-function-name": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1253,12 +1253,12 @@ } }, "node_modules/@babel/plugin-transform-json-strings": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.1.tgz", - "integrity": "sha512-U7RMFmRvoasscrIFy5xA4gIp8iWnWubnKkKuUGJjsuOH7GfbMkB+XZzeslx2kLdEGdOJDamEmCqOks6e8nv8DQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.6.tgz", + "integrity": "sha512-Uvgd9p2gUnzYJxVdBLcU0KurF8aVhkmVyMKW4MIY1/BByvs3EBpv45q01o7pRTVmTvtQq5zDlytP3dcUgm7v9w==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-json-strings": "^7.8.3" }, "engines": { @@ -1269,12 +1269,12 @@ } }, "node_modules/@babel/plugin-transform-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.1.tgz", - "integrity": "sha512-zn9pwz8U7nCqOYIiBaOxoQOtYmMODXTJnkxG4AtX8fPmnCRYWBOHD0qcpwS9e2VDSp1zNJYpdnFMIKb8jmwu6g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.6.tgz", + "integrity": "sha512-f2wHfR2HF6yMj+y+/y07+SLqnOSwRp8KYLpQKOzS58XLVlULhXbiYcygfXQxJlMbhII9+yXDwOUFLf60/TL5tw==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1284,12 +1284,12 @@ } }, "node_modules/@babel/plugin-transform-logical-assignment-operators": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.1.tgz", - "integrity": 
"sha512-OhN6J4Bpz+hIBqItTeWJujDOfNP+unqv/NJgyhlpSqgBTPm37KkMmZV6SYcOj+pnDbdcl1qRGV/ZiIjX9Iy34w==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.6.tgz", + "integrity": "sha512-EKaWvnezBCMkRIHxMJSIIylzhqK09YpiJtDbr2wsXTwnO0TxyjMUkaw4RlFIZMIS0iDj0KyIg7H7XCguHu/YDA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" }, "engines": { @@ -1300,12 +1300,12 @@ } }, "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.1.tgz", - "integrity": "sha512-4ojai0KysTWXzHseJKa1XPNXKRbuUrhkOPY4rEGeR+7ChlJVKxFa3H3Bz+7tWaGKgJAXUWKOGmltN+u9B3+CVg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.6.tgz", + "integrity": "sha512-9g8iV146szUo5GWgXpRbq/GALTnY+WnNuRTuRHWWFfWGbP9ukRL0aO/jpu9dmOPikclkxnNsjY8/gsWl6bmZJQ==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1315,13 +1315,13 @@ } }, "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.1.tgz", - "integrity": "sha512-lAxNHi4HVtjnHd5Rxg3D5t99Xm6H7b04hUS7EHIXcUl2EV4yl1gWdqZrNzXnSrHveL9qMdbODlLF55mvgjAfaQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.6.tgz", + "integrity": 
"sha512-eAGogjZgcwqAxhyFgqghvoHRr+EYRQPFjUXrTYKBRb5qPnAVxOOglaxc4/byHqjvq/bqO2F3/CGwTHsgKJYHhQ==", "dev": true, "dependencies": { - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1331,14 +1331,14 @@ } }, "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.1.tgz", - "integrity": "sha512-szog8fFTUxBfw0b98gEWPaEqF42ZUD/T3bkynW/wtgx2p/XCP55WEsb+VosKceRSd6njipdZvNogqdtI4Q0chw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.6.tgz", + "integrity": "sha512-JEV8l3MHdmmdb7S7Cmx6rbNEjRCgTQMZxllveHO0mx6uiclB0NflCawlQQ6+o5ZrwjUBYPzHm2XoK4wqGVUFuw==", "dev": true, "dependencies": { - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-simple-access": "^7.22.5" + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-simple-access": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1348,15 +1348,15 @@ } }, "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.1.tgz", - "integrity": "sha512-mqQ3Zh9vFO1Tpmlt8QPnbwGHzNz3lpNEMxQb1kAemn/erstyqw1r9KeOlOfo3y6xAnFEcOv2tSyrXfmMk+/YZA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.6.tgz", + "integrity": "sha512-xg1Z0J5JVYxtpX954XqaaAT6NpAY6LtZXvYFCJmGFJWwtlz2EmJoR8LycFRGNE8dBKizGWkGQZGegtkV8y8s+w==", "dev": true, "dependencies": { - "@babel/helper-hoist-variables": 
"^7.22.5", - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-validator-identifier": "^7.22.20" + "@babel/helper-hoist-variables": "^7.24.6", + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-validator-identifier": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1366,13 +1366,13 @@ } }, "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.1.tgz", - "integrity": "sha512-tuA3lpPj+5ITfcCluy6nWonSL7RvaG0AOTeAuvXqEKS34lnLzXpDb0dcP6K8jD0zWZFNDVly90AGFJPnm4fOYg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.6.tgz", + "integrity": "sha512-esRCC/KsSEUvrSjv5rFYnjZI6qv4R1e/iHQrqwbZIoRJqk7xCvEUiN7L1XrmW5QSmQe3n1XD88wbgDTWLbVSyg==", "dev": true, "dependencies": { - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1382,13 +1382,13 @@ } }, "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz", - "integrity": "sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.6.tgz", + "integrity": "sha512-6DneiCiu91wm3YiNIGDWZsl6GfTTbspuj/toTEqLh9d4cx50UIzSdg+T96p8DuT7aJOBRhFyaE9ZvTHkXrXr6Q==", "dev": true, "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.5", - 
"@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1398,12 +1398,12 @@ } }, "node_modules/@babel/plugin-transform-new-target": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.1.tgz", - "integrity": "sha512-/rurytBM34hYy0HKZQyA0nHbQgQNFm4Q/BOc9Hflxi2X3twRof7NaE5W46j4kQitm7SvACVRXsa6N/tSZxvPug==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.6.tgz", + "integrity": "sha512-f8liz9JG2Va8A4J5ZBuaSdwfPqN6axfWRK+y66fjKYbwf9VBLuq4WxtinhJhvp1w6lamKUwLG0slK2RxqFgvHA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1413,12 +1413,12 @@ } }, "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.1.tgz", - "integrity": "sha512-iQ+caew8wRrhCikO5DrUYx0mrmdhkaELgFa+7baMcVuhxIkN7oxt06CZ51D65ugIb1UWRQ8oQe+HXAVM6qHFjw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.6.tgz", + "integrity": "sha512-+QlAiZBMsBK5NqrBWFXCYeXyiU1y7BQ/OYaiPAcQJMomn5Tyg+r5WuVtyEuvTbpV7L25ZSLfE+2E9ywj4FD48A==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" }, "engines": { @@ -1429,12 +1429,12 @@ } }, "node_modules/@babel/plugin-transform-numeric-separator": { - "version": "7.24.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.1.tgz", - "integrity": "sha512-7GAsGlK4cNL2OExJH1DzmDeKnRv/LXq0eLUSvudrehVA5Rgg4bIrqEUW29FbKMBRT0ztSqisv7kjP+XIC4ZMNw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.6.tgz", + "integrity": "sha512-6voawq8T25Jvvnc4/rXcWZQKKxUNZcKMS8ZNrjxQqoRFernJJKjE3s18Qo6VFaatG5aiX5JV1oPD7DbJhn0a4Q==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-numeric-separator": "^7.10.4" }, "engines": { @@ -1445,15 +1445,15 @@ } }, "node_modules/@babel/plugin-transform-object-rest-spread": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.5.tgz", - "integrity": "sha512-7EauQHszLGM3ay7a161tTQH7fj+3vVM/gThlz5HpFtnygTxjrlvoeq7MPVA1Vy9Q555OB8SnAOsMkLShNkkrHA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.6.tgz", + "integrity": "sha512-OKmi5wiMoRW5Smttne7BwHM8s/fb5JFs+bVGNSeHWzwZkWXWValR1M30jyXo1s/RaqgwwhEC62u4rFH/FBcBPg==", "dev": true, "dependencies": { - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-plugin-utils": "^7.24.5", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-transform-parameters": "^7.24.5" + "@babel/plugin-transform-parameters": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1463,13 +1463,13 @@ } }, "node_modules/@babel/plugin-transform-object-super": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.1.tgz", - "integrity": 
"sha512-oKJqR3TeI5hSLRxudMjFQ9re9fBVUU0GICqM3J1mi8MqlhVr6hC/ZN4ttAyMuQR6EZZIY6h/exe5swqGNNIkWQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.6.tgz", + "integrity": "sha512-N/C76ihFKlZgKfdkEYKtaRUtXZAgK7sOY4h2qrbVbVTXPrKGIi8aww5WGe/+Wmg8onn8sr2ut6FXlsbu/j6JHg==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-replace-supers": "^7.24.1" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-replace-supers": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1479,12 +1479,12 @@ } }, "node_modules/@babel/plugin-transform-optional-catch-binding": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.1.tgz", - "integrity": "sha512-oBTH7oURV4Y+3EUrf6cWn1OHio3qG/PVwO5J03iSJmBg6m2EhKjkAu/xuaXaYwWW9miYtvbWv4LNf0AmR43LUA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.6.tgz", + "integrity": "sha512-L5pZ+b3O1mSzJ71HmxSCmTVd03VOT2GXOigug6vDYJzE5awLI7P1g0wFcdmGuwSDSrQ0L2rDOe/hHws8J1rv3w==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" }, "engines": { @@ -1495,13 +1495,13 @@ } }, "node_modules/@babel/plugin-transform-optional-chaining": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.5.tgz", - "integrity": "sha512-xWCkmwKT+ihmA6l7SSTpk8e4qQl/274iNbSKRRS8mpqFR32ksy36+a+LWY8OXCCEefF8WFlnOHVsaDI2231wBg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.6.tgz", + "integrity": 
"sha512-cHbqF6l1QP11OkYTYQ+hhVx1E017O5ZcSPXk9oODpqhcAD1htsWG2NpHrrhthEO2qZomLK0FXS+u7NfrkF5aOQ==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.5", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6", "@babel/plugin-syntax-optional-chaining": "^7.8.3" }, "engines": { @@ -1512,12 +1512,12 @@ } }, "node_modules/@babel/plugin-transform-parameters": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.5.tgz", - "integrity": "sha512-9Co00MqZ2aoky+4j2jhofErthm6QVLKbpQrvz20c3CH9KQCLHyNB+t2ya4/UrRpQGR+Wrwjg9foopoeSdnHOkA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.6.tgz", + "integrity": "sha512-ST7guE8vLV+vI70wmAxuZpIKzVjvFX9Qs8bl5w6tN/6gOypPWUmMQL2p7LJz5E63vEGrDhAiYetniJFyBH1RkA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1527,13 +1527,13 @@ } }, "node_modules/@babel/plugin-transform-private-methods": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.1.tgz", - "integrity": "sha512-tGvisebwBO5em4PaYNqt4fkw56K2VALsAbAakY0FjTYqJp7gfdrgr7YX76Or8/cpik0W6+tj3rZ0uHU9Oil4tw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.6.tgz", + "integrity": "sha512-T9LtDI0BgwXOzyXrvgLTT8DFjCC/XgWLjflczTLXyvxbnSR/gpv0hbmzlHE/kmh9nOvlygbamLKRo6Op4yB6aw==", "dev": true, "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.24.1", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-class-features-plugin": "^7.24.6", + 
"@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1543,14 +1543,14 @@ } }, "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.5.tgz", - "integrity": "sha512-JM4MHZqnWR04jPMujQDTBVRnqxpLLpx2tkn7iPn+Hmsc0Gnb79yvRWOkvqFOx3Z7P7VxiRIR22c4eGSNj87OBQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.6.tgz", + "integrity": "sha512-Qu/ypFxCY5NkAnEhCF86Mvg3NSabKsh/TPpBVswEdkGl7+FbsYHy1ziRqJpwGH4thBdQHh8zx+z7vMYmcJ7iaQ==", "dev": true, "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-create-class-features-plugin": "^7.24.5", - "@babel/helper-plugin-utils": "^7.24.5", + "@babel/helper-annotate-as-pure": "^7.24.6", + "@babel/helper-create-class-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" }, "engines": { @@ -1561,12 +1561,12 @@ } }, "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.1.tgz", - "integrity": "sha512-LetvD7CrHmEx0G442gOomRr66d7q8HzzGGr4PMHGr+5YIm6++Yke+jxj246rpvsbyhJwCLxcTn6zW1P1BSenqA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.6.tgz", + "integrity": "sha512-oARaglxhRsN18OYsnPTpb8TcKQWDYNsPNmTnx5++WOAsUJ0cSC/FZVlIJCKvPbU4yn/UXsS0551CFKJhN0CaMw==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1576,12 +1576,12 @@ } }, 
"node_modules/@babel/plugin-transform-regenerator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.1.tgz", - "integrity": "sha512-sJwZBCzIBE4t+5Q4IGLaaun5ExVMRY0lYwos/jNecjMrVCygCdph3IKv0tkP5Fc87e/1+bebAmEAGBfnRD+cnw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.6.tgz", + "integrity": "sha512-SMDxO95I8WXRtXhTAc8t/NFQUT7VYbIWwJCJgEli9ml4MhqUMh4S6hxgH6SmAC3eAQNWCDJFxcFeEt9w2sDdXg==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "regenerator-transform": "^0.15.2" }, "engines": { @@ -1592,12 +1592,12 @@ } }, "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.1.tgz", - "integrity": "sha512-JAclqStUfIwKN15HrsQADFgeZt+wexNQ0uLhuqvqAUFoqPMjEcFCYZBhq0LUdz6dZK/mD+rErhW71fbx8RYElg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.6.tgz", + "integrity": "sha512-DcrgFXRRlK64dGE0ZFBPD5egM2uM8mgfrvTMOSB2yKzOtjpGegVYkzh3s1zZg1bBck3nkXiaOamJUqK3Syk+4A==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1638,12 +1638,12 @@ } }, "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.1.tgz", - "integrity": "sha512-LyjVB1nsJ6gTTUKRjRWx9C1s9hE7dLfP/knKdrfeH9UPtAGjYGgxIbFfx7xyLIEWs7Xe1Gnf8EWiUqfjLhInZA==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.6.tgz", + "integrity": "sha512-xnEUvHSMr9eOWS5Al2YPfc32ten7CXdH7Zwyyk7IqITg4nX61oHj+GxpNvl+y5JHjfN3KXE2IV55wAWowBYMVw==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1653,13 +1653,13 @@ } }, "node_modules/@babel/plugin-transform-spread": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.1.tgz", - "integrity": "sha512-KjmcIM+fxgY+KxPVbjelJC6hrH1CgtPmTvdXAfn3/a9CnWGSTY7nH4zm5+cjmWJybdcPSsD0++QssDsjcpe47g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.6.tgz", + "integrity": "sha512-h/2j7oIUDjS+ULsIrNZ6/TKG97FgmEk1PXryk/HQq6op4XUUUwif2f69fJrzK0wza2zjCS1xhXmouACaWV5uPA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1669,12 +1669,12 @@ } }, "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.1.tgz", - "integrity": "sha512-9v0f1bRXgPVcPrngOQvLXeGNNVLc8UjMVfebo9ka0WF3/7+aVUHmaJVT3sa0XCzEFioPfPHZiOcYG9qOsH63cw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.6.tgz", + "integrity": "sha512-fN8OcTLfGmYv7FnDrsjodYBo1DhPL3Pze/9mIIE2MGCT1KgADYIOD7rEglpLHZj8PZlC/JFX5WcD+85FLAQusw==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": 
">=6.9.0" @@ -1684,12 +1684,12 @@ } }, "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.1.tgz", - "integrity": "sha512-WRkhROsNzriarqECASCNu/nojeXCDTE/F2HmRgOzi7NGvyfYGq1NEjKBK3ckLfRgGc6/lPAqP0vDOSw3YtG34g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.6.tgz", + "integrity": "sha512-BJbEqJIcKwrqUP+KfUIkxz3q8VzXe2R8Wv8TaNgO1cx+nNavxn/2+H8kp9tgFSOL6wYPPEgFvU6IKS4qoGqhmg==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1699,12 +1699,12 @@ } }, "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.5.tgz", - "integrity": "sha512-UTGnhYVZtTAjdwOTzT+sCyXmTn8AhaxOS/MjG9REclZ6ULHWF9KoCZur0HSGU7hk8PdBFKKbYe6+gqdXWz84Jg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.6.tgz", + "integrity": "sha512-IshCXQ+G9JIFJI7bUpxTE/oA2lgVLAIK8q1KdJNoPXOpvRaNjMySGuvLfBw/Xi2/1lLo953uE8hyYSDW3TSYig==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1714,12 +1714,12 @@ } }, "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.1.tgz", - "integrity": "sha512-RlkVIcWT4TLI96zM660S877E7beKlQw7Ig+wqkKBiWfj0zH5Q4h50q6er4wzZKRNSYpfo6ILJ+hrJAGSX2qcNw==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.6.tgz", + "integrity": "sha512-bKl3xxcPbkQQo5eX9LjjDpU2xYHeEeNQbOhj0iPvetSzA+Tu9q/o5lujF4Sek60CM6MgYvOS/DJuwGbiEYAnLw==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1729,13 +1729,13 @@ } }, "node_modules/@babel/plugin-transform-unicode-property-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.1.tgz", - "integrity": "sha512-Ss4VvlfYV5huWApFsF8/Sq0oXnGO+jB+rijFEFugTd3cwSObUSnUi88djgR5528Csl0uKlrI331kRqe56Ov2Ng==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.6.tgz", + "integrity": "sha512-8EIgImzVUxy15cZiPii9GvLZwsy7Vxc+8meSlR3cXFmBIl5W5Tn9LGBf7CDKkHj4uVfNXCJB8RsVfnmY61iedA==", "dev": true, "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1745,13 +1745,13 @@ } }, "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.1.tgz", - "integrity": "sha512-2A/94wgZgxfTsiLaQ2E36XAOdcZmGAaEEgVmxQWwZXWkGhvoHbaqXcKnU8zny4ycpu3vNqg0L/PcCiYtHtA13g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.6.tgz", + "integrity": "sha512-pssN6ExsvxaKU638qcWb81RrvvgZom3jDgU/r5xFZ7TONkZGFf4MhI2ltMb8OcQWhHyxgIavEU+hgqtbKOmsPA==", "dev": true, "dependencies": { - "@babel/helper-create-regexp-features-plugin": 
"^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1761,13 +1761,13 @@ } }, "node_modules/@babel/plugin-transform-unicode-sets-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.1.tgz", - "integrity": "sha512-fqj4WuzzS+ukpgerpAoOnMfQXwUHFxXUZUE84oL2Kao2N8uSlvcpnAidKASgsNgzZHBsHWvcm8s9FPWUhAb8fA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.6.tgz", + "integrity": "sha512-quiMsb28oXWIDK0gXLALOJRXLgICLiulqdZGOaPPd0vRT7fQp74NtdADAVu+D8s00C+0Xs0MxVP0VKF/sZEUgw==", "dev": true, "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" }, "engines": { "node": ">=6.9.0" @@ -1777,27 +1777,27 @@ } }, "node_modules/@babel/preset-env": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.5.tgz", - "integrity": "sha512-UGK2ifKtcC8i5AI4cH+sbLLuLc2ktYSFJgBAXorKAsHUZmrQ1q6aQ6i3BvU24wWs2AAKqQB6kq3N9V9Gw1HiMQ==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.24.4", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-plugin-utils": "^7.24.5", - "@babel/helper-validator-option": "^7.23.5", - "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.24.5", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.24.1", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.1", - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.24.1", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.6.tgz", + "integrity": "sha512-CrxEAvN7VxfjOG8JNF2Y/eMqMJbZPZ185amwGUBp8D9USK90xQmv7dLdFSa+VbD7fdIqcy/Mfv7WtzG8+/qxKg==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.24.6", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-validator-option": "^7.24.6", + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.24.6", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.24.6", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.6", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.24.6", "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", - "@babel/plugin-syntax-import-assertions": "^7.24.1", - "@babel/plugin-syntax-import-attributes": "^7.24.1", + "@babel/plugin-syntax-import-assertions": "^7.24.6", + "@babel/plugin-syntax-import-attributes": "^7.24.6", "@babel/plugin-syntax-import-meta": "^7.10.4", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", @@ -1809,54 +1809,54 @@ "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.24.1", - "@babel/plugin-transform-async-generator-functions": "^7.24.3", - "@babel/plugin-transform-async-to-generator": "^7.24.1", - "@babel/plugin-transform-block-scoped-functions": "^7.24.1", - "@babel/plugin-transform-block-scoping": "^7.24.5", - "@babel/plugin-transform-class-properties": "^7.24.1", 
- "@babel/plugin-transform-class-static-block": "^7.24.4", - "@babel/plugin-transform-classes": "^7.24.5", - "@babel/plugin-transform-computed-properties": "^7.24.1", - "@babel/plugin-transform-destructuring": "^7.24.5", - "@babel/plugin-transform-dotall-regex": "^7.24.1", - "@babel/plugin-transform-duplicate-keys": "^7.24.1", - "@babel/plugin-transform-dynamic-import": "^7.24.1", - "@babel/plugin-transform-exponentiation-operator": "^7.24.1", - "@babel/plugin-transform-export-namespace-from": "^7.24.1", - "@babel/plugin-transform-for-of": "^7.24.1", - "@babel/plugin-transform-function-name": "^7.24.1", - "@babel/plugin-transform-json-strings": "^7.24.1", - "@babel/plugin-transform-literals": "^7.24.1", - "@babel/plugin-transform-logical-assignment-operators": "^7.24.1", - "@babel/plugin-transform-member-expression-literals": "^7.24.1", - "@babel/plugin-transform-modules-amd": "^7.24.1", - "@babel/plugin-transform-modules-commonjs": "^7.24.1", - "@babel/plugin-transform-modules-systemjs": "^7.24.1", - "@babel/plugin-transform-modules-umd": "^7.24.1", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.22.5", - "@babel/plugin-transform-new-target": "^7.24.1", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.1", - "@babel/plugin-transform-numeric-separator": "^7.24.1", - "@babel/plugin-transform-object-rest-spread": "^7.24.5", - "@babel/plugin-transform-object-super": "^7.24.1", - "@babel/plugin-transform-optional-catch-binding": "^7.24.1", - "@babel/plugin-transform-optional-chaining": "^7.24.5", - "@babel/plugin-transform-parameters": "^7.24.5", - "@babel/plugin-transform-private-methods": "^7.24.1", - "@babel/plugin-transform-private-property-in-object": "^7.24.5", - "@babel/plugin-transform-property-literals": "^7.24.1", - "@babel/plugin-transform-regenerator": "^7.24.1", - "@babel/plugin-transform-reserved-words": "^7.24.1", - "@babel/plugin-transform-shorthand-properties": "^7.24.1", - "@babel/plugin-transform-spread": "^7.24.1", - 
"@babel/plugin-transform-sticky-regex": "^7.24.1", - "@babel/plugin-transform-template-literals": "^7.24.1", - "@babel/plugin-transform-typeof-symbol": "^7.24.5", - "@babel/plugin-transform-unicode-escapes": "^7.24.1", - "@babel/plugin-transform-unicode-property-regex": "^7.24.1", - "@babel/plugin-transform-unicode-regex": "^7.24.1", - "@babel/plugin-transform-unicode-sets-regex": "^7.24.1", + "@babel/plugin-transform-arrow-functions": "^7.24.6", + "@babel/plugin-transform-async-generator-functions": "^7.24.6", + "@babel/plugin-transform-async-to-generator": "^7.24.6", + "@babel/plugin-transform-block-scoped-functions": "^7.24.6", + "@babel/plugin-transform-block-scoping": "^7.24.6", + "@babel/plugin-transform-class-properties": "^7.24.6", + "@babel/plugin-transform-class-static-block": "^7.24.6", + "@babel/plugin-transform-classes": "^7.24.6", + "@babel/plugin-transform-computed-properties": "^7.24.6", + "@babel/plugin-transform-destructuring": "^7.24.6", + "@babel/plugin-transform-dotall-regex": "^7.24.6", + "@babel/plugin-transform-duplicate-keys": "^7.24.6", + "@babel/plugin-transform-dynamic-import": "^7.24.6", + "@babel/plugin-transform-exponentiation-operator": "^7.24.6", + "@babel/plugin-transform-export-namespace-from": "^7.24.6", + "@babel/plugin-transform-for-of": "^7.24.6", + "@babel/plugin-transform-function-name": "^7.24.6", + "@babel/plugin-transform-json-strings": "^7.24.6", + "@babel/plugin-transform-literals": "^7.24.6", + "@babel/plugin-transform-logical-assignment-operators": "^7.24.6", + "@babel/plugin-transform-member-expression-literals": "^7.24.6", + "@babel/plugin-transform-modules-amd": "^7.24.6", + "@babel/plugin-transform-modules-commonjs": "^7.24.6", + "@babel/plugin-transform-modules-systemjs": "^7.24.6", + "@babel/plugin-transform-modules-umd": "^7.24.6", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.24.6", + "@babel/plugin-transform-new-target": "^7.24.6", + "@babel/plugin-transform-nullish-coalescing-operator": 
"^7.24.6", + "@babel/plugin-transform-numeric-separator": "^7.24.6", + "@babel/plugin-transform-object-rest-spread": "^7.24.6", + "@babel/plugin-transform-object-super": "^7.24.6", + "@babel/plugin-transform-optional-catch-binding": "^7.24.6", + "@babel/plugin-transform-optional-chaining": "^7.24.6", + "@babel/plugin-transform-parameters": "^7.24.6", + "@babel/plugin-transform-private-methods": "^7.24.6", + "@babel/plugin-transform-private-property-in-object": "^7.24.6", + "@babel/plugin-transform-property-literals": "^7.24.6", + "@babel/plugin-transform-regenerator": "^7.24.6", + "@babel/plugin-transform-reserved-words": "^7.24.6", + "@babel/plugin-transform-shorthand-properties": "^7.24.6", + "@babel/plugin-transform-spread": "^7.24.6", + "@babel/plugin-transform-sticky-regex": "^7.24.6", + "@babel/plugin-transform-template-literals": "^7.24.6", + "@babel/plugin-transform-typeof-symbol": "^7.24.6", + "@babel/plugin-transform-unicode-escapes": "^7.24.6", + "@babel/plugin-transform-unicode-property-regex": "^7.24.6", + "@babel/plugin-transform-unicode-regex": "^7.24.6", + "@babel/plugin-transform-unicode-sets-regex": "^7.24.6", "@babel/preset-modules": "0.1.6-no-external-plugins", "babel-plugin-polyfill-corejs2": "^0.4.10", "babel-plugin-polyfill-corejs3": "^0.10.4", @@ -18126,20 +18126,20 @@ } }, "@babel/helper-annotate-as-pure": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz", - "integrity": "sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.6.tgz", + "integrity": "sha512-DitEzDfOMnd13kZnDqns1ccmftwJTS9DMkyn9pYTxulS7bZxUxpMly3Nf23QQ6NwA4UB8lAqjbqWtyvElEMAkg==", "requires": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" } }, "@babel/helper-builder-binary-assignment-operator-visitor": { - 
"version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz", - "integrity": "sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.6.tgz", + "integrity": "sha512-+wnfqc5uHiMYtvRX7qu80Toef8BXeh4HHR1SPeonGb1SKPniNEd4a/nlaJJMv/OIEYvIVavvo0yR7u10Gqz0Iw==", "dev": true, "requires": { - "@babel/types": "^7.22.15" + "@babel/types": "^7.24.6" } }, "@babel/helper-compilation-targets": { @@ -18170,29 +18170,29 @@ } }, "@babel/helper-create-class-features-plugin": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.5.tgz", - "integrity": "sha512-uRc4Cv8UQWnE4NXlYTIIdM7wfFkOqlFztcC/gVXDKohKoVB3OyonfelUBaJzSwpBntZ2KYGF/9S7asCHsXwW6g==", - "dev": true, - "requires": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-member-expression-to-functions": "^7.24.5", - "@babel/helper-optimise-call-expression": "^7.22.5", - "@babel/helper-replace-supers": "^7.24.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.24.5", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.6.tgz", + "integrity": "sha512-djsosdPJVZE6Vsw3kk7IPRWethP94WHGOhQTc67SNXE0ZzMhHgALw8iGmYS0TD1bbMM0VDROy43od7/hN6WYcA==", + "dev": true, + "requires": { + "@babel/helper-annotate-as-pure": "^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-function-name": "^7.24.6", + 
"@babel/helper-member-expression-to-functions": "^7.24.6", + "@babel/helper-optimise-call-expression": "^7.24.6", + "@babel/helper-replace-supers": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", "semver": "^6.3.1" } }, "@babel/helper-create-regexp-features-plugin": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz", - "integrity": "sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.24.6.tgz", + "integrity": "sha512-C875lFBIWWwyv6MHZUG9HmRrlTDgOsLWZfYR0nW69gaKJNe0/Mpxx5r0EID2ZdHQkdUmQo2t0uNckTL08/1BgA==", "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-annotate-as-pure": "^7.24.6", "regexpu-core": "^5.3.1", "semver": "^6.3.1" } @@ -18233,12 +18233,12 @@ } }, "@babel/helper-member-expression-to-functions": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.5.tgz", - "integrity": "sha512-4owRteeihKWKamtqg4JmWSsEZU445xpFRXPEwp44HbgbxdWlUV1b4Agg4lkA806Lil5XM/e+FJyS0vj5T6vmcA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.6.tgz", + "integrity": "sha512-OTsCufZTxDUsv2/eDXanw/mUZHWOxSbEmC3pP8cgjcy5rgeVPWWMStnv274DV60JtHxTk0adT0QrCzC4M9NWGg==", "dev": true, "requires": { - "@babel/types": "^7.24.5" + "@babel/types": "^7.24.6" } }, "@babel/helper-module-imports": { @@ -18262,39 +18262,39 @@ } }, "@babel/helper-optimise-call-expression": { - "version": "7.22.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz", - "integrity": "sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.6.tgz", + "integrity": "sha512-3SFDJRbx7KuPRl8XDUr8O7GAEB8iGyWPjLKJh/ywP/Iy9WOmEfMrsWbaZpvBu2HSYn4KQygIsz0O7m8y10ncMA==", "dev": true, "requires": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" } }, "@babel/helper-plugin-utils": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.5.tgz", - "integrity": "sha512-xjNLDopRzW2o6ba0gKbkZq5YWEBaK3PCyTOY1K2P/O07LGMhMqlMXPxwN4S5/RhWuCobT8z0jrlKGlYmeR1OhQ==" + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.6.tgz", + "integrity": "sha512-MZG/JcWfxybKwsA9N9PmtF2lOSFSEMVCpIRrbxccZFLJPrJciJdG/UhSh5W96GEteJI2ARqm5UAHxISwRDLSNg==" }, "@babel/helper-remap-async-to-generator": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.20.tgz", - "integrity": "sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.24.6.tgz", + "integrity": "sha512-1Qursq9ArRZPAMOZf/nuzVW8HgJLkTB9y9LfP4lW2MVp4e9WkLJDovfKBxoDcCk6VuzIxyqWHyBoaCtSRP10yg==", "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-wrap-function": "^7.22.20" + "@babel/helper-annotate-as-pure": "^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-wrap-function": "^7.24.6" } }, 
"@babel/helper-replace-supers": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.1.tgz", - "integrity": "sha512-QCR1UqC9BzG5vZl8BMicmZ28RuUBnHhAMddD8yHFHDRH9lLTZ9uUPehX8ctVPT8l0TKblJidqcgUUKGVrePleQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.6.tgz", + "integrity": "sha512-mRhfPwDqDpba8o1F8ESxsEkJMQkUF8ZIWrAc0FtWhxnjfextxMWxr22RtFizxxSYLjVHDeMgVsRq8BBZR2ikJQ==", "dev": true, "requires": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-member-expression-to-functions": "^7.23.0", - "@babel/helper-optimise-call-expression": "^7.22.5" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-member-expression-to-functions": "^7.24.6", + "@babel/helper-optimise-call-expression": "^7.24.6" } }, "@babel/helper-simple-access": { @@ -18306,12 +18306,12 @@ } }, "@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz", - "integrity": "sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.6.tgz", + "integrity": "sha512-jhbbkK3IUKc4T43WadP96a27oYti9gEf1LdyGSP2rHGH77kwLwfhO7TgwnWvxxQVmke0ImmCSS47vcuxEMGD3Q==", "dev": true, "requires": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.24.6" } }, "@babel/helper-split-export-declaration": { @@ -18338,14 +18338,14 @@ "integrity": "sha512-Jktc8KkF3zIkePb48QO+IapbXlSapOW9S+ogZZkcO6bABgYAxtZcjZ/O005111YLf+j4M84uEgwYoidDkXbCkQ==" }, "@babel/helper-wrap-function": { - "version": "7.22.20", - "resolved": 
"https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz", - "integrity": "sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.24.6.tgz", + "integrity": "sha512-f1JLrlw/jbiNfxvdrfBgio/gRBk3yTAEJWirpAkiJG2Hb22E7cEYKHWo0dFPTv/niPovzIdPdEDetrv6tC6gPQ==", "dev": true, "requires": { - "@babel/helper-function-name": "^7.22.5", - "@babel/template": "^7.22.15", - "@babel/types": "^7.22.19" + "@babel/helper-function-name": "^7.24.6", + "@babel/template": "^7.24.6", + "@babel/types": "^7.24.6" } }, "@babel/helpers": { @@ -18374,43 +18374,43 @@ "integrity": "sha512-eNZXdfU35nJC2h24RznROuOpO94h6x8sg9ju0tT9biNtLZ2vuP8SduLqqV+/8+cebSLV9SJEAN5Z3zQbJG/M+Q==" }, "@babel/plugin-bugfix-firefox-class-in-computed-class-key": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.24.5.tgz", - "integrity": "sha512-LdXRi1wEMTrHVR4Zc9F8OewC3vdm5h4QB6L71zy6StmYeqGi1b3ttIO8UC+BfZKcH9jdr4aI249rBkm+3+YvHw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.24.6.tgz", + "integrity": "sha512-bYndrJ6Ph6Ar+GaB5VAc0JPoP80bQCm4qon6JEzXfRl5QZyQ8Ur1K6k7htxWmPA5z+k7JQvaMUrtXlqclWYzKw==", "dev": true, "requires": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.24.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.1.tgz", - "integrity": "sha512-y4HqEnkelJIOQGd+3g1bTeKsA5c6qM7eOn7VggGVbBc0y8MLSKHacwcIE2PplNlQSj0PqS9rrXL/nkPVK+kUNg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.6.tgz", + "integrity": "sha512-iVuhb6poq5ikqRq2XWU6OQ+R5o9wF+r/or9CeUyovgptz0UlnK4/seOQ1Istu/XybYjAhQv1FRSSfHHufIku5Q==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.1.tgz", - "integrity": "sha512-Hj791Ii4ci8HqnaKHAlLNs+zaLXb0EzSDhiAWp5VNlyvCNymYfacs64pxTxbH1znW/NcArSmwpmG9IKE/TUVVQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.6.tgz", + "integrity": "sha512-c8TER5xMDYzzFcGqOEp9l4hvB7dcbhcGjcLVwxWfe4P5DOafdwjsBJZKsmv+o3aXh7NhopvayQIovHrh2zSRUQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", - "@babel/plugin-transform-optional-chaining": "^7.24.1" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6", + "@babel/plugin-transform-optional-chaining": "^7.24.6" } }, "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { - "version": "7.24.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.1.tgz", - "integrity": "sha512-m9m/fXsXLiHfwdgydIFnpk+7jlVbnvlK5B2EKiPdLUb6WX654ZaaEWJUjk8TftRbZpK0XibovlLWX4KIZhV6jw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.6.tgz", + "integrity": "sha512-z8zEjYmwBUHN/pCF3NuWBhHQjJCrd33qAi8MgANfMrAvn72k2cImT8VjK9LJFu4ysOLJqhfkYYb3MvwANRUNZQ==", "dev": true, "requires": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-proposal-private-property-in-object": { @@ -18475,21 +18475,21 @@ } }, "@babel/plugin-syntax-import-assertions": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.1.tgz", - "integrity": "sha512-IuwnI5XnuF189t91XbxmXeCDz3qs6iDRO7GJ++wcfgeXNs/8FmIlKcpDSXNVyuLQxlwvskmI3Ct73wUODkJBlQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.6.tgz", + "integrity": "sha512-BE6o2BogJKJImTmGpkmOic4V0hlRRxVtzqxiSPa8TIFxyhi4EFjHm08nq1M4STK4RytuLMgnSz0/wfflvGFNOg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-syntax-import-attributes": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.1.tgz", - "integrity": "sha512-zhQTMH0X2nVLnb04tz+s7AMuasX8U0FnpE+nHTOhSOINjWMnopoZTxtIKsd45n4GQ/HIZLyfIpoul8e2m0DnRA==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.6.tgz", + "integrity": "sha512-D+CfsVZousPXIdudSII7RGy52+dYRtbyKAZcvtQKq/NpsivyMVduepzcLqG5pMBugtMdedxdC8Ramdpcne9ZWQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-syntax-import-meta": { @@ -18611,401 +18611,401 @@ } }, "@babel/plugin-transform-arrow-functions": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.1.tgz", - "integrity": "sha512-ngT/3NkRhsaep9ck9uj2Xhv9+xB1zShY3tM3g6om4xxCELwCDN4g4Aq5dRn48+0hasAql7s2hdBOysCfNpr4fw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.6.tgz", + "integrity": "sha512-jSSSDt4ZidNMggcLx8SaKsbGNEfIl0PHx/4mFEulorE7bpYLbN0d3pDW3eJ7Y5Z3yPhy3L3NaPCYyTUY7TuugQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-async-generator-functions": { - "version": "7.24.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.3.tgz", - "integrity": "sha512-Qe26CMYVjpQxJ8zxM1340JFNjZaF+ISWpr1Kt/jGo+ZTUzKkfw/pphEWbRCb+lmSM6k/TOgfYLvmbHkUQ0asIg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.6.tgz", + "integrity": "sha512-VEP2o4iR2DqQU6KPgizTW2mnMx6BG5b5O9iQdrW9HesLkv8GIA8x2daXBQxw1MrsIkFQGA/iJ204CKoQ8UcnAA==", "dev": true, "requires": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-remap-async-to-generator": "^7.22.20", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-plugin-utils": 
"^7.24.6", + "@babel/helper-remap-async-to-generator": "^7.24.6", "@babel/plugin-syntax-async-generators": "^7.8.4" } }, "@babel/plugin-transform-async-to-generator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.1.tgz", - "integrity": "sha512-AawPptitRXp1y0n4ilKcGbRYWfbbzFWz2NqNu7dacYDtFtz0CMjG64b3LQsb3KIgnf4/obcUL78hfaOS7iCUfw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.6.tgz", + "integrity": "sha512-NTBA2SioI3OsHeIn6sQmhvXleSl9T70YY/hostQLveWs0ic+qvbA3fa0kwAwQ0OA/XGaAerNZRQGJyRfhbJK4g==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.24.1", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-remap-async-to-generator": "^7.22.20" + "@babel/helper-module-imports": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-remap-async-to-generator": "^7.24.6" } }, "@babel/plugin-transform-block-scoped-functions": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.1.tgz", - "integrity": "sha512-TWWC18OShZutrv9C6mye1xwtam+uNi2bnTOCBUd5sZxyHOiWbU6ztSROofIMrK84uweEZC219POICK/sTYwfgg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.6.tgz", + "integrity": "sha512-XNW7jolYHW9CwORrZgA/97tL/k05qe/HL0z/qqJq1mdWhwwCM6D4BJBV7wAz9HgFziN5dTOG31znkVIzwxv+vw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-block-scoping": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.5.tgz", - "integrity": 
"sha512-sMfBc3OxghjC95BkYrYocHL3NaOplrcaunblzwXhGmlPwpmfsxr4vK+mBBt49r+S240vahmv+kUxkeKgs+haCw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.6.tgz", + "integrity": "sha512-S/t1Xh4ehW7sGA7c1j/hiOBLnEYCp/c2sEG4ZkL8kI1xX9tW2pqJTCHKtdhe/jHKt8nG0pFCrDHUXd4DvjHS9w==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-class-properties": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.1.tgz", - "integrity": "sha512-OMLCXi0NqvJfORTaPQBwqLXHhb93wkBKZ4aNwMl6WtehO7ar+cmp+89iPEQPqxAnxsOKTaMcs3POz3rKayJ72g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.6.tgz", + "integrity": "sha512-j6dZ0Z2Z2slWLR3kt9aOmSIrBvnntWjMDN/TVcMPxhXMLmJVqX605CBRlcGI4b32GMbfifTEsdEjGjiE+j/c3A==", "dev": true, "requires": { - "@babel/helper-create-class-features-plugin": "^7.24.1", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-class-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-class-static-block": { - "version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.4.tgz", - "integrity": "sha512-B8q7Pz870Hz/q9UgP8InNpY01CSLDSCyqX7zcRuv3FcPl87A2G17lASroHWaCtbdIcbYzOZ7kWmXFKbijMSmFg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.6.tgz", + "integrity": "sha512-1QSRfoPI9RoLRa8Mnakc6v3e0gJxiZQTYrMfLn+mD0sz5+ndSzwymp2hDcYJTyT0MOn0yuWzj8phlIvO72gTHA==", "dev": true, "requires": { - "@babel/helper-create-class-features-plugin": "^7.24.4", - 
"@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-create-class-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-class-static-block": "^7.14.5" } }, "@babel/plugin-transform-classes": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.5.tgz", - "integrity": "sha512-gWkLP25DFj2dwe9Ck8uwMOpko4YsqyfZJrOmqqcegeDYEbp7rmn4U6UQZNj08UF6MaX39XenSpKRCvpDRBtZ7Q==", - "dev": true, - "requires": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.24.5", - "@babel/helper-replace-supers": "^7.24.1", - "@babel/helper-split-export-declaration": "^7.24.5", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.6.tgz", + "integrity": "sha512-+fN+NO2gh8JtRmDSOB6gaCVo36ha8kfCW1nMq2Gc0DABln0VcHN4PrALDvF5/diLzIRKptC7z/d7Lp64zk92Fg==", + "dev": true, + "requires": { + "@babel/helper-annotate-as-pure": "^7.24.6", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-environment-visitor": "^7.24.6", + "@babel/helper-function-name": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-replace-supers": "^7.24.6", + "@babel/helper-split-export-declaration": "^7.24.6", "globals": "^11.1.0" } }, "@babel/plugin-transform-computed-properties": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.1.tgz", - "integrity": "sha512-5pJGVIUfJpOS+pAqBQd+QMaTD2vCL/HcePooON6pDpHgRp4gNRmzyHTPIkXntwKsq3ayUFVfJaIKPw2pOkOcTw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.6.tgz", + "integrity": 
"sha512-cRzPobcfRP0ZtuIEkA8QzghoUpSB3X3qSH5W2+FzG+VjWbJXExtx0nbRqwumdBN1x/ot2SlTNQLfBCnPdzp6kg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/template": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/template": "^7.24.6" } }, "@babel/plugin-transform-destructuring": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.5.tgz", - "integrity": "sha512-SZuuLyfxvsm+Ah57I/i1HVjveBENYK9ue8MJ7qkc7ndoNjqquJiElzA7f5yaAXjyW2hKojosOTAQQRX50bPSVg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.6.tgz", + "integrity": "sha512-YLW6AE5LQpk5npNXL7i/O+U9CE4XsBCuRPgyjl1EICZYKmcitV+ayuuUGMJm2lC1WWjXYszeTnIxF/dq/GhIZQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-dotall-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.1.tgz", - "integrity": "sha512-p7uUxgSoZwZ2lPNMzUkqCts3xlp8n+o05ikjy7gbtFJSt9gdU88jAmtfmOxHM14noQXBxfgzf2yRWECiNVhTCw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.6.tgz", + "integrity": "sha512-rCXPnSEKvkm/EjzOtLoGvKseK+dS4kZwx1HexO3BtRtgL0fQ34awHn34aeSHuXtZY2F8a1X8xqBBPRtOxDVmcA==", "dev": true, "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-duplicate-keys": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.1.tgz", - "integrity": 
"sha512-msyzuUnvsjsaSaocV6L7ErfNsa5nDWL1XKNnDePLgmz+WdU4w/J8+AxBMrWfi9m4IxfL5sZQKUPQKDQeeAT6lA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.6.tgz", + "integrity": "sha512-/8Odwp/aVkZwPFJMllSbawhDAO3UJi65foB00HYnK/uXvvCPm0TAXSByjz1mpRmp0q6oX2SIxpkUOpPFHk7FLA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-dynamic-import": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.1.tgz", - "integrity": "sha512-av2gdSTyXcJVdI+8aFZsCAtR29xJt0S5tas+Ef8NvBNmD1a+N/3ecMLeMBgfcK+xzsjdLDT6oHt+DFPyeqUbDA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.6.tgz", + "integrity": "sha512-vpq8SSLRTBLOHUZHSnBqVo0AKX3PBaoPs2vVzYVWslXDTDIpwAcCDtfhUcHSQQoYoUvcFPTdC8TZYXu9ZnLT/w==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-dynamic-import": "^7.8.3" } }, "@babel/plugin-transform-exponentiation-operator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.1.tgz", - "integrity": "sha512-U1yX13dVBSwS23DEAqU+Z/PkwE9/m7QQy8Y9/+Tdb8UWYaGNDYwTLi19wqIAiROr8sXVum9A/rtiH5H0boUcTw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.6.tgz", + "integrity": "sha512-EemYpHtmz0lHE7hxxxYEuTYOOBZ43WkDgZ4arQ4r+VX9QHuNZC+WH3wUWmRNvR8ECpTRne29aZV6XO22qpOtdA==", "dev": true, "requires": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + 
"@babel/helper-builder-binary-assignment-operator-visitor": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-export-namespace-from": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.1.tgz", - "integrity": "sha512-Ft38m/KFOyzKw2UaJFkWG9QnHPG/Q/2SkOrRk4pNBPg5IPZ+dOxcmkK5IyuBcxiNPyyYowPGUReyBvrvZs7IlQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.6.tgz", + "integrity": "sha512-inXaTM1SVrIxCkIJ5gqWiozHfFMStuGbGJAxZFBoHcRRdDP0ySLb3jH6JOwmfiinPwyMZqMBX+7NBDCO4z0NSA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" } }, "@babel/plugin-transform-for-of": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.1.tgz", - "integrity": "sha512-OxBdcnF04bpdQdR3i4giHZNZQn7cm8RQKcSwA17wAAqEELo1ZOwp5FFgeptWUQXFyT9kwHo10aqqauYkRZPCAg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.6.tgz", + "integrity": "sha512-n3Sf72TnqK4nw/jziSqEl1qaWPbCRw2CziHH+jdRYvw4J6yeCzsj4jdw8hIntOEeDGTmHVe2w4MVL44PN0GMzg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6" } }, "@babel/plugin-transform-function-name": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.1.tgz", - "integrity": "sha512-BXmDZpPlh7jwicKArQASrj8n22/w6iymRnvHYYd2zO30DbE277JO20/7yXJT3QxDPtiQiOxQBbZH4TpivNXIxA==", + 
"version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.6.tgz", + "integrity": "sha512-sOajCu6V0P1KPljWHKiDq6ymgqB+vfo3isUS4McqW1DZtvSVU2v/wuMhmRmkg3sFoq6GMaUUf8W4WtoSLkOV/Q==", "dev": true, "requires": { - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-function-name": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-json-strings": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.1.tgz", - "integrity": "sha512-U7RMFmRvoasscrIFy5xA4gIp8iWnWubnKkKuUGJjsuOH7GfbMkB+XZzeslx2kLdEGdOJDamEmCqOks6e8nv8DQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.6.tgz", + "integrity": "sha512-Uvgd9p2gUnzYJxVdBLcU0KurF8aVhkmVyMKW4MIY1/BByvs3EBpv45q01o7pRTVmTvtQq5zDlytP3dcUgm7v9w==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-json-strings": "^7.8.3" } }, "@babel/plugin-transform-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.1.tgz", - "integrity": "sha512-zn9pwz8U7nCqOYIiBaOxoQOtYmMODXTJnkxG4AtX8fPmnCRYWBOHD0qcpwS9e2VDSp1zNJYpdnFMIKb8jmwu6g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.6.tgz", + "integrity": "sha512-f2wHfR2HF6yMj+y+/y07+SLqnOSwRp8KYLpQKOzS58XLVlULhXbiYcygfXQxJlMbhII9+yXDwOUFLf60/TL5tw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, 
"@babel/plugin-transform-logical-assignment-operators": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.1.tgz", - "integrity": "sha512-OhN6J4Bpz+hIBqItTeWJujDOfNP+unqv/NJgyhlpSqgBTPm37KkMmZV6SYcOj+pnDbdcl1qRGV/ZiIjX9Iy34w==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.6.tgz", + "integrity": "sha512-EKaWvnezBCMkRIHxMJSIIylzhqK09YpiJtDbr2wsXTwnO0TxyjMUkaw4RlFIZMIS0iDj0KyIg7H7XCguHu/YDA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" } }, "@babel/plugin-transform-member-expression-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.1.tgz", - "integrity": "sha512-4ojai0KysTWXzHseJKa1XPNXKRbuUrhkOPY4rEGeR+7ChlJVKxFa3H3Bz+7tWaGKgJAXUWKOGmltN+u9B3+CVg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.6.tgz", + "integrity": "sha512-9g8iV146szUo5GWgXpRbq/GALTnY+WnNuRTuRHWWFfWGbP9ukRL0aO/jpu9dmOPikclkxnNsjY8/gsWl6bmZJQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-modules-amd": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.1.tgz", - "integrity": "sha512-lAxNHi4HVtjnHd5Rxg3D5t99Xm6H7b04hUS7EHIXcUl2EV4yl1gWdqZrNzXnSrHveL9qMdbODlLF55mvgjAfaQ==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.6.tgz", + "integrity": "sha512-eAGogjZgcwqAxhyFgqghvoHRr+EYRQPFjUXrTYKBRb5qPnAVxOOglaxc4/byHqjvq/bqO2F3/CGwTHsgKJYHhQ==", "dev": true, "requires": { - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-modules-commonjs": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.1.tgz", - "integrity": "sha512-szog8fFTUxBfw0b98gEWPaEqF42ZUD/T3bkynW/wtgx2p/XCP55WEsb+VosKceRSd6njipdZvNogqdtI4Q0chw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.6.tgz", + "integrity": "sha512-JEV8l3MHdmmdb7S7Cmx6rbNEjRCgTQMZxllveHO0mx6uiclB0NflCawlQQ6+o5ZrwjUBYPzHm2XoK4wqGVUFuw==", "dev": true, "requires": { - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-simple-access": "^7.22.5" + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-simple-access": "^7.24.6" } }, "@babel/plugin-transform-modules-systemjs": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.1.tgz", - "integrity": "sha512-mqQ3Zh9vFO1Tpmlt8QPnbwGHzNz3lpNEMxQb1kAemn/erstyqw1r9KeOlOfo3y6xAnFEcOv2tSyrXfmMk+/YZA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.6.tgz", + "integrity": "sha512-xg1Z0J5JVYxtpX954XqaaAT6NpAY6LtZXvYFCJmGFJWwtlz2EmJoR8LycFRGNE8dBKizGWkGQZGegtkV8y8s+w==", "dev": true, "requires": { - "@babel/helper-hoist-variables": "^7.22.5", - 
"@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-validator-identifier": "^7.22.20" + "@babel/helper-hoist-variables": "^7.24.6", + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-validator-identifier": "^7.24.6" } }, "@babel/plugin-transform-modules-umd": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.1.tgz", - "integrity": "sha512-tuA3lpPj+5ITfcCluy6nWonSL7RvaG0AOTeAuvXqEKS34lnLzXpDb0dcP6K8jD0zWZFNDVly90AGFJPnm4fOYg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.6.tgz", + "integrity": "sha512-esRCC/KsSEUvrSjv5rFYnjZI6qv4R1e/iHQrqwbZIoRJqk7xCvEUiN7L1XrmW5QSmQe3n1XD88wbgDTWLbVSyg==", "dev": true, "requires": { - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-module-transforms": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz", - "integrity": "sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.6.tgz", + "integrity": "sha512-6DneiCiu91wm3YiNIGDWZsl6GfTTbspuj/toTEqLh9d4cx50UIzSdg+T96p8DuT7aJOBRhFyaE9ZvTHkXrXr6Q==", "dev": true, "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, 
"@babel/plugin-transform-new-target": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.1.tgz", - "integrity": "sha512-/rurytBM34hYy0HKZQyA0nHbQgQNFm4Q/BOc9Hflxi2X3twRof7NaE5W46j4kQitm7SvACVRXsa6N/tSZxvPug==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.6.tgz", + "integrity": "sha512-f8liz9JG2Va8A4J5ZBuaSdwfPqN6axfWRK+y66fjKYbwf9VBLuq4WxtinhJhvp1w6lamKUwLG0slK2RxqFgvHA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.1.tgz", - "integrity": "sha512-iQ+caew8wRrhCikO5DrUYx0mrmdhkaELgFa+7baMcVuhxIkN7oxt06CZ51D65ugIb1UWRQ8oQe+HXAVM6qHFjw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.6.tgz", + "integrity": "sha512-+QlAiZBMsBK5NqrBWFXCYeXyiU1y7BQ/OYaiPAcQJMomn5Tyg+r5WuVtyEuvTbpV7L25ZSLfE+2E9ywj4FD48A==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" } }, "@babel/plugin-transform-numeric-separator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.1.tgz", - "integrity": "sha512-7GAsGlK4cNL2OExJH1DzmDeKnRv/LXq0eLUSvudrehVA5Rgg4bIrqEUW29FbKMBRT0ztSqisv7kjP+XIC4ZMNw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.6.tgz", + "integrity": 
"sha512-6voawq8T25Jvvnc4/rXcWZQKKxUNZcKMS8ZNrjxQqoRFernJJKjE3s18Qo6VFaatG5aiX5JV1oPD7DbJhn0a4Q==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-numeric-separator": "^7.10.4" } }, "@babel/plugin-transform-object-rest-spread": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.5.tgz", - "integrity": "sha512-7EauQHszLGM3ay7a161tTQH7fj+3vVM/gThlz5HpFtnygTxjrlvoeq7MPVA1Vy9Q555OB8SnAOsMkLShNkkrHA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.6.tgz", + "integrity": "sha512-OKmi5wiMoRW5Smttne7BwHM8s/fb5JFs+bVGNSeHWzwZkWXWValR1M30jyXo1s/RaqgwwhEC62u4rFH/FBcBPg==", "dev": true, "requires": { - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-plugin-utils": "^7.24.5", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-transform-parameters": "^7.24.5" + "@babel/plugin-transform-parameters": "^7.24.6" } }, "@babel/plugin-transform-object-super": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.1.tgz", - "integrity": "sha512-oKJqR3TeI5hSLRxudMjFQ9re9fBVUU0GICqM3J1mi8MqlhVr6hC/ZN4ttAyMuQR6EZZIY6h/exe5swqGNNIkWQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.6.tgz", + "integrity": "sha512-N/C76ihFKlZgKfdkEYKtaRUtXZAgK7sOY4h2qrbVbVTXPrKGIi8aww5WGe/+Wmg8onn8sr2ut6FXlsbu/j6JHg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-replace-supers": "^7.24.1" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-replace-supers": "^7.24.6" 
} }, "@babel/plugin-transform-optional-catch-binding": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.1.tgz", - "integrity": "sha512-oBTH7oURV4Y+3EUrf6cWn1OHio3qG/PVwO5J03iSJmBg6m2EhKjkAu/xuaXaYwWW9miYtvbWv4LNf0AmR43LUA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.6.tgz", + "integrity": "sha512-L5pZ+b3O1mSzJ71HmxSCmTVd03VOT2GXOigug6vDYJzE5awLI7P1g0wFcdmGuwSDSrQ0L2rDOe/hHws8J1rv3w==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" } }, "@babel/plugin-transform-optional-chaining": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.5.tgz", - "integrity": "sha512-xWCkmwKT+ihmA6l7SSTpk8e4qQl/274iNbSKRRS8mpqFR32ksy36+a+LWY8OXCCEefF8WFlnOHVsaDI2231wBg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.6.tgz", + "integrity": "sha512-cHbqF6l1QP11OkYTYQ+hhVx1E017O5ZcSPXk9oODpqhcAD1htsWG2NpHrrhthEO2qZomLK0FXS+u7NfrkF5aOQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.5", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6", "@babel/plugin-syntax-optional-chaining": "^7.8.3" } }, "@babel/plugin-transform-parameters": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.5.tgz", - "integrity": "sha512-9Co00MqZ2aoky+4j2jhofErthm6QVLKbpQrvz20c3CH9KQCLHyNB+t2ya4/UrRpQGR+Wrwjg9foopoeSdnHOkA==", + "version": 
"7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.6.tgz", + "integrity": "sha512-ST7guE8vLV+vI70wmAxuZpIKzVjvFX9Qs8bl5w6tN/6gOypPWUmMQL2p7LJz5E63vEGrDhAiYetniJFyBH1RkA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-private-methods": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.1.tgz", - "integrity": "sha512-tGvisebwBO5em4PaYNqt4fkw56K2VALsAbAakY0FjTYqJp7gfdrgr7YX76Or8/cpik0W6+tj3rZ0uHU9Oil4tw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.6.tgz", + "integrity": "sha512-T9LtDI0BgwXOzyXrvgLTT8DFjCC/XgWLjflczTLXyvxbnSR/gpv0hbmzlHE/kmh9nOvlygbamLKRo6Op4yB6aw==", "dev": true, "requires": { - "@babel/helper-create-class-features-plugin": "^7.24.1", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-class-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-private-property-in-object": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.5.tgz", - "integrity": "sha512-JM4MHZqnWR04jPMujQDTBVRnqxpLLpx2tkn7iPn+Hmsc0Gnb79yvRWOkvqFOx3Z7P7VxiRIR22c4eGSNj87OBQ==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.6.tgz", + "integrity": "sha512-Qu/ypFxCY5NkAnEhCF86Mvg3NSabKsh/TPpBVswEdkGl7+FbsYHy1ziRqJpwGH4thBdQHh8zx+z7vMYmcJ7iaQ==", "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-create-class-features-plugin": "^7.24.5", - "@babel/helper-plugin-utils": "^7.24.5", + 
"@babel/helper-annotate-as-pure": "^7.24.6", + "@babel/helper-create-class-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" } }, "@babel/plugin-transform-property-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.1.tgz", - "integrity": "sha512-LetvD7CrHmEx0G442gOomRr66d7q8HzzGGr4PMHGr+5YIm6++Yke+jxj246rpvsbyhJwCLxcTn6zW1P1BSenqA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.6.tgz", + "integrity": "sha512-oARaglxhRsN18OYsnPTpb8TcKQWDYNsPNmTnx5++WOAsUJ0cSC/FZVlIJCKvPbU4yn/UXsS0551CFKJhN0CaMw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-regenerator": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.1.tgz", - "integrity": "sha512-sJwZBCzIBE4t+5Q4IGLaaun5ExVMRY0lYwos/jNecjMrVCygCdph3IKv0tkP5Fc87e/1+bebAmEAGBfnRD+cnw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.6.tgz", + "integrity": "sha512-SMDxO95I8WXRtXhTAc8t/NFQUT7VYbIWwJCJgEli9ml4MhqUMh4S6hxgH6SmAC3eAQNWCDJFxcFeEt9w2sDdXg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-plugin-utils": "^7.24.6", "regenerator-transform": "^0.15.2" } }, "@babel/plugin-transform-reserved-words": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.1.tgz", - "integrity": "sha512-JAclqStUfIwKN15HrsQADFgeZt+wexNQ0uLhuqvqAUFoqPMjEcFCYZBhq0LUdz6dZK/mD+rErhW71fbx8RYElg==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.6.tgz", + "integrity": "sha512-DcrgFXRRlK64dGE0ZFBPD5egM2uM8mgfrvTMOSB2yKzOtjpGegVYkzh3s1zZg1bBck3nkXiaOamJUqK3Syk+4A==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-runtime": { @@ -19033,112 +19033,112 @@ } }, "@babel/plugin-transform-shorthand-properties": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.1.tgz", - "integrity": "sha512-LyjVB1nsJ6gTTUKRjRWx9C1s9hE7dLfP/knKdrfeH9UPtAGjYGgxIbFfx7xyLIEWs7Xe1Gnf8EWiUqfjLhInZA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.6.tgz", + "integrity": "sha512-xnEUvHSMr9eOWS5Al2YPfc32ten7CXdH7Zwyyk7IqITg4nX61oHj+GxpNvl+y5JHjfN3KXE2IV55wAWowBYMVw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-spread": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.1.tgz", - "integrity": "sha512-KjmcIM+fxgY+KxPVbjelJC6hrH1CgtPmTvdXAfn3/a9CnWGSTY7nH4zm5+cjmWJybdcPSsD0++QssDsjcpe47g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.6.tgz", + "integrity": "sha512-h/2j7oIUDjS+ULsIrNZ6/TKG97FgmEk1PXryk/HQq6op4XUUUwif2f69fJrzK0wza2zjCS1xhXmouACaWV5uPA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.6" } }, "@babel/plugin-transform-sticky-regex": { - "version": "7.24.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.1.tgz", - "integrity": "sha512-9v0f1bRXgPVcPrngOQvLXeGNNVLc8UjMVfebo9ka0WF3/7+aVUHmaJVT3sa0XCzEFioPfPHZiOcYG9qOsH63cw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.6.tgz", + "integrity": "sha512-fN8OcTLfGmYv7FnDrsjodYBo1DhPL3Pze/9mIIE2MGCT1KgADYIOD7rEglpLHZj8PZlC/JFX5WcD+85FLAQusw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-template-literals": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.1.tgz", - "integrity": "sha512-WRkhROsNzriarqECASCNu/nojeXCDTE/F2HmRgOzi7NGvyfYGq1NEjKBK3ckLfRgGc6/lPAqP0vDOSw3YtG34g==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.6.tgz", + "integrity": "sha512-BJbEqJIcKwrqUP+KfUIkxz3q8VzXe2R8Wv8TaNgO1cx+nNavxn/2+H8kp9tgFSOL6wYPPEgFvU6IKS4qoGqhmg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-typeof-symbol": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.5.tgz", - "integrity": "sha512-UTGnhYVZtTAjdwOTzT+sCyXmTn8AhaxOS/MjG9REclZ6ULHWF9KoCZur0HSGU7hk8PdBFKKbYe6+gqdXWz84Jg==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.6.tgz", + "integrity": "sha512-IshCXQ+G9JIFJI7bUpxTE/oA2lgVLAIK8q1KdJNoPXOpvRaNjMySGuvLfBw/Xi2/1lLo953uE8hyYSDW3TSYig==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.5" + "@babel/helper-plugin-utils": "^7.24.6" } }, 
"@babel/plugin-transform-unicode-escapes": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.1.tgz", - "integrity": "sha512-RlkVIcWT4TLI96zM660S877E7beKlQw7Ig+wqkKBiWfj0zH5Q4h50q6er4wzZKRNSYpfo6ILJ+hrJAGSX2qcNw==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.6.tgz", + "integrity": "sha512-bKl3xxcPbkQQo5eX9LjjDpU2xYHeEeNQbOhj0iPvetSzA+Tu9q/o5lujF4Sek60CM6MgYvOS/DJuwGbiEYAnLw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-unicode-property-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.1.tgz", - "integrity": "sha512-Ss4VvlfYV5huWApFsF8/Sq0oXnGO+jB+rijFEFugTd3cwSObUSnUi88djgR5528Csl0uKlrI331kRqe56Ov2Ng==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.6.tgz", + "integrity": "sha512-8EIgImzVUxy15cZiPii9GvLZwsy7Vxc+8meSlR3cXFmBIl5W5Tn9LGBf7CDKkHj4uVfNXCJB8RsVfnmY61iedA==", "dev": true, "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-unicode-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.1.tgz", - "integrity": "sha512-2A/94wgZgxfTsiLaQ2E36XAOdcZmGAaEEgVmxQWwZXWkGhvoHbaqXcKnU8zny4ycpu3vNqg0L/PcCiYtHtA13g==", + "version": "7.24.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.6.tgz", + "integrity": "sha512-pssN6ExsvxaKU638qcWb81RrvvgZom3jDgU/r5xFZ7TONkZGFf4MhI2ltMb8OcQWhHyxgIavEU+hgqtbKOmsPA==", "dev": true, "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/plugin-transform-unicode-sets-regex": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.1.tgz", - "integrity": "sha512-fqj4WuzzS+ukpgerpAoOnMfQXwUHFxXUZUE84oL2Kao2N8uSlvcpnAidKASgsNgzZHBsHWvcm8s9FPWUhAb8fA==", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.6.tgz", + "integrity": "sha512-quiMsb28oXWIDK0gXLALOJRXLgICLiulqdZGOaPPd0vRT7fQp74NtdADAVu+D8s00C+0Xs0MxVP0VKF/sZEUgw==", "dev": true, "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.24.0" + "@babel/helper-create-regexp-features-plugin": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6" } }, "@babel/preset-env": { - "version": "7.24.5", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.5.tgz", - "integrity": "sha512-UGK2ifKtcC8i5AI4cH+sbLLuLc2ktYSFJgBAXorKAsHUZmrQ1q6aQ6i3BvU24wWs2AAKqQB6kq3N9V9Gw1HiMQ==", - "dev": true, - "requires": { - "@babel/compat-data": "^7.24.4", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-plugin-utils": "^7.24.5", - "@babel/helper-validator-option": "^7.23.5", - "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.24.5", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.24.1", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.1", - 
"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.24.1", + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.6.tgz", + "integrity": "sha512-CrxEAvN7VxfjOG8JNF2Y/eMqMJbZPZ185amwGUBp8D9USK90xQmv7dLdFSa+VbD7fdIqcy/Mfv7WtzG8+/qxKg==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.24.6", + "@babel/helper-compilation-targets": "^7.24.6", + "@babel/helper-plugin-utils": "^7.24.6", + "@babel/helper-validator-option": "^7.24.6", + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.24.6", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.24.6", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.6", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.24.6", "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", - "@babel/plugin-syntax-import-assertions": "^7.24.1", - "@babel/plugin-syntax-import-attributes": "^7.24.1", + "@babel/plugin-syntax-import-assertions": "^7.24.6", + "@babel/plugin-syntax-import-attributes": "^7.24.6", "@babel/plugin-syntax-import-meta": "^7.10.4", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", @@ -19150,54 +19150,54 @@ "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.24.1", - "@babel/plugin-transform-async-generator-functions": "^7.24.3", - "@babel/plugin-transform-async-to-generator": "^7.24.1", - "@babel/plugin-transform-block-scoped-functions": "^7.24.1", 
- "@babel/plugin-transform-block-scoping": "^7.24.5", - "@babel/plugin-transform-class-properties": "^7.24.1", - "@babel/plugin-transform-class-static-block": "^7.24.4", - "@babel/plugin-transform-classes": "^7.24.5", - "@babel/plugin-transform-computed-properties": "^7.24.1", - "@babel/plugin-transform-destructuring": "^7.24.5", - "@babel/plugin-transform-dotall-regex": "^7.24.1", - "@babel/plugin-transform-duplicate-keys": "^7.24.1", - "@babel/plugin-transform-dynamic-import": "^7.24.1", - "@babel/plugin-transform-exponentiation-operator": "^7.24.1", - "@babel/plugin-transform-export-namespace-from": "^7.24.1", - "@babel/plugin-transform-for-of": "^7.24.1", - "@babel/plugin-transform-function-name": "^7.24.1", - "@babel/plugin-transform-json-strings": "^7.24.1", - "@babel/plugin-transform-literals": "^7.24.1", - "@babel/plugin-transform-logical-assignment-operators": "^7.24.1", - "@babel/plugin-transform-member-expression-literals": "^7.24.1", - "@babel/plugin-transform-modules-amd": "^7.24.1", - "@babel/plugin-transform-modules-commonjs": "^7.24.1", - "@babel/plugin-transform-modules-systemjs": "^7.24.1", - "@babel/plugin-transform-modules-umd": "^7.24.1", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.22.5", - "@babel/plugin-transform-new-target": "^7.24.1", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.1", - "@babel/plugin-transform-numeric-separator": "^7.24.1", - "@babel/plugin-transform-object-rest-spread": "^7.24.5", - "@babel/plugin-transform-object-super": "^7.24.1", - "@babel/plugin-transform-optional-catch-binding": "^7.24.1", - "@babel/plugin-transform-optional-chaining": "^7.24.5", - "@babel/plugin-transform-parameters": "^7.24.5", - "@babel/plugin-transform-private-methods": "^7.24.1", - "@babel/plugin-transform-private-property-in-object": "^7.24.5", - "@babel/plugin-transform-property-literals": "^7.24.1", - "@babel/plugin-transform-regenerator": "^7.24.1", - "@babel/plugin-transform-reserved-words": "^7.24.1", - 
"@babel/plugin-transform-shorthand-properties": "^7.24.1", - "@babel/plugin-transform-spread": "^7.24.1", - "@babel/plugin-transform-sticky-regex": "^7.24.1", - "@babel/plugin-transform-template-literals": "^7.24.1", - "@babel/plugin-transform-typeof-symbol": "^7.24.5", - "@babel/plugin-transform-unicode-escapes": "^7.24.1", - "@babel/plugin-transform-unicode-property-regex": "^7.24.1", - "@babel/plugin-transform-unicode-regex": "^7.24.1", - "@babel/plugin-transform-unicode-sets-regex": "^7.24.1", + "@babel/plugin-transform-arrow-functions": "^7.24.6", + "@babel/plugin-transform-async-generator-functions": "^7.24.6", + "@babel/plugin-transform-async-to-generator": "^7.24.6", + "@babel/plugin-transform-block-scoped-functions": "^7.24.6", + "@babel/plugin-transform-block-scoping": "^7.24.6", + "@babel/plugin-transform-class-properties": "^7.24.6", + "@babel/plugin-transform-class-static-block": "^7.24.6", + "@babel/plugin-transform-classes": "^7.24.6", + "@babel/plugin-transform-computed-properties": "^7.24.6", + "@babel/plugin-transform-destructuring": "^7.24.6", + "@babel/plugin-transform-dotall-regex": "^7.24.6", + "@babel/plugin-transform-duplicate-keys": "^7.24.6", + "@babel/plugin-transform-dynamic-import": "^7.24.6", + "@babel/plugin-transform-exponentiation-operator": "^7.24.6", + "@babel/plugin-transform-export-namespace-from": "^7.24.6", + "@babel/plugin-transform-for-of": "^7.24.6", + "@babel/plugin-transform-function-name": "^7.24.6", + "@babel/plugin-transform-json-strings": "^7.24.6", + "@babel/plugin-transform-literals": "^7.24.6", + "@babel/plugin-transform-logical-assignment-operators": "^7.24.6", + "@babel/plugin-transform-member-expression-literals": "^7.24.6", + "@babel/plugin-transform-modules-amd": "^7.24.6", + "@babel/plugin-transform-modules-commonjs": "^7.24.6", + "@babel/plugin-transform-modules-systemjs": "^7.24.6", + "@babel/plugin-transform-modules-umd": "^7.24.6", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.24.6", + 
"@babel/plugin-transform-new-target": "^7.24.6", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.6", + "@babel/plugin-transform-numeric-separator": "^7.24.6", + "@babel/plugin-transform-object-rest-spread": "^7.24.6", + "@babel/plugin-transform-object-super": "^7.24.6", + "@babel/plugin-transform-optional-catch-binding": "^7.24.6", + "@babel/plugin-transform-optional-chaining": "^7.24.6", + "@babel/plugin-transform-parameters": "^7.24.6", + "@babel/plugin-transform-private-methods": "^7.24.6", + "@babel/plugin-transform-private-property-in-object": "^7.24.6", + "@babel/plugin-transform-property-literals": "^7.24.6", + "@babel/plugin-transform-regenerator": "^7.24.6", + "@babel/plugin-transform-reserved-words": "^7.24.6", + "@babel/plugin-transform-shorthand-properties": "^7.24.6", + "@babel/plugin-transform-spread": "^7.24.6", + "@babel/plugin-transform-sticky-regex": "^7.24.6", + "@babel/plugin-transform-template-literals": "^7.24.6", + "@babel/plugin-transform-typeof-symbol": "^7.24.6", + "@babel/plugin-transform-unicode-escapes": "^7.24.6", + "@babel/plugin-transform-unicode-property-regex": "^7.24.6", + "@babel/plugin-transform-unicode-regex": "^7.24.6", + "@babel/plugin-transform-unicode-sets-regex": "^7.24.6", "@babel/preset-modules": "0.1.6-no-external-plugins", "babel-plugin-polyfill-corejs2": "^0.4.10", "babel-plugin-polyfill-corejs3": "^0.10.4", diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index 92cf81054397..f652b6903192 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -84,7 +84,7 @@ }, "devDependencies": { "@babel/core": "^7.24.6", - "@babel/preset-env": "^7.24.5", + "@babel/preset-env": "^7.24.6", "autoprefixer": "^10.4.19", "babel-loader": "^9.1.3", "copy-webpack-plugin": "^12.0.2", From 67b708f3f41872bb1cbc2cbe86fea67775b0882f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> 
Date: Mon, 3 Jun 2024 12:38:59 +0300 Subject: [PATCH 054/150] chore(deps): bump chart.js in /apps/block_scout_web/assets (#10169) Bumps [chart.js](https://github.com/chartjs/Chart.js) from 4.4.2 to 4.4.3. - [Release notes](https://github.com/chartjs/Chart.js/releases) - [Commits](https://github.com/chartjs/Chart.js/compare/v4.4.2...v4.4.3) --- updated-dependencies: - dependency-name: chart.js dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/block_scout_web/assets/package-lock.json | 14 +++++++------- apps/block_scout_web/assets/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index 72b6a74fcbb6..f13ee11fad65 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -14,7 +14,7 @@ "assert": "^2.1.0", "bignumber.js": "^9.1.2", "bootstrap": "^4.6.0", - "chart.js": "^4.4.2", + "chart.js": "^4.4.3", "chartjs-adapter-luxon": "^1.3.1", "clipboard": "^2.0.11", "core-js": "^3.37.1", @@ -5595,9 +5595,9 @@ } }, "node_modules/chart.js": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.2.tgz", - "integrity": "sha512-6GD7iKwFpP5kbSD4MeRRRlTnQvxfQREy36uEtm1hzHzcOqwWx0YEHuspuoNlslu+nciLIB7fjjsHkUv/FzFcOg==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.3.tgz", + "integrity": "sha512-qK1gkGSRYcJzqrrzdR6a+I0vQ4/R+SoODXyAjscQ/4mzuNzySaMCd+hyVxitSY1+L2fjPD1Gbn+ibNqRmwQeLw==", "dependencies": { "@kurkle/color": "^0.3.0" }, @@ -22059,9 +22059,9 @@ "dev": true }, "chart.js": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.2.tgz", - "integrity": 
"sha512-6GD7iKwFpP5kbSD4MeRRRlTnQvxfQREy36uEtm1hzHzcOqwWx0YEHuspuoNlslu+nciLIB7fjjsHkUv/FzFcOg==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.3.tgz", + "integrity": "sha512-qK1gkGSRYcJzqrrzdR6a+I0vQ4/R+SoODXyAjscQ/4mzuNzySaMCd+hyVxitSY1+L2fjPD1Gbn+ibNqRmwQeLw==", "requires": { "@kurkle/color": "^0.3.0" } diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index f652b6903192..1bebf8a8ce42 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -26,7 +26,7 @@ "assert": "^2.1.0", "bignumber.js": "^9.1.2", "bootstrap": "^4.6.0", - "chart.js": "^4.4.2", + "chart.js": "^4.4.3", "chartjs-adapter-luxon": "^1.3.1", "clipboard": "^2.0.11", "core-js": "^3.37.1", From 0e7f09a5f080da790f16201c4aca60ea0802eab5 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Mon, 3 Jun 2024 16:34:37 +0400 Subject: [PATCH 055/150] fix: Add a separate db url for events listener (#10164) --- .../publish-docker-image-for-eth-sepolia.yml | 38 ++++++++++++++++++- .../lib/explorer/chain/events/listener.ex | 8 ++++ .../lib/explorer/repo/config_helper.ex | 4 +- config/runtime/prod.exs | 1 + docker-compose/envs/common-blockscout.env | 1 + 5 files changed, 49 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-docker-image-for-eth-sepolia.yml b/.github/workflows/publish-docker-image-for-eth-sepolia.yml index d63d935df4a6..a929c1d70127 100644 --- a/.github/workflows/publish-docker-image-for-eth-sepolia.yml +++ b/.github/workflows/publish-docker-image-for-eth-sepolia.yml @@ -20,7 +20,7 @@ jobs: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push Docker image + - name: Build and push Docker image (indexer + API) uses: docker/build-push-action@v5 with: context: . 
@@ -37,4 +37,40 @@ jobs: ADMIN_PANEL_ENABLED=false CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + + - name: Build and push Docker image (indexer) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer + build-args: | + CHAIN_TYPE=ethereum + CACHE_EXCHANGE_RATES_PERIOD= + DISABLE_WEBAPP=true + DISABLE_API=true + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + ADMIN_PANEL_ENABLED=false + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + + - name: Build and push Docker image (API) + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-api + build-args: | + CHAIN_TYPE=ethereum + CACHE_EXCHANGE_RATES_PERIOD= + DISABLE_WEBAPP=true + DISABLE_INDEXER=true + CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= + ADMIN_PANEL_ENABLED=false + CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} \ No newline at end of file diff --git a/apps/explorer/lib/explorer/chain/events/listener.ex b/apps/explorer/lib/explorer/chain/events/listener.ex index ac59d95812f5..4a771d3afdd5 100644 --- a/apps/explorer/lib/explorer/chain/events/listener.ex +++ b/apps/explorer/lib/explorer/chain/events/listener.ex @@ -6,6 +6,7 @@ defmodule Explorer.Chain.Events.Listener do use GenServer alias Explorer.Repo + alias Explorer.Repo.ConfigHelper alias Explorer.Utility.EventNotification alias Postgrex.Notifications @@ -17,6 +18,7 @@ 
defmodule Explorer.Chain.Events.Listener do {:ok, pid} = :explorer |> Application.get_env(Explorer.Repo) + |> Keyword.merge(listener_db_parameters()) |> Notifications.start_link() ref = Notifications.listen!(pid, channel) @@ -81,4 +83,10 @@ defmodule Explorer.Chain.Events.Listener do data end end + + defp listener_db_parameters do + listener_db_url = Application.get_env(:explorer, Repo)[:listener_url] || Application.get_env(:explorer, Repo)[:url] + + ConfigHelper.extract_parameters(listener_db_url) + end end diff --git a/apps/explorer/lib/explorer/repo/config_helper.ex b/apps/explorer/lib/explorer/repo/config_helper.ex index 26e53f31e7cf..68b18e005a39 100644 --- a/apps/explorer/lib/explorer/repo/config_helper.ex +++ b/apps/explorer/lib/explorer/repo/config_helper.ex @@ -54,10 +54,10 @@ defmodule Explorer.Repo.ConfigHelper do def ssl_enabled?, do: String.equivalent?(System.get_env("ECTO_USE_SSL") || "true", "true") - defp extract_parameters(empty) when empty == nil or empty == "", do: [] + def extract_parameters(empty) when empty == nil or empty == "", do: [] # sobelow_skip ["DOS.StringToAtom"] - defp extract_parameters(database_url) do + def extract_parameters(database_url) do ~r/\w*:\/\/(?[a-zA-Z0-9_-]*):(?[a-zA-Z0-9-*#!%^&$_.]*)?@(?(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])):(?\d+)\/(?[a-zA-Z0-9_-]*)/ |> Regex.named_captures(database_url) |> Keyword.new(fn {k, v} -> {String.to_atom(k), v} end) diff --git a/config/runtime/prod.exs b/config/runtime/prod.exs index eb54b61f33be..b88b35471925 100644 --- a/config/runtime/prod.exs +++ b/config/runtime/prod.exs @@ -33,6 +33,7 @@ queue_target = ConfigHelper.parse_integer_env_var("DATABASE_QUEUE_TARGET", 50) # Configures the database config :explorer, Explorer.Repo, url: System.get_env("DATABASE_URL"), + listener_url: System.get_env("DATABASE_EVENT_URL"), pool_size: pool_size, ssl: ExplorerConfigHelper.ssl_enabled?(), queue_target: queue_target diff --git 
a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index 66728d07b52d..047c110f4037 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -2,6 +2,7 @@ ETHEREUM_JSONRPC_VARIANT=geth ETHEREUM_JSONRPC_HTTP_URL=http://host.docker.internal:8545/ # ETHEREUM_JSONRPC_FALLBACK_HTTP_URL= DATABASE_URL=postgresql://blockscout:ceWb1MeLBEeOIfk65gU8EjF8@db:5432/blockscout +# DATABASE_EVENT_URL= # DATABASE_QUEUE_TARGET # TEST_DATABASE_URL= # TEST_DATABASE_READ_ONLY_API_URL= From ba494167094b61d3b5d463bab3d44187b36c9f3c Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Mon, 3 Jun 2024 17:59:54 +0400 Subject: [PATCH 056/150] fix: Move auth routes to general router (#10153) * fix: Move auth routes to general router * Move account routes to the separate router --- apps/block_scout_web/lib/block_scout_web.ex | 3 + .../lib/block_scout_web/api_router.ex | 83 +-------- .../lib/block_scout_web/router.ex | 3 + .../block_scout_web/routers/account_router.ex | 173 ++++++++++++++++++ .../lib/block_scout_web/web_router.ex | 71 +------ .../account/custom_abi_controller_test.exs | 34 ++-- .../block_scout_web/test/support/conn_case.ex | 1 + 7 files changed, 202 insertions(+), 166 deletions(-) create mode 100644 apps/block_scout_web/lib/block_scout_web/routers/account_router.ex diff --git a/apps/block_scout_web/lib/block_scout_web.ex b/apps/block_scout_web/lib/block_scout_web.ex index c56ab8c116fa..4df825cff2e0 100644 --- a/apps/block_scout_web/lib/block_scout_web.ex +++ b/apps/block_scout_web/lib/block_scout_web.ex @@ -27,6 +27,7 @@ defmodule BlockScoutWeb do import BlockScoutWeb.WebRouter.Helpers, except: [static_path: 2] import BlockScoutWeb.Gettext import BlockScoutWeb.ErrorHelper + import BlockScoutWeb.Routers.AccountRouter.Helpers, except: [static_path: 2] import Plug.Conn alias BlockScoutWeb.AdminRouter.Helpers, as: AdminRoutes @@ -56,6 +57,8 @@ defmodule 
BlockScoutWeb do WeiHelper } + import BlockScoutWeb.Routers.AccountRouter.Helpers, except: [static_path: 2] + import Explorer.Chain.CurrencyHelper, only: [divide_decimals: 2] import BlockScoutWeb.WebRouter.Helpers, except: [static_path: 2] diff --git a/apps/block_scout_web/lib/block_scout_web/api_router.ex b/apps/block_scout_web/lib/block_scout_web/api_router.ex index 53eae5775bff..24e323db8c5b 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/api_router.ex @@ -14,7 +14,8 @@ defmodule BlockScoutWeb.ApiRouter do """ use BlockScoutWeb, :router alias BlockScoutWeb.{AddressTransactionController, APIKeyV2Router, SmartContractsApiV2Router, UtilsApiV2Router} - alias BlockScoutWeb.Plug.{CheckAccountAPI, CheckApiV2, RateLimit} + alias BlockScoutWeb.Plug.{CheckApiV2, RateLimit} + alias BlockScoutWeb.Routers.AccountRouter @max_query_string_length 5_000 @@ -36,23 +37,6 @@ defmodule BlockScoutWeb.ApiRouter do plug(:accepts, ["json"]) end - pipeline :account_api do - plug( - Plug.Parsers, - parsers: [:urlencoded, :multipart, :json], - length: 100_000, - query_string_length: @max_query_string_length, - pass: ["*/*"], - json_decoder: Poison - ) - - plug(BlockScoutWeb.Plug.Logger, application: :api) - plug(:accepts, ["json"]) - plug(:fetch_session) - plug(:protect_from_forgery) - plug(CheckAccountAPI) - end - pipeline :api_v2 do plug( Plug.Parsers, @@ -98,70 +82,9 @@ defmodule BlockScoutWeb.ApiRouter do plug(RateLimit, graphql?: true) end - alias BlockScoutWeb.Account.Api.V2.{AuthenticateController, EmailController, TagsController, UserController} alias BlockScoutWeb.API.V2 - scope "/account/v2", as: :account_v2 do - pipe_through(:account_api) - - get("/authenticate", AuthenticateController, :authenticate_get) - post("/authenticate", AuthenticateController, :authenticate_post) - - get("/get_csrf", UserController, :get_csrf) - - scope "/email" do - get("/resend", EmailController, :resend_email) - end - - scope "/user" 
do - get("/info", UserController, :info) - - get("/watchlist", UserController, :watchlist) - delete("/watchlist/:id", UserController, :delete_watchlist) - post("/watchlist", UserController, :create_watchlist) - put("/watchlist/:id", UserController, :update_watchlist) - - get("/api_keys", UserController, :api_keys) - delete("/api_keys/:api_key", UserController, :delete_api_key) - post("/api_keys", UserController, :create_api_key) - put("/api_keys/:api_key", UserController, :update_api_key) - - get("/custom_abis", UserController, :custom_abis) - delete("/custom_abis/:id", UserController, :delete_custom_abi) - post("/custom_abis", UserController, :create_custom_abi) - put("/custom_abis/:id", UserController, :update_custom_abi) - - get("/public_tags", UserController, :public_tags_requests) - delete("/public_tags/:id", UserController, :delete_public_tags_request) - post("/public_tags", UserController, :create_public_tags_request) - put("/public_tags/:id", UserController, :update_public_tags_request) - - scope "/tags" do - get("/address/", UserController, :tags_address) - get("/address/:id", UserController, :tags_address) - delete("/address/:id", UserController, :delete_tag_address) - post("/address/", UserController, :create_tag_address) - put("/address/:id", UserController, :update_tag_address) - - get("/transaction/", UserController, :tags_transaction) - get("/transaction/:id", UserController, :tags_transaction) - delete("/transaction/:id", UserController, :delete_tag_transaction) - post("/transaction/", UserController, :create_tag_transaction) - put("/transaction/:id", UserController, :update_tag_transaction) - end - end - end - - scope "/account/v2" do - pipe_through(:api) - pipe_through(:account_api) - - scope "/tags" do - get("/address/:address_hash", TagsController, :tags_address) - - get("/transaction/:transaction_hash", TagsController, :tags_transaction) - end - end + forward("/account", AccountRouter) scope "/v2/import" do pipe_through(:api_v2_no_session) diff 
--git a/apps/block_scout_web/lib/block_scout_web/router.ex b/apps/block_scout_web/lib/block_scout_web/router.ex index 4376054f7dae..b64a0d1c7d01 100644 --- a/apps/block_scout_web/lib/block_scout_web/router.ex +++ b/apps/block_scout_web/lib/block_scout_web/router.ex @@ -3,6 +3,7 @@ defmodule BlockScoutWeb.Router do alias BlockScoutWeb.Plug.{GraphQL, RateLimit} alias BlockScoutWeb.{ApiRouter, WebRouter} + alias BlockScoutWeb.Routers.AccountRouter @max_query_string_length 5_000 @@ -55,6 +56,8 @@ defmodule BlockScoutWeb.Router do plug(RateLimit, graphql?: true) end + match(:*, "/auth/*path", AccountRouter, []) + forward("/api", ApiRouter) scope "/graphiql" do diff --git a/apps/block_scout_web/lib/block_scout_web/routers/account_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/account_router.ex new file mode 100644 index 000000000000..6263aa6d7965 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/routers/account_router.ex @@ -0,0 +1,173 @@ +defmodule BlockScoutWeb.Routers.AccountRouter do + @moduledoc """ + Router for account-related requests + """ + use BlockScoutWeb, :router + + alias BlockScoutWeb.Account.Api.V2.{AuthenticateController, EmailController, TagsController, UserController} + alias BlockScoutWeb.Plug.{CheckAccountAPI, CheckAccountWeb} + + @max_query_string_length 5_000 + + pipeline :account_web do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 100_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + + plug(BlockScoutWeb.Plug.Logger, application: :block_scout_web) + plug(:accepts, ["html"]) + plug(:fetch_session) + plug(:fetch_flash) + plug(CheckAccountWeb) + plug(:protect_from_forgery) + plug(BlockScoutWeb.CSPHeader) + plug(BlockScoutWeb.ChecksumAddress) + end + + pipeline :account_api do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 100_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: 
Poison + ) + + plug(BlockScoutWeb.Plug.Logger, application: :api) + plug(:accepts, ["json"]) + plug(:fetch_session) + plug(:protect_from_forgery) + plug(CheckAccountAPI) + end + + pipeline :api do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 20_000_000, + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + + plug(BlockScoutWeb.Plug.Logger, application: :api) + plug(:accepts, ["json"]) + end + + scope "/auth", BlockScoutWeb do + pipe_through(:account_web) + + get("/profile", Account.AuthController, :profile) + get("/logout", Account.AuthController, :logout) + get("/:provider", Account.AuthController, :request) + get("/:provider/callback", Account.AuthController, :callback) + end + + scope "/", BlockScoutWeb do + pipe_through(:account_web) + + resources("/tag_address", Account.TagAddressController, + only: [:index, :new, :create, :delete], + as: :tag_address + ) + + resources("/tag_transaction", Account.TagTransactionController, + only: [:index, :new, :create, :delete], + as: :tag_transaction + ) + + resources("/watchlist", Account.WatchlistController, + only: [:show], + singleton: true, + as: :watchlist + ) + + resources("/watchlist_address", Account.WatchlistAddressController, + only: [:new, :create, :edit, :update, :delete], + as: :watchlist_address + ) + + resources("/api_key", Account.ApiKeyController, + only: [:new, :create, :edit, :update, :delete, :index], + as: :api_key + ) + + resources("/custom_abi", Account.CustomABIController, + only: [:new, :create, :edit, :update, :delete, :index], + as: :custom_abi + ) + + resources("/public_tags_request", Account.PublicTagsRequestController, + only: [:new, :create, :edit, :update, :delete, :index], + as: :public_tags_request + ) + end + + scope "/v2", as: :account_v2 do + pipe_through(:account_api) + + get("/authenticate", AuthenticateController, :authenticate_get) + post("/authenticate", AuthenticateController, :authenticate_post) + + 
get("/get_csrf", UserController, :get_csrf) + + scope "/email" do + get("/resend", EmailController, :resend_email) + end + + scope "/user" do + get("/info", UserController, :info) + + get("/watchlist", UserController, :watchlist) + delete("/watchlist/:id", UserController, :delete_watchlist) + post("/watchlist", UserController, :create_watchlist) + put("/watchlist/:id", UserController, :update_watchlist) + + get("/api_keys", UserController, :api_keys) + delete("/api_keys/:api_key", UserController, :delete_api_key) + post("/api_keys", UserController, :create_api_key) + put("/api_keys/:api_key", UserController, :update_api_key) + + get("/custom_abis", UserController, :custom_abis) + delete("/custom_abis/:id", UserController, :delete_custom_abi) + post("/custom_abis", UserController, :create_custom_abi) + put("/custom_abis/:id", UserController, :update_custom_abi) + + get("/public_tags", UserController, :public_tags_requests) + delete("/public_tags/:id", UserController, :delete_public_tags_request) + post("/public_tags", UserController, :create_public_tags_request) + put("/public_tags/:id", UserController, :update_public_tags_request) + + scope "/tags" do + get("/address/", UserController, :tags_address) + get("/address/:id", UserController, :tags_address) + delete("/address/:id", UserController, :delete_tag_address) + post("/address/", UserController, :create_tag_address) + put("/address/:id", UserController, :update_tag_address) + + get("/transaction/", UserController, :tags_transaction) + get("/transaction/:id", UserController, :tags_transaction) + delete("/transaction/:id", UserController, :delete_tag_transaction) + post("/transaction/", UserController, :create_tag_transaction) + put("/transaction/:id", UserController, :update_tag_transaction) + end + end + end + + scope "/v2" do + pipe_through(:api) + pipe_through(:account_api) + + scope "/tags" do + get("/address/:address_hash", TagsController, :tags_address) + + get("/transaction/:transaction_hash", 
TagsController, :tags_transaction) + end + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/web_router.ex b/apps/block_scout_web/lib/block_scout_web/web_router.ex index 2793fd810386..cabf0ed4e31b 100644 --- a/apps/block_scout_web/lib/block_scout_web/web_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/web_router.ex @@ -5,7 +5,7 @@ defmodule BlockScoutWeb.WebRouter do use BlockScoutWeb, :router require Ueberauth - alias BlockScoutWeb.Plug.CheckAccountWeb + alias BlockScoutWeb.Routers.AccountRouter @max_query_string_length 5_000 @@ -28,78 +28,11 @@ defmodule BlockScoutWeb.WebRouter do plug(BlockScoutWeb.ChecksumAddress) end - pipeline :account do - plug( - Plug.Parsers, - parsers: [:urlencoded, :multipart, :json], - length: 100_000, - query_string_length: @max_query_string_length, - pass: ["*/*"], - json_decoder: Poison - ) - - plug(BlockScoutWeb.Plug.Logger, application: :block_scout_web) - plug(:accepts, ["html"]) - plug(:fetch_session) - plug(:fetch_flash) - plug(CheckAccountWeb) - plug(:protect_from_forgery) - plug(BlockScoutWeb.CSPHeader) - plug(BlockScoutWeb.ChecksumAddress) - end - if Mix.env() == :dev do forward("/sent_emails", Bamboo.SentEmailViewerPlug) end - scope "/auth", BlockScoutWeb do - pipe_through(:account) - - get("/profile", Account.AuthController, :profile) - get("/logout", Account.AuthController, :logout) - get("/:provider", Account.AuthController, :request) - get("/:provider/callback", Account.AuthController, :callback) - end - - scope "/account", BlockScoutWeb do - pipe_through(:account) - - resources("/tag_address", Account.TagAddressController, - only: [:index, :new, :create, :delete], - as: :tag_address - ) - - resources("/tag_transaction", Account.TagTransactionController, - only: [:index, :new, :create, :delete], - as: :tag_transaction - ) - - resources("/watchlist", Account.WatchlistController, - only: [:show], - singleton: true, - as: :watchlist - ) - - resources("/watchlist_address", 
Account.WatchlistAddressController, - only: [:new, :create, :edit, :update, :delete], - as: :watchlist_address - ) - - resources("/api_key", Account.ApiKeyController, - only: [:new, :create, :edit, :update, :delete, :index], - as: :api_key - ) - - resources("/custom_abi", Account.CustomABIController, - only: [:new, :create, :edit, :update, :delete, :index], - as: :custom_abi - ) - - resources("/public_tags_request", Account.PublicTagsRequestController, - only: [:new, :create, :edit, :update, :delete, :index], - as: :public_tags_request - ) - end + forward("/account", AccountRouter) # Disallows Iframes (write routes) scope "/", BlockScoutWeb do diff --git a/apps/block_scout_web/test/block_scout_web/controllers/account/custom_abi_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/account/custom_abi_controller_test.exs index b31d3be2284a..d2b56b163aad 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/account/custom_abi_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/account/custom_abi_controller_test.exs @@ -18,7 +18,7 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do test "custom ABI page opens correctly", %{conn: conn} do result_conn = conn - |> get(custom_abi_path(conn, :index)) + |> get("/account/custom_abi") assert html_response(result_conn, 200) =~ "Create a Custom ABI to interact with contracts." 
end @@ -34,7 +34,7 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do result_conn = conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => custom_abi})) + |> post("/account/custom_abi", %{"custom_abi" => custom_abi}) assert html_response(result_conn, 200) =~ "Add Custom ABI" assert html_response(result_conn, 200) =~ to_string(contract_address.hash) @@ -42,7 +42,7 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do result_conn_1 = conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => Map.put(custom_abi, "abi", "123")})) + |> post("/account/custom_abi", %{"custom_abi" => Map.put(custom_abi, "abi", "123")}) assert html_response(result_conn_1, 200) =~ "Add Custom ABI" assert html_response(result_conn_1, 200) =~ to_string(contract_address.hash) @@ -50,7 +50,7 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do result_conn_2 = conn - |> get(custom_abi_path(conn, :index)) + |> get("/account/custom_abi") assert html_response(result_conn_2, 200) =~ "Create a Custom ABI to interact with contracts." refute html_response(result_conn_2, 200) =~ to_string(contract_address.hash) @@ -67,17 +67,17 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do result_conn = conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => custom_abi})) + |> post("/account/custom_abi", %{"custom_abi" => custom_abi}) - assert redirected_to(result_conn) == custom_abi_path(conn, :index) + assert redirected_to(result_conn) == "/account/custom_abi" - result_conn_2 = get(result_conn, custom_abi_path(conn, :index)) + result_conn_2 = get(result_conn, "/account/custom_abi") assert html_response(result_conn_2, 200) =~ to_string(contract_address.hash) assert html_response(result_conn_2, 200) =~ "Create a Custom ABI to interact with contracts." 
result_conn_1 = conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => custom_abi})) + |> post("/account/custom_abi", %{"custom_abi" => custom_abi}) assert html_response(result_conn_1, 200) =~ "Add Custom ABI" assert html_response(result_conn_1, 200) =~ to_string(contract_address.hash) @@ -95,7 +95,7 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do result_conn = conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => custom_abi})) + |> post("/account/custom_abi", %{"custom_abi" => custom_abi}) assert html_response(result_conn, 200) =~ "Add Custom ABI" assert html_response(result_conn, 200) =~ to_string(contract_address.hash) @@ -114,15 +114,15 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do } assert conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => custom_abi})) - |> redirected_to() == custom_abi_path(conn, :index) + |> post("/account/custom_abi", %{"custom_abi" => custom_abi}) + |> redirected_to() == "/account/custom_abi" to_string(address.hash) end) assert abi_list = conn - |> get(custom_abi_path(conn, :index)) + |> get("/account/custom_abi") |> html_response(200) Enum.each(addresses, fn address -> assert abi_list =~ address end) @@ -137,7 +137,7 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do assert error_form = conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => custom_abi})) + |> post("/account/custom_abi", %{"custom_abi" => custom_abi}) |> html_response(200) assert error_form =~ "Add Custom ABI" @@ -146,7 +146,7 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do assert abi_list_new = conn - |> get(custom_abi_path(conn, :index)) + |> get("/account/custom_abi") |> html_response(200) Enum.each(addresses, fn address -> assert abi_list_new =~ address end) @@ -169,11 +169,11 @@ defmodule BlockScoutWeb.Account.CustomABIControllerTest do result_conn = conn - |> post(custom_abi_path(conn, :create, %{"custom_abi" => custom_abi})) + |> post("/account/custom_abi", 
%{"custom_abi" => custom_abi}) - assert redirected_to(result_conn) == custom_abi_path(conn, :index) + assert redirected_to(result_conn) == "/account/custom_abi" - result_conn_2 = get(result_conn, custom_abi_path(conn, :index)) + result_conn_2 = get(result_conn, "/account/custom_abi") assert html_response(result_conn_2, 200) =~ to_string(contract_address.hash) assert html_response(result_conn_2, 200) =~ "Create a Custom ABI to interact with contracts." diff --git a/apps/block_scout_web/test/support/conn_case.ex b/apps/block_scout_web/test/support/conn_case.ex index 5ae6216f0c01..cd03ad31e13a 100644 --- a/apps/block_scout_web/test/support/conn_case.ex +++ b/apps/block_scout_web/test/support/conn_case.ex @@ -22,6 +22,7 @@ defmodule BlockScoutWeb.ConnCase do import Phoenix.ConnTest import BlockScoutWeb.Router.Helpers import BlockScoutWeb.WebRouter.Helpers, except: [static_path: 2] + import BlockScoutWeb.Routers.AccountRouter.Helpers, except: [static_path: 2] import Bureaucrat.Helpers # The default endpoint for testing From a072be34b733232a3142ad433255f0456c6eb759 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Wed, 5 Jun 2024 12:14:34 +0300 Subject: [PATCH 057/150] chore: Return is verified=true for verified minimal proxy pattern (#10132) * Return is verified=true for verified minimal proxy pattern * Refactor proxy object for API --- .../templates/address/overview.html.eex | 4 +- .../views/api/v2/address_view.ex | 63 ++++++------ .../block_scout_web/views/api/v2/helper.ex | 78 ++++++++++++--- .../account/api/v2/user_controller_test.exs | 10 +- .../api/v2/address_controller_test.exs | 95 ++++++++++++++++++- 5 files changed, 198 insertions(+), 52 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/templates/address/overview.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/address/overview.html.eex index 95155a7e91f8..dfc00f704859 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/address/overview.html.eex +++ 
b/apps/block_scout_web/lib/block_scout_web/templates/address/overview.html.eex @@ -124,7 +124,9 @@ <% end %> <%= if @is_proxy do %> - <% {implementation_address_, name} = Implementation.get_implementation(@address.smart_contract) %> + <% {implementation_addresses, implementation_names} = Implementation.get_implementation(@address.smart_contract) %> + <% implementation_address_ = Enum.at(implementation_addresses, 0) %> + <% name = Enum.at(implementation_names, 0) %> <% implementation_address = implementation_address_ || "0x0000000000000000000000000000000000000000" %>
diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex index abd555a1cd37..5d131ce7fb27 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex @@ -100,16 +100,7 @@ defmodule BlockScoutWeb.API.V2.AddressView do {addresses, names} <- Implementation.get_implementation(address_with_smart_contract.smart_contract, @api_true), false <- addresses && Enum.empty?(addresses) do - addresses - |> Enum.zip(names) - |> Enum.reduce([], fn {address, name}, acc -> - with {:ok, address_hash} <- Chain.string_to_address_hash(address), - checksummed_address <- Address.checksum(address_hash) do - [%{"address" => checksummed_address, "name" => name} | acc] - else - _ -> acc - end - end) + Helper.proxy_object_info(addresses, names) else _ -> [] @@ -125,33 +116,41 @@ defmodule BlockScoutWeb.API.V2.AddressView do # todo: added for backward compatibility, remove when frontend unbound from these props {implementation_address, implementation_name} = single_implementation(implementations) - Map.merge(base_info, %{ - "creator_address_hash" => creator_hash && Address.checksum(creator_hash), - "creation_tx_hash" => creation_tx, - "token" => token, - "coin_balance" => balance, - "exchange_rate" => exchange_rate, - # todo: added for backward compatibility, remove when frontend unbound from these props - "implementation_address" => implementation_address, - "implementation_name" => implementation_name, - "implementations" => implementations, - "block_number_balance_updated_at" => address.fetched_coin_balance_block_number, - "has_decompiled_code" => AddressView.has_decompiled_code?(address), - "has_validated_blocks" => Counters.check_if_validated_blocks_at_address(address.hash, @api_true), - "has_logs" => Counters.check_if_logs_at_address(address.hash, @api_true), - "has_tokens" => 
Counters.check_if_tokens_at_address(address.hash, @api_true), - "has_token_transfers" => Counters.check_if_token_transfers_at_address(address.hash, @api_true), - "watchlist_address_id" => Chain.select_watchlist_address_id(get_watchlist_id(conn), address.hash), - "has_beacon_chain_withdrawals" => Counters.check_if_withdrawals_at_address(address.hash, @api_true) - }) + extended_info = + Map.merge(base_info, %{ + "creator_address_hash" => creator_hash && Address.checksum(creator_hash), + "creation_tx_hash" => creation_tx, + "token" => token, + "coin_balance" => balance, + "exchange_rate" => exchange_rate, + "block_number_balance_updated_at" => address.fetched_coin_balance_block_number, + "has_decompiled_code" => AddressView.has_decompiled_code?(address), + "has_validated_blocks" => Counters.check_if_validated_blocks_at_address(address.hash, @api_true), + "has_logs" => Counters.check_if_logs_at_address(address.hash, @api_true), + "has_tokens" => Counters.check_if_tokens_at_address(address.hash, @api_true), + "has_token_transfers" => Counters.check_if_token_transfers_at_address(address.hash, @api_true), + "watchlist_address_id" => Chain.select_watchlist_address_id(get_watchlist_id(conn), address.hash), + "has_beacon_chain_withdrawals" => Counters.check_if_withdrawals_at_address(address.hash, @api_true) + }) + + if Enum.empty?(implementations) do + extended_info + else + Map.merge(extended_info, %{ + # todo: added for backward compatibility, remove when frontend unbound from these props + "implementation_address" => implementation_address, + "implementation_name" => implementation_name, + "implementations" => implementations + }) + end end defp single_implementation(implementations) do %{"address" => implementation_address, "name" => implementation_name} = - if implementations && !Enum.empty?(implementations) do - implementations |> Enum.at(0) - else + if Enum.empty?(implementations) do %{"address" => nil, "name" => nil} + else + implementations |> Enum.at(0) end 
{implementation_address, implementation_name} diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index d2157538a3a5..2c958a5c3900 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -4,7 +4,8 @@ defmodule BlockScoutWeb.API.V2.Helper do """ alias Ecto.Association.NotLoaded - alias Explorer.Chain.Address + alias Explorer.Chain + alias Explorer.Chain.{Address, Hash} alias Explorer.Chain.SmartContract.Proxy.Models.Implementation alias Explorer.Chain.Transaction.History.TransactionStats @@ -55,19 +56,22 @@ defmodule BlockScoutWeb.API.V2.Helper do @spec address_with_info(any(), any()) :: nil | %{optional(<<_::32, _::_*8>>) => any()} def address_with_info(%Address{} = address, _address_hash) do smart_contract? = Address.smart_contract?(address) - implementation_names = if smart_contract?, do: Implementation.names(address), else: [] - formatted_implementation_names = - implementation_names - |> Enum.map(fn name -> - %{"name" => name} - end) + {implementation_address_hashes, implementation_names, implementation_address, implementation_name, + proxy_implementations} = + if smart_contract? 
do + proxy_implementations = Implementation.get_proxy_implementations(address.hash) - implementation_name = - if Enum.empty?(implementation_names) do - nil + implementation_address_hashes = (proxy_implementations && proxy_implementations.address_hashes) || [] + implementation_names = (proxy_implementations && proxy_implementations.names) || [] + + implementation_address = implementation_address_hashes |> Enum.at(0) + implementation_name = implementation_names |> Enum.at(0) + + {implementation_address_hashes, implementation_names, implementation_address, implementation_name, + proxy_implementations} else - implementation_names |> Enum.at(0) + {[], [], nil, nil, nil} end %{ @@ -75,9 +79,10 @@ defmodule BlockScoutWeb.API.V2.Helper do "is_contract" => smart_contract?, "name" => address_name(address), # todo: added for backward compatibility, remove when frontend unbound from these props + "implementation_address" => implementation_address, "implementation_name" => implementation_name, - "implementations" => formatted_implementation_names, - "is_verified" => verified?(address), + "implementations" => proxy_object_info(implementation_address_hashes, implementation_names), + "is_verified" => verified?(address) || verified_minimal_proxy?(proxy_implementations), "ens_domain_name" => address.ens_domain_name, "metadata" => address.metadata } @@ -104,6 +109,7 @@ defmodule BlockScoutWeb.API.V2.Helper do "is_contract" => false, "name" => nil, # todo: added for backward compatibility, remove when frontend unbound from these props + "implementation_address" => nil, "implementation_name" => nil, "implementations" => [], "is_verified" => nil, @@ -112,6 +118,52 @@ defmodule BlockScoutWeb.API.V2.Helper do } end + @doc """ + Retrieves formatted proxy object based on its implementation addresses and names. + + ## Parameters + + * `implementation_addresses` - A list of implementation addresses for the proxy object. 
+ * `implementation_names` - A list of implementation names for the proxy object. + + ## Returns + + A list of maps containing information about the proxy object. + + """ + @spec proxy_object_info([String.t() | Hash.Address.t()], [String.t() | nil]) :: [map()] + def proxy_object_info([], []), do: [] + + def proxy_object_info(implementation_addresses, implementation_names) do + implementation_addresses + |> Enum.zip(implementation_names) + |> Enum.reduce([], fn {address, name}, acc -> + case address do + %Hash{} = address_hash -> + [%{"address" => Address.checksum(address_hash), "name" => name} | acc] + + _ -> + with {:ok, address_hash} <- Chain.string_to_address_hash(address), + checksummed_address <- Address.checksum(address_hash) do + [%{"address" => checksummed_address, "name" => name} | acc] + else + _ -> acc + end + end + end) + end + + defp minimal_proxy_pattern?(proxy_implementations) do + proxy_implementations.proxy_type == :eip1167 + end + + defp verified_minimal_proxy?(nil), do: false + + defp verified_minimal_proxy?(proxy_implementations) do + (minimal_proxy_pattern?(proxy_implementations) && + Enum.any?(proxy_implementations.names, fn name -> !is_nil(name) end)) || false + end + def address_name(%Address{names: [_ | _] = address_names}) do case Enum.find(address_names, &(&1.primary == true)) do nil -> diff --git a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs index 4c80ab128dab..a5efaeb98df8 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/user_controller_test.exs @@ -152,6 +152,7 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do "address" => %{ "hash" => Address.checksum(addr), # todo: added for backward compatibility, remove when frontend unbound from these props + 
"implementation_address" => nil, "implementation_name" => nil, "implementations" => [], "is_contract" => false, @@ -166,7 +167,7 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do }} end) - assert Enum.all?(created, fn {addr, map_tag, _} -> + assert Enum.all?(created, fn {addr, map_tag, map} -> response = conn |> get("/api/account/v2/tags/address/#{addr}") @@ -182,7 +183,9 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do |> json_response(200) |> Map.get("items") - assert Enum.all?(created, fn {_, _, map} -> map in response end) + assert Enum.all?(created, fn {_, _, map} -> + map in response + end) end test "delete address tag", %{conn: conn} do @@ -208,6 +211,7 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do "address" => %{ "hash" => Address.checksum(addr), # todo: added for backward compatibility, remove when frontend unbound from these props + "implementation_address" => nil, "implementation_name" => nil, "implementations" => [], "is_contract" => false, @@ -222,7 +226,7 @@ defmodule BlockScoutWeb.Account.Api.V2.UserControllerTest do }} end) - assert Enum.all?(created, fn {addr, map_tag, _} -> + assert Enum.all?(created, fn {addr, map_tag, map} -> response = conn |> get("/api/account/v2/tags/address/#{addr}") diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs index 6bdfc7d5e76e..4cbaeff60e2d 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs @@ -62,7 +62,7 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do correct_response = %{ "hash" => Address.checksum(address.hash), "is_contract" => false, - "is_verified" => nil, + "is_verified" => false, "name" => nil, "private_tags" => [], "public_tags" => [], @@ -73,8 +73,8 @@ defmodule 
BlockScoutWeb.API.V2.AddressControllerTest do "coin_balance" => nil, "exchange_rate" => nil, # todo: added for backward compatibility, remove when frontend unbound from these props - "implementation_name" => nil, "implementation_address" => nil, + "implementation_name" => nil, "implementations" => [], "block_number_balance_updated_at" => nil, "has_decompiled_code" => false, @@ -95,7 +95,96 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do assert ^correct_response = json_response(request, 200) end - test "get contract info", %{conn: conn} do + test "get EIP-1167 proxy contract info", %{conn: conn} do + implementation_contract = + insert(:smart_contract, + name: "Implementation", + external_libraries: [], + constructor_arguments: "", + abi: [ + %{ + "type" => "constructor", + "inputs" => [ + %{"type" => "address", "name" => "_proxyStorage"}, + %{"type" => "address", "name" => "_implementationAddress"} + ] + }, + %{ + "constant" => false, + "inputs" => [%{"name" => "x", "type" => "uint256"}], + "name" => "set", + "outputs" => [], + "payable" => false, + "stateMutability" => "nonpayable", + "type" => "function" + }, + %{ + "constant" => true, + "inputs" => [], + "name" => "get", + "outputs" => [%{"name" => "", "type" => "uint256"}], + "payable" => false, + "stateMutability" => "view", + "type" => "function" + } + ], + license_type: 9 + ) + + implementation_contract_address_hash_string = + Base.encode16(implementation_contract.address_hash.bytes, case: :lower) + + proxy_tx_input = + "0x11b804ab000000000000000000000000" <> + implementation_contract_address_hash_string <> + 
"000000000000000000000000000000000000000000000000000000000000006035323031313537360000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000284e159163400000000000000000000000034420c13696f4ac650b9fafe915553a1abcd7dd30000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000220000000000000000000000000ff5ae9b0a7522736299d797d80b8fc6f31d61100000000000000000000000000ff5ae9b0a7522736299d797d80b8fc6f31d6110000000000000000000000000000000000000000000000000000000000000003e8000000000000000000000000000000000000000000000000000000000000000000000000000000000000000034420c13696f4ac650b9fafe915553a1abcd7dd300000000000000000000000000000000000000000000000000000000000000184f7074696d69736d2053756273637269626572204e465473000000000000000000000000000000000000000000000000000000000000000000000000000000054f504e46540000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000037697066733a2f2f516d66544e504839765651334b5952346d6b52325a6b757756424266456f5a5554545064395538666931503332752f300000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c82bbe41f2cf04e3a8efa18f7032bdd7f6d98a81000000000000000000000000efba8a2a82ec1fb1273806174f5e28fbb917cf9500000000000000000000000000000000000000000000000000000000" + + proxy_deployed_bytecode = + "0x363d3d373d3d3d363d73" <> implementation_contract_address_hash_string <> "5af43d82803e903d91602b57fd5bf3" + + proxy_address = + insert(:contract_address, + contract_code: proxy_deployed_bytecode + ) + + tx = + insert(:transaction, + created_contract_address_hash: proxy_address.hash, + input: proxy_tx_input + ) + |> with_block(status: :ok) + + name = implementation_contract.name + from = 
Address.checksum(tx.from_address_hash) + tx_hash = to_string(tx.hash) + address_hash = Address.checksum(proxy_address.hash) + + insert(:proxy_implementation, + proxy_address_hash: proxy_address.hash, + proxy_type: "eip1167", + address_hashes: [implementation_contract.address_hash], + names: [name] + ) + + request = get(conn, "/api/v2/addresses/#{Address.checksum(proxy_address.hash)}") + + assert %{ + "hash" => ^address_hash, + "is_contract" => true, + "is_verified" => true, + "private_tags" => [], + "public_tags" => [], + "watchlist_names" => [], + "creator_address_hash" => ^from, + "creation_tx_hash" => ^tx_hash, + "implementation_address" => "0x" <> ^implementation_contract_address_hash_string, + "implementations" => [ + %{"address" => "0x" <> ^implementation_contract_address_hash_string, "name" => ^name} + ] + } = json_response(request, 200) + end + + test "get EIP-1967 proxy contract info", %{conn: conn} do smart_contract = insert(:smart_contract) tx = From 59170c0d812fecd180f45ee40fbe938ab2a8b71e Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Thu, 6 Jun 2024 11:32:05 +0300 Subject: [PATCH 058/150] chore: Exclude write methods from read tabs (#10111) * Exclude write methods from Read tab * Reverse corresponding test --- apps/explorer/lib/explorer/smart_contract/helper.ex | 3 ++- apps/explorer/test/explorer/smart_contract/helper_test.exs | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/apps/explorer/lib/explorer/smart_contract/helper.ex b/apps/explorer/lib/explorer/smart_contract/helper.ex index e0451fe035f9..dd74830d16c6 100644 --- a/apps/explorer/lib/explorer/smart_contract/helper.ex +++ b/apps/explorer/lib/explorer/smart_contract/helper.ex @@ -5,6 +5,7 @@ defmodule Explorer.SmartContract.Helper do alias Explorer.{Chain, Helper} alias Explorer.Chain.{Hash, SmartContract} + alias Explorer.SmartContract.Writer alias Phoenix.HTML def queriable_method?(method) do @@ -25,7 +26,7 @@ defmodule Explorer.SmartContract.Helper do def 
read_with_wallet_method?(function), do: !error?(function) && !event?(function) && !constructor?(function) && - !empty_outputs?(function) + !empty_outputs?(function) && !Writer.write_function?(function) def empty_outputs?(function), do: is_nil(function["outputs"]) || function["outputs"] == [] diff --git a/apps/explorer/test/explorer/smart_contract/helper_test.exs b/apps/explorer/test/explorer/smart_contract/helper_test.exs index 202c0b7edc1f..98f67b99f537 100644 --- a/apps/explorer/test/explorer/smart_contract/helper_test.exs +++ b/apps/explorer/test/explorer/smart_contract/helper_test.exs @@ -126,7 +126,7 @@ defmodule Explorer.SmartContract.HelperTest do end describe "read_with_wallet_method?" do - test "returns payable method with output in the read tab" do + test "doesn't return payable method with output in the read tab" do function = %{ "type" => "function", "stateMutability" => "payable", @@ -135,7 +135,7 @@ defmodule Explorer.SmartContract.HelperTest do "inputs" => [] } - assert Helper.read_with_wallet_method?(function) + refute Helper.read_with_wallet_method?(function) end test "doesn't return payable method with no output in the read tab" do From 646a343ba70528ec24e0af3a12779ff9848357c6 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Thu, 6 Jun 2024 12:51:07 +0400 Subject: [PATCH 059/150] chore: Refactor PendingTransactionsSanitizer to use batched requests (#10101) --- .../indexer/pending_transactions_sanitizer.ex | 57 +++++++++++-------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex b/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex index 552d500aff5f..e1ea48996d18 100644 --- a/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex +++ b/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex @@ -8,7 +8,7 @@ defmodule Indexer.PendingTransactionsSanitizer do require Logger - import EthereumJSONRPC, only: [json_rpc: 
2, request: 1] + import EthereumJSONRPC, only: [json_rpc: 2, request: 1, id_to_params: 1] import EthereumJSONRPC.Receipt, only: [to_elixir: 1] alias Ecto.Changeset @@ -64,35 +64,46 @@ defmodule Indexer.PendingTransactionsSanitizer do end defp sanitize_pending_transactions(json_rpc_named_arguments) do + receipts_batch_size = Application.get_env(:indexer, :receipts_batch_size) pending_transactions_list_from_db = Chain.pending_transactions_list() - - pending_transactions_list_from_db - |> Enum.with_index() - |> Enum.each(fn {pending_tx, ind} -> - pending_tx_hash_str = "0x" <> Base.encode16(pending_tx.hash.bytes, case: :lower) - - with {:ok, result} <- - %{id: ind, method: "eth_getTransactionReceipt", params: [pending_tx_hash_str]} - |> request() - |> json_rpc(json_rpc_named_arguments) do - if result do - fetch_block_and_invalidate_wrapper(pending_tx, pending_tx_hash_str, result) - else - Logger.debug( - "Transaction with hash #{pending_tx_hash_str} doesn't exist in the node anymore. We should remove it from Blockscout DB.", - fetcher: :pending_transactions_to_refetch - ) - - fetch_pending_transaction_and_delete(pending_tx) - end - end - end) + id_to_params = id_to_params(pending_transactions_list_from_db) + + with {:ok, responses} <- + id_to_params + |> get_transaction_receipt_requests() + |> Enum.chunk_every(receipts_batch_size) + |> json_rpc(json_rpc_named_arguments) do + Enum.each(responses, fn + %{id: id, result: result} -> + pending_tx = Map.fetch!(id_to_params, id) + + if result do + fetch_block_and_invalidate_wrapper(pending_tx, to_string(pending_tx.hash), result) + else + Logger.debug( + "Transaction with hash #{pending_tx.hash} doesn't exist in the node anymore. 
We should remove it from Blockscout DB.", + fetcher: :pending_transactions_to_refetch + ) + + fetch_pending_transaction_and_delete(pending_tx) + end + + error -> + Logger.error("Error while fetching pending transaction receipt: #{inspect(error)}") + end) + end Logger.debug("Pending transactions are sanitized", fetcher: :pending_transactions_to_refetch ) end + defp get_transaction_receipt_requests(id_to_params) do + Enum.map(id_to_params, fn {id, transaction} -> + request(%{id: id, method: "eth_getTransactionReceipt", params: [to_string(transaction.hash)]}) + end) + end + defp fetch_block_and_invalidate_wrapper(pending_tx, pending_tx_hash_str, result) do block_hash = Map.get(result, "blockHash") From b5780d33fc9a51403e169a41d25ef73874f0aeba Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Thu, 6 Jun 2024 14:35:23 +0400 Subject: [PATCH 060/150] fix: Add the ability to allow empty traces (#10200) --- .../ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex | 15 ++++++++++++++- .../chain/import/runner/internal_transactions.ex | 11 +++++++++-- config/runtime.exs | 1 + docker-compose/envs/common-blockscout.env | 1 + 4 files changed, 25 insertions(+), 3 deletions(-) diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex index a93bce430708..63d53bda9614 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/geth.ex @@ -389,6 +389,7 @@ defmodule EthereumJSONRPC.Geth do [Map.put(last, "error", "execution stopped") | acc] end + # credo:disable-for-next-line /Complexity/ defp parse_call_tracer_calls({%{"type" => upcase_type, "from" => from} = call, index}, acc, trace_address, inner?) 
do case String.downcase(upcase_type) do type when type in ~w(call callcode delegatecall staticcall create create2 selfdestruct revert stop invalid) -> @@ -427,8 +428,12 @@ defmodule EthereumJSONRPC.Geth do if(inner?, do: new_trace_address, else: []) ) + "" -> + unless allow_empty_traces?(), do: log_unknown_type(call) + acc + _unknown_type -> - Logger.warning("Call from a callTracer with an unknown type: #{inspect(call)}") + log_unknown_type(call) acc end end @@ -439,6 +444,10 @@ defmodule EthereumJSONRPC.Geth do |> Enum.reduce(acc, &parse_call_tracer_calls(&1, &2, trace_address)) end + defp log_unknown_type(call) do + Logger.warning("Call from a callTracer with an unknown type: #{inspect(call)}") + end + @spec reduce_internal_transactions_params(list()) :: {:ok, list()} | {:error, list()} def reduce_internal_transactions_params(internal_transactions_params) when is_list(internal_transactions_params) do internal_transactions_params @@ -475,4 +484,8 @@ defmodule EthereumJSONRPC.Geth do defp tracer_type do Application.get_env(:ethereum_jsonrpc, __MODULE__)[:tracer] end + + defp allow_empty_traces? do + Application.get_env(:ethereum_jsonrpc, __MODULE__)[:allow_empty_traces?] 
+ end end diff --git a/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex b/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex index 5fcd7024283d..d3a29f2111d5 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex @@ -333,7 +333,8 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do # common_tuples = MapSet.intersection(required_tuples, candidate_tuples) #should be added # |> MapSet.difference(internal_transactions_tuples) should be replaced with |> MapSet.difference(common_tuples) - # Note: for zetachain, the case "# - there are no internal txs for some transactions" is removed since + # Note: for zetachain or if empty traces are explicitly allowed, + # the case "# - there are no internal txs for some transactions" is removed since # there are may be non-traceable transactions transactions_tuples = MapSet.new(transactions, &{&1.hash, &1.block_number}) @@ -343,7 +344,7 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do all_tuples = MapSet.union(transactions_tuples, internal_transactions_tuples) invalid_block_numbers = - if Application.get_env(:explorer, :chain_type) == :zetachain do + if allow_non_traceable_transactions?() do Enum.reduce(internal_transactions_tuples, [], fn {transaction_hash, block_number}, acc -> # credo:disable-for-next-line case Enum.find(transactions_tuples, fn {t_hash, _block_number} -> t_hash == transaction_hash end) do @@ -362,6 +363,12 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do {:ok, invalid_block_numbers} end + defp allow_non_traceable_transactions? 
do + Application.get_env(:explorer, :chain_type) == :zetachain or + (Application.get_env(:explorer, :json_rpc_named_arguments)[:variant] == EthereumJSONRPC.Geth and + Application.get_env(:ethereum_jsonrpc, EthereumJSONRPC.Geth)[:allow_empty_traces?]) + end + defp valid_internal_transactions(transactions, internal_transactions_params, invalid_block_numbers) do if Enum.empty?(transactions) do {:ok, []} diff --git a/config/runtime.exs b/config/runtime.exs index c9490d7bccd8..72c02fb50b18 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -186,6 +186,7 @@ config :ethereum_jsonrpc, EthereumJSONRPC.HTTP, config :ethereum_jsonrpc, EthereumJSONRPC.Geth, block_traceable?: ConfigHelper.parse_bool_env_var("ETHEREUM_JSONRPC_GETH_TRACE_BY_BLOCK"), + allow_empty_traces?: ConfigHelper.parse_bool_env_var("ETHEREUM_JSONRPC_GETH_ALLOW_EMPTY_TRACES"), debug_trace_timeout: System.get_env("ETHEREUM_JSONRPC_DEBUG_TRACE_TRANSACTION_TIMEOUT", "5s"), tracer: if(ConfigHelper.chain_type() == :polygon_edge, diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index 047c110f4037..08a2e64ba0e6 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -22,6 +22,7 @@ ETHEREUM_JSONRPC_DISABLE_ARCHIVE_BALANCES=false # ETHEREUM_JSONRPC_HTTP_HEADERS= # ETHEREUM_JSONRPC_WAIT_PER_TIMEOUT= # ETHEREUM_JSONRPC_GETH_TRACE_BY_BLOCK= +# ETHEREUM_JSONRPC_GETH_ALLOW_EMPTY_TRACES= IPC_PATH= NETWORK_PATH=/ BLOCKSCOUT_HOST= From d9586c233f798f93b94fc416e93ef94c3521c4bf Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Thu, 6 Jun 2024 15:14:32 +0300 Subject: [PATCH 061/150] fix: Resolve flaky address_controller test for web --- .../controllers/api/v2/address_controller_test.exs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs 
b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs index 4cbaeff60e2d..e727e6af7e2b 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs @@ -159,6 +159,11 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do tx_hash = to_string(tx.hash) address_hash = Address.checksum(proxy_address.hash) + {:ok, implementation_contract_address_hash} = + Chain.string_to_address_hash("0x" <> implementation_contract_address_hash_string) + + checksummed_implementation_contract_address_hash = Address.checksum(implementation_contract_address_hash) + insert(:proxy_implementation, proxy_address_hash: proxy_address.hash, proxy_type: "eip1167", @@ -177,9 +182,9 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do "watchlist_names" => [], "creator_address_hash" => ^from, "creation_tx_hash" => ^tx_hash, - "implementation_address" => "0x" <> ^implementation_contract_address_hash_string, + "implementation_address" => ^checksummed_implementation_contract_address_hash, "implementations" => [ - %{"address" => "0x" <> ^implementation_contract_address_hash_string, "name" => ^name} + %{"address" => ^checksummed_implementation_contract_address_hash, "name" => ^name} ] } = json_response(request, 200) end From 3e77a354ba924140b14bb6e18477731f53abcb79 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Thu, 6 Jun 2024 15:20:14 +0300 Subject: [PATCH 062/150] feat: Add Fee column to Internal transactions CSV export (#10204) * feat: Add Fee column to Internal transactions CSV export * Fix tests --- ...dress_internal_transaction_csv_exporter.ex | 16 +++++++++++--- .../lib/explorer/chain/transaction.ex | 21 ++++++++++++------- ...internal_transaction_csv_exporter_test.exs | 12 ++++++++++- 3 files changed, 38 insertions(+), 11 deletions(-) diff --git 
a/apps/explorer/lib/explorer/chain/csv_export/address_internal_transaction_csv_exporter.ex b/apps/explorer/lib/explorer/chain/csv_export/address_internal_transaction_csv_exporter.ex index de4bcad24d12..855abf7de969 100644 --- a/apps/explorer/lib/explorer/chain/csv_export/address_internal_transaction_csv_exporter.ex +++ b/apps/explorer/lib/explorer/chain/csv_export/address_internal_transaction_csv_exporter.ex @@ -4,7 +4,7 @@ defmodule Explorer.Chain.CSVExport.AddressInternalTransactionCsvExporter do """ alias Explorer.{Chain, PagingOptions} - alias Explorer.Chain.{Address, Hash, Wei} + alias Explorer.Chain.{Address, Hash, Transaction, Wei} alias Explorer.Chain.CSVExport.Helper @paging_options %PagingOptions{page_size: Helper.limit()} @@ -34,6 +34,9 @@ defmodule Explorer.Chain.CSVExport.AddressInternalTransactionCsvExporter do |> Keyword.put(:paging_options, paging_options) |> Keyword.put(:from_block, from_block) |> Keyword.put(:to_block, to_block) + |> Keyword.put(:necessity_by_association, %{ + :transaction => :optional + }) |> (&if(Helper.valid_filter?(filter_type, filter_value, "internal_transactions"), do: &1 |> Keyword.put(:direction, String.to_atom(filter_value)), else: &1 @@ -61,12 +64,18 @@ defmodule Explorer.Chain.CSVExport.AddressInternalTransactionCsvExporter do "Value", "Input", "Output", - "ErrCode" + "ErrCode", + "Fee" ] internal_transaction_lists = internal_transactions |> Stream.map(fn internal_transaction -> + gas_price = + internal_transaction.transaction && + (internal_transaction.transaction.gas_price || + Transaction.effective_gas_price(internal_transaction.transaction, internal_transaction.block)) + [ to_string(internal_transaction.transaction_hash), internal_transaction.index, @@ -85,7 +94,8 @@ defmodule Explorer.Chain.CSVExport.AddressInternalTransactionCsvExporter do Wei.to(internal_transaction.value, :wei), internal_transaction.input, internal_transaction.output, - internal_transaction.error + internal_transaction.error, + gas_price && 
gas_price |> Wei.mult(internal_transaction.gas_used) |> Wei.to(:wei) ] end) diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index 70355c66b8a7..e06ba545b3cb 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -266,6 +266,7 @@ defmodule Explorer.Chain.Transaction do alias Explorer.{Chain, PagingOptions, Repo, SortingHelper} alias Explorer.Chain.{ + Block, Block.Reward, ContractMethod, Data, @@ -1795,17 +1796,23 @@ defmodule Explorer.Chain.Transaction do end @doc """ - Calculates effective gas price for transaction with type 2 (EIP-1559) - - `effective_gas_price = priority_fee_per_gas + block.base_fee_per_gas` + Wrapper around `effective_gas_price/2` """ @spec effective_gas_price(Transaction.t()) :: Wei.t() | nil + def effective_gas_price(%Transaction{} = transaction), do: effective_gas_price(transaction, transaction.block) + + @doc """ + Calculates effective gas price for transaction with type 2 (EIP-1559) + + `effective_gas_price = priority_fee_per_gas + block.base_fee_per_gas` + """ + @spec effective_gas_price(Transaction.t(), Block.t()) :: Wei.t() | nil - def effective_gas_price(%Transaction{block: nil}), do: nil - def effective_gas_price(%Transaction{block: %NotLoaded{}}), do: nil + def effective_gas_price(%Transaction{}, %NotLoaded{}), do: nil + def effective_gas_price(%Transaction{}, nil), do: nil - def effective_gas_price(%Transaction{} = transaction) do - base_fee_per_gas = transaction.block.base_fee_per_gas + def effective_gas_price(%Transaction{} = transaction, block) do + base_fee_per_gas = block.base_fee_per_gas max_priority_fee_per_gas = transaction.max_priority_fee_per_gas max_fee_per_gas = transaction.max_fee_per_gas diff --git a/apps/explorer/test/explorer/chain/csv_export/address_internal_transaction_csv_exporter_test.exs b/apps/explorer/test/explorer/chain/csv_export/address_internal_transaction_csv_exporter_test.exs 
index a9687dd0aae5..ac644a081382 100644 --- a/apps/explorer/test/explorer/chain/csv_export/address_internal_transaction_csv_exporter_test.exs +++ b/apps/explorer/test/explorer/chain/csv_export/address_internal_transaction_csv_exporter_test.exs @@ -71,6 +71,8 @@ defmodule Explorer.Chain.CSVExport.AddressInternalTransactionCsvExporterTest do [[], output], _, [[], error], + _, + [[], fee], _ ] -> %{ @@ -91,7 +93,8 @@ defmodule Explorer.Chain.CSVExport.AddressInternalTransactionCsvExporterTest do value: value, input: input, output: output, - error: error + error: error, + fee: fee } end) @@ -113,6 +116,13 @@ defmodule Explorer.Chain.CSVExport.AddressInternalTransactionCsvExporterTest do assert result.input == to_string(internal_transaction.input) assert result.output == to_string(internal_transaction.output) assert result.error == to_string(internal_transaction.error) + + assert result.fee == + to_string( + internal_transaction.transaction.gas_price + |> Wei.mult(internal_transaction.gas_used) + |> Wei.to(:wei) + ) end test "fetches all internal transactions" do From c77180c3accbd655ce4372048d63b17ff3639043 Mon Sep 17 00:00:00 2001 From: Viktor Baranov Date: Thu, 6 Jun 2024 15:45:48 +0300 Subject: [PATCH 063/150] Fix flaky test --- .../lib/block_scout_web/views/api/v2/helper.ex | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index 2c958a5c3900..15adc47e935e 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -65,7 +65,12 @@ defmodule BlockScoutWeb.API.V2.Helper do implementation_address_hashes = (proxy_implementations && proxy_implementations.address_hashes) || [] implementation_names = (proxy_implementations && proxy_implementations.names) || [] - implementation_address = implementation_address_hashes |> Enum.at(0) + 
implementation_address = + (Enum.count(implementation_address_hashes) > 0 && + implementation_address_hashes + |> Enum.at(0) + |> Address.checksum()) || nil + implementation_name = implementation_names |> Enum.at(0) {implementation_address_hashes, implementation_names, implementation_address, implementation_name, From 80a8e3b4641cccf2d4b9d9d23c78b4430c8203a9 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Thu, 6 Jun 2024 17:56:20 +0300 Subject: [PATCH 064/150] fix: Filter WETH transfers in indexer + migration to delete historical incorrect WETH transfers (#10134) --- apps/explorer/config/config.exs | 1 + apps/explorer/config/runtime/test.exs | 1 + apps/explorer/lib/explorer/application.ex | 1 + .../lib/explorer/chain/token_transfer.ex | 12 ++ ...sanitize_incorrect_weth_token_transfers.ex | 145 ++++++++++++++++ ...ze_incorrect_weth_token_transfers_test.exs | 161 ++++++++++++++++++ .../lib/indexer/transform/token_transfers.ex | 33 +++- .../transform/token_transfers_test.exs | 153 +++++++++++++++++ config/runtime.exs | 7 + 9 files changed, 512 insertions(+), 2 deletions(-) create mode 100644 apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex create mode 100644 apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs diff --git a/apps/explorer/config/config.exs b/apps/explorer/config/config.exs index 78aded683b3d..88e78975d152 100644 --- a/apps/explorer/config/config.exs +++ b/apps/explorer/config/config.exs @@ -122,6 +122,7 @@ config :explorer, Explorer.Migrator.AddressTokenBalanceTokenType, enabled: true config :explorer, Explorer.Migrator.SanitizeMissingBlockRanges, enabled: true config :explorer, Explorer.Migrator.SanitizeIncorrectNFTTokenTransfers, enabled: true config :explorer, Explorer.Migrator.TokenTransferTokenType, enabled: true +config :explorer, Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers, enabled: true config :explorer, 
Explorer.Chain.Fetcher.CheckBytecodeMatchingOnDemand, enabled: true diff --git a/apps/explorer/config/runtime/test.exs b/apps/explorer/config/runtime/test.exs index d9cfc1a821b6..368874b21903 100644 --- a/apps/explorer/config/runtime/test.exs +++ b/apps/explorer/config/runtime/test.exs @@ -44,6 +44,7 @@ config :explorer, Explorer.Migrator.AddressTokenBalanceTokenType, enabled: false config :explorer, Explorer.Migrator.SanitizeMissingBlockRanges, enabled: false config :explorer, Explorer.Migrator.SanitizeIncorrectNFTTokenTransfers, enabled: false config :explorer, Explorer.Migrator.TokenTransferTokenType, enabled: false +config :explorer, Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers, enabled: false config :explorer, realtime_events_sender: Explorer.Chain.Events.SimpleSender diff --git a/apps/explorer/lib/explorer/application.ex b/apps/explorer/lib/explorer/application.ex index 9fa12bb7297a..e2ffa55fedff 100644 --- a/apps/explorer/lib/explorer/application.ex +++ b/apps/explorer/lib/explorer/application.ex @@ -137,6 +137,7 @@ defmodule Explorer.Application do configure(Explorer.Migrator.SanitizeMissingBlockRanges), configure(Explorer.Migrator.SanitizeIncorrectNFTTokenTransfers), configure(Explorer.Migrator.TokenTransferTokenType), + configure(Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers), configure_chain_type_dependent_process(Explorer.Chain.Cache.StabilityValidatorsCounters, :stability) ] |> List.flatten() diff --git a/apps/explorer/lib/explorer/chain/token_transfer.ex b/apps/explorer/lib/explorer/chain/token_transfer.ex index d0a33a1d304b..69f309410d29 100644 --- a/apps/explorer/lib/explorer/chain/token_transfer.ex +++ b/apps/explorer/lib/explorer/chain/token_transfer.ex @@ -528,4 +528,16 @@ defmodule Explorer.Chain.TokenTransfer do defp logs_to_token_transfers_query(query, []) do query end + + @doc """ + Checks if `WHITELISTED_WETH_CONTRACTS` env contains provided address hash. 
+ WHITELISTED_WETH_CONTRACTS env is the list of whitelisted WETH contracts addresses. + """ + @spec whitelisted_weth_contract?(any()) :: boolean() + def whitelisted_weth_contract?(contract_address_hash), + do: + (contract_address_hash |> to_string() |> String.downcase()) in Application.get_env( + :explorer, + Explorer.Chain.TokenTransfer + )[:whitelisted_weth_contracts] end diff --git a/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex new file mode 100644 index 000000000000..5759f913368e --- /dev/null +++ b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex @@ -0,0 +1,145 @@ +defmodule Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers do + @moduledoc """ + This migrator will delete all incorrect WETH token transfers. As incorrect we consider: + - WETH withdrawals and WETH deposits emitted by tokens which are not in `WHITELISTED_WETH_CONTRACTS` env + - WETH withdrawal or WETH deposit which has sibling token transfer within the same block and transaction, with the same amount, same from and to addresses, same token contract addresses. 
(We consider such pairs as duplicates) + """ + + use GenServer, restart: :transient + + import Ecto.Query + + require Logger + + alias Explorer.Chain.{Log, TokenTransfer} + alias Explorer.Migrator.MigrationStatus + alias Explorer.Repo + + @migration_name "sanitize_incorrect_weth_transfers" + @default_batch_size 500 + + def start_link(_) do + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) + end + + @impl true + def init(_) do + case MigrationStatus.get_status(@migration_name) do + "completed" -> + :ignore + + _ -> + MigrationStatus.set_status(@migration_name, "started") + schedule_batch_migration() + {:ok, %{step: :delete_not_whitelisted_weth_transfers}} + end + end + + @impl true + def handle_info(:migrate_batch, %{step: step} = state) do + case last_unprocessed_identifiers(step) do + [] -> + case step do + :delete_not_whitelisted_weth_transfers -> + Logger.info( + "SanitizeIncorrectWETHTokenTransfers deletion of not whitelisted weth transfers finished, continuing with duplicates deletion" + ) + + schedule_batch_migration() + {:noreply, %{step: :delete_duplicates}} + + :delete_duplicates -> + Logger.info("SanitizeIncorrectWETHTokenTransfers migration finished") + MigrationStatus.set_status(@migration_name, "completed") + {:stop, :normal, state} + end + + identifiers -> + identifiers + |> Enum.chunk_every(batch_size()) + |> Enum.map(&run_task/1) + |> Task.await_many(:infinity) + + schedule_batch_migration() + + {:noreply, state} + end + end + + defp last_unprocessed_identifiers(step) do + limit = batch_size() * concurrency() + + step + |> unprocessed_identifiers() + |> limit(^limit) + |> Repo.all(timeout: :infinity) + end + + defp unprocessed_identifiers(:delete_duplicates) do + weth_transfers = + from( + tt in TokenTransfer, + left_join: l in Log, + on: tt.block_hash == l.block_hash and tt.transaction_hash == l.transaction_hash and tt.log_index == l.index, + where: + l.first_topic == ^TokenTransfer.weth_deposit_signature() or + l.first_topic == 
^TokenTransfer.weth_withdrawal_signature() + ) + + from( + weth_tt in subquery(weth_transfers), + inner_join: tt in TokenTransfer, + on: weth_tt.block_hash == tt.block_hash and weth_tt.transaction_hash == tt.transaction_hash, + where: + weth_tt.log_index != tt.log_index and weth_tt.token_contract_address_hash == tt.token_contract_address_hash and + weth_tt.to_address_hash == tt.to_address_hash and weth_tt.from_address_hash == tt.from_address_hash and + weth_tt.amount == tt.amount, + select: {weth_tt.transaction_hash, weth_tt.block_hash, weth_tt.log_index} + ) + end + + defp unprocessed_identifiers(:delete_not_whitelisted_weth_transfers) do + from( + tt in TokenTransfer, + left_join: l in Log, + on: tt.block_hash == l.block_hash and tt.transaction_hash == l.transaction_hash and tt.log_index == l.index, + where: + (l.first_topic == ^TokenTransfer.weth_deposit_signature() or + l.first_topic == ^TokenTransfer.weth_withdrawal_signature()) and + tt.token_contract_address_hash not in ^Application.get_env(:explorer, Explorer.Chain.TokenTransfer)[ + :whitelisted_weth_contracts + ], + select: {tt.transaction_hash, tt.block_hash, tt.log_index} + ) + end + + defp run_task(batch), do: Task.async(fn -> handle_batch(batch) end) + + defp handle_batch(token_transfer_ids) do + token_transfer_ids + |> build_delete_query() + |> Repo.query!([], timeout: :infinity) + end + + defp schedule_batch_migration do + Process.send(self(), :migrate_batch, []) + end + + defp batch_size do + Application.get_env(:explorer, __MODULE__)[:batch_size] || @default_batch_size + end + + defp concurrency do + default = 4 * System.schedulers_online() + + Application.get_env(:explorer, __MODULE__)[:concurrency] || default + end + + defp build_delete_query(token_transfer_ids) do + """ + DELETE + FROM token_transfers tt + WHERE (tt.transaction_hash, tt.block_hash, tt.log_index) IN #{TokenTransfer.encode_token_transfer_ids(token_transfer_ids)} + """ + end +end diff --git 
a/apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs b/apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs new file mode 100644 index 000000000000..834c3d8054ee --- /dev/null +++ b/apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs @@ -0,0 +1,161 @@ +defmodule Explorer.Migrator.SanitizeIncorrectWETHTokenTransfersTest do + use Explorer.DataCase, async: false + + alias Explorer.Chain.TokenTransfer + alias Explorer.Migrator.{SanitizeIncorrectWETHTokenTransfers, MigrationStatus} + alias Explorer.Repo + + describe "SanitizeIncorrectWETHTokenTransfers" do + test "Deletes not whitelisted WETH transfers and duplicated WETH transfers" do + %{contract_address: token_address} = insert(:token, type: "ERC-20") + block = insert(:block, consensus: true) + burn_address = insert(:address, hash: "0x0000000000000000000000000000000000000000") + + insert(:token_transfer, + from_address: insert(:address), + block: block, + block_number: block.number, + token_contract_address: token_address, + token_ids: nil + ) + + deposit_log = insert(:log, first_topic: TokenTransfer.weth_deposit_signature()) + + insert(:token_transfer, + from_address: insert(:address), + token_contract_address: token_address, + block: deposit_log.block, + transaction: deposit_log.transaction, + log_index: deposit_log.index + ) + + withdrawal_log = insert(:log, first_topic: TokenTransfer.weth_withdrawal_signature()) + + insert(:token_transfer, + from_address: insert(:address), + token_contract_address: token_address, + block: withdrawal_log.block, + transaction: withdrawal_log.transaction, + log_index: withdrawal_log.index + ) + + %{contract_address: whitelisted_token_address} = insert(:token, type: "ERC-20") + + env = Application.get_env(:explorer, Explorer.Chain.TokenTransfer) + + Application.put_env( + :explorer, + Explorer.Chain.TokenTransfer, + Keyword.put(env, :whitelisted_weth_contracts, 
[whitelisted_token_address |> to_string() |> String.downcase()]) + ) + + withdrawal_log = insert(:log, first_topic: TokenTransfer.weth_withdrawal_signature()) + + insert(:token_transfer, + from_address: insert(:address), + token_contract_address: whitelisted_token_address, + block: withdrawal_log.block, + transaction: withdrawal_log.transaction, + log_index: withdrawal_log.index + ) + + deposit_log = insert(:log, first_topic: TokenTransfer.weth_deposit_signature()) + + insert(:token_transfer, + from_address: insert(:address), + token_contract_address: whitelisted_token_address, + block: deposit_log.block, + transaction: deposit_log.transaction, + log_index: deposit_log.index + ) + + withdrawal_log_duplicate = insert(:log, first_topic: TokenTransfer.weth_withdrawal_signature()) + + tt_withdrawal = + insert(:token_transfer, + from_address: burn_address, + token_contract_address: whitelisted_token_address, + block: withdrawal_log_duplicate.block, + transaction: withdrawal_log_duplicate.transaction, + log_index: withdrawal_log_duplicate.index + ) + + insert(:token_transfer, + from_address: burn_address, + to_address: tt_withdrawal.to_address, + token_contract_address: whitelisted_token_address, + block: withdrawal_log_duplicate.block, + transaction: withdrawal_log_duplicate.transaction, + log_index: withdrawal_log_duplicate.index + 1, + amount: tt_withdrawal.amount + ) + + deposit_log_duplicate = insert(:log, first_topic: TokenTransfer.weth_deposit_signature()) + + tt_deposit = + insert(:token_transfer, + to_address: burn_address, + token_contract_address: whitelisted_token_address, + block: deposit_log_duplicate.block, + transaction: deposit_log_duplicate.transaction, + log_index: deposit_log_duplicate.index + ) + + insert(:token_transfer, + from_address: tt_deposit.from_address, + to_address: burn_address, + token_contract_address: whitelisted_token_address, + block: deposit_log_duplicate.block, + transaction: deposit_log_duplicate.transaction, + log_index: 
deposit_log_duplicate.index + 1, + amount: tt_deposit.amount + ) + + assert MigrationStatus.get_status("sanitize_incorrect_weth_transfers") == nil + + Application.put_env(:explorer, Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers, + batch_size: 1, + concurrency: 1 + ) + + SanitizeIncorrectWETHTokenTransfers.start_link([]) + Process.sleep(100) + + assert MigrationStatus.get_status("sanitize_incorrect_weth_transfers") == "completed" + + token_address_hash = token_address.hash + whitelisted_token_address_hash = whitelisted_token_address.hash + + assert [ + %{token_contract_address_hash: ^token_address_hash}, + %{token_contract_address_hash: ^whitelisted_token_address_hash}, + %{token_contract_address_hash: ^whitelisted_token_address_hash}, + %{token_contract_address_hash: ^whitelisted_token_address_hash}, + %{token_contract_address_hash: ^whitelisted_token_address_hash} + ] = transfers = Repo.all(TokenTransfer) + + withdrawal = Enum.at(transfers, 1) + deposit = Enum.at(transfers, 2) + assert withdrawal.block_hash == withdrawal_log.block_hash + assert withdrawal.transaction_hash == withdrawal_log.transaction_hash + assert withdrawal.log_index == withdrawal_log.index + + assert deposit.block_hash == deposit_log.block_hash + assert deposit.transaction_hash == deposit_log.transaction_hash + assert deposit.log_index == deposit_log.index + + withdrawal_analogue = Enum.at(transfers, 3) + deposit_analogue = Enum.at(transfers, 4) + + assert withdrawal_analogue.block_hash == withdrawal_log_duplicate.block_hash + assert withdrawal_analogue.transaction_hash == withdrawal_log_duplicate.transaction_hash + assert withdrawal_analogue.log_index == withdrawal_log_duplicate.index + 1 + + assert deposit_analogue.block_hash == deposit_log_duplicate.block_hash + assert deposit_analogue.transaction_hash == deposit_log_duplicate.transaction_hash + assert deposit_analogue.log_index == deposit_log_duplicate.index + 1 + + Application.put_env(:explorer, Explorer.Chain.TokenTransfer, env) + 
end + end +end diff --git a/apps/indexer/lib/indexer/transform/token_transfers.ex b/apps/indexer/lib/indexer/transform/token_transfers.ex index 2ffd90abd1f5..761323d1a4b5 100644 --- a/apps/indexer/lib/indexer/transform/token_transfers.ex +++ b/apps/indexer/lib/indexer/transform/token_transfers.ex @@ -25,10 +25,12 @@ defmodule Indexer.Transform.TokenTransfers do weth_transfers = logs |> Enum.filter(fn log -> - log.first_topic == TokenTransfer.weth_deposit_signature() || - log.first_topic == TokenTransfer.weth_withdrawal_signature() + (log.first_topic == TokenTransfer.weth_deposit_signature() || + log.first_topic == TokenTransfer.weth_withdrawal_signature()) && + TokenTransfer.whitelisted_weth_contract?(log.address_hash) end) |> Enum.reduce(initial_acc, &do_parse/2) + |> drop_repeated_token_transfers(erc20_and_erc721_token_transfers.token_transfers) erc1155_token_transfers = logs @@ -80,6 +82,33 @@ defmodule Indexer.Transform.TokenTransfers do token_transfers_from_logs_uniq end + defp drop_repeated_token_transfers(weth_acc, erc_20_721_token_transfers) do + key_from_tt = fn tt -> + {tt.block_hash, tt.transaction_hash, tt.token_contract_address_hash, tt.to_address_hash, tt.from_address_hash, + tt.amount} + end + + deposit_withdrawal_like_transfers = + Enum.reduce(erc_20_721_token_transfers, %{}, fn token_transfer, acc -> + if token_transfer.token_type == "ERC-20" and + (token_transfer.from_address_hash == burn_address_hash_string() or + token_transfer.to_address_hash == burn_address_hash_string()) do + Map.put(acc, key_from_tt.(token_transfer), true) + else + acc + end + end) + + %{token_transfers: weth_token_transfer} = weth_acc + + weth_token_transfer_updated = + Enum.reject(weth_token_transfer, fn weth_tt -> + deposit_withdrawal_like_transfers[key_from_tt.(weth_tt)] + end) + + Map.put(weth_acc, :token_transfers, weth_token_transfer_updated) + end + defp sanitize_weth_transfers(total_tokens, total_transfers, weth_transfers) do existing_token_types_map = total_tokens 
diff --git a/apps/indexer/test/indexer/transform/token_transfers_test.exs b/apps/indexer/test/indexer/transform/token_transfers_test.exs index f8be8358ab9d..f25c207681bc 100644 --- a/apps/indexer/test/indexer/transform/token_transfers_test.exs +++ b/apps/indexer/test/indexer/transform/token_transfers_test.exs @@ -137,7 +137,19 @@ defmodule Indexer.Transform.TokenTransfersTest do ] } + env = Application.get_env(:explorer, Explorer.Chain.TokenTransfer) + + Application.put_env( + :explorer, + Explorer.Chain.TokenTransfer, + Keyword.put(env, :whitelisted_weth_contracts, [ + weth_deposit_log.address_hash |> to_string() |> String.downcase() + ]) + ) + assert TokenTransfers.parse(logs) == expected + + Application.put_env(:explorer, Explorer.Chain.TokenTransfer, env) end test "parses ERC-721 transfer with addresses in data field" do @@ -435,6 +447,147 @@ defmodule Indexer.Transform.TokenTransfersTest do ] } end + + test "Filters WETH transfers from not whitelisted tokens" do + logs = [ + %{ + address_hash: "0x0BE9e53fd7EDaC9F859882AfdDa116645287C629", + block_number: 23_704_638, + block_hash: "0x8f61c99b0dd1196714ffda5bf979a282e6a62fdd3cff25c291284e6b57de2106", + data: "0x00000000000000000000000000000000000000000000002be19edfcf6b480000", + first_topic: "0xe1fffcc4923d04b559f4d29a8bfc6cda04eb5b0d3c460751c2402c5c5cc9109c", + second_topic: "0x000000000000000000000000fb76e9e7d88e308ab530330ed90e84a952570319", + third_topic: nil, + fourth_topic: nil, + index: 1, + transaction_hash: "0x185889bc91372106ecf114a4e23f4ee615e131ae3e698078bd5d2ed7e3f55a49" + }, + %{ + address_hash: "0x0BE9e53fd7EDaC9F859882AfdDa116645287C629", + block_number: 23_704_608, + block_hash: "0x5a5e69984f78d65fc6d92e18058d21a9b114f1d56d06ca7aa017b3d87bf0491a", + data: "0x00000000000000000000000000000000000000000000000000e1315e1ebd28e8", + first_topic: "0x7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b65", + second_topic: "0x000000000000000000000000e3f85aad0c8dd7337427b9df5d0fb741d65eeeb5", + 
third_topic: nil, + fourth_topic: nil, + index: 1, + transaction_hash: "0x07510dbfddbac9064f7d607c2d9a14aa26fa19cdfcd578c0b585ff2395df543f" + } + ] + + expected = %{token_transfers: [], tokens: []} + + assert TokenTransfers.parse(logs) == expected + end + + test "Filters duplicates WETH transfers" do + [log_1, _weth_deposit_log, log_2, _weth_withdrawal_log] = + logs = [ + %{ + address_hash: "0x0BE9e53fd7EDaC9F859882AfdDa116645287C629", + block_number: 23_704_638, + block_hash: "0x79594150677f083756a37eee7b97ed99ab071f502104332cb3835bac345711ca", + data: "0x00000000000000000000000000000000000000000000002be19edfcf6b480000", + first_topic: "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", + fourth_topic: nil, + index: 1, + second_topic: "0x0000000000000000000000000000000000000000000000000000000000000000", + third_topic: "0x000000000000000000000000fb76e9e7d88e308ab530330ed90e84a952570319", + transaction_hash: "0x4011d9a930a3da620321589a54dc0ca3b88216b4886c7a7c3aaad1fb17702d35" + }, + %{ + address_hash: "0x0BE9e53fd7EDaC9F859882AfdDa116645287C629", + block_number: 23_704_638, + block_hash: "0x79594150677f083756a37eee7b97ed99ab071f502104332cb3835bac345711ca", + data: "0x00000000000000000000000000000000000000000000002be19edfcf6b480000", + first_topic: "0xe1fffcc4923d04b559f4d29a8bfc6cda04eb5b0d3c460751c2402c5c5cc9109c", + second_topic: "0x000000000000000000000000fb76e9e7d88e308ab530330ed90e84a952570319", + third_topic: nil, + fourth_topic: nil, + index: 2, + transaction_hash: "0x4011d9a930a3da620321589a54dc0ca3b88216b4886c7a7c3aaad1fb17702d35" + }, + %{ + address_hash: "0xf2eec76e45b328df99a34fa696320a262cb92154", + block_number: 3_530_917, + block_hash: "0x5a5e69984f78d65fc6d92e18058d21a9b114f1d56d06ca7aa017b3d87bf0491a", + data: "0x00000000000000000000000000000000000000000000000000e1315e1ebd28e8", + first_topic: "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", + fourth_topic: nil, + index: 8, + second_topic: 
"0x000000000000000000000000e3f85aad0c8dd7337427b9df5d0fb741d65eeeb5", + third_topic: "0x0000000000000000000000000000000000000000000000000000000000000000", + transaction_hash: "0x185889bc91372106ecf114a4e23f4ee615e131ae3e698078bd5d2ed7e3f55a49" + }, + %{ + address_hash: "0xf2eec76e45b328df99a34fa696320a262cb92154", + block_number: 3_530_917, + block_hash: "0x5a5e69984f78d65fc6d92e18058d21a9b114f1d56d06ca7aa017b3d87bf0491a", + data: "0x00000000000000000000000000000000000000000000000000e1315e1ebd28e8", + first_topic: "0x7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b65", + second_topic: "0x000000000000000000000000e3f85aad0c8dd7337427b9df5d0fb741d65eeeb5", + third_topic: nil, + fourth_topic: nil, + index: 1, + transaction_hash: "0x185889bc91372106ecf114a4e23f4ee615e131ae3e698078bd5d2ed7e3f55a49" + } + ] + + expected = %{ + tokens: [ + %{ + contract_address_hash: log_2.address_hash, + type: "ERC-20" + }, + %{ + contract_address_hash: log_1.address_hash, + type: "ERC-20" + } + ], + token_transfers: [ + %{ + token_ids: nil, + amount: Decimal.new(63_386_150_072_297_704), + block_number: log_2.block_number, + log_index: log_2.index, + from_address_hash: truncated_hash(log_2.second_topic), + to_address_hash: truncated_hash(log_2.third_topic), + token_contract_address_hash: log_2.address_hash, + transaction_hash: log_2.transaction_hash, + token_type: "ERC-20", + block_hash: log_2.block_hash + }, + %{ + block_number: log_1.block_number, + log_index: log_1.index, + from_address_hash: truncated_hash(log_1.second_topic), + to_address_hash: truncated_hash(log_1.third_topic), + token_contract_address_hash: log_1.address_hash, + token_ids: nil, + transaction_hash: log_1.transaction_hash, + token_type: "ERC-20", + block_hash: log_1.block_hash, + amount: Decimal.new(809_467_672_956_315_893_760) + } + ] + } + + env = Application.get_env(:explorer, Explorer.Chain.TokenTransfer) + + Application.put_env( + :explorer, + Explorer.Chain.TokenTransfer, + Keyword.put(env, 
:whitelisted_weth_contracts, [ + log_1.address_hash |> to_string() |> String.downcase(), + log_2.address_hash |> to_string() |> String.downcase() + ]) + ) + + assert TokenTransfers.parse(logs) == expected + + Application.put_env(:explorer, Explorer.Chain.TokenTransfer, env) + end end defp truncated_hash("0x000000000000000000000000" <> rest) do diff --git a/config/runtime.exs b/config/runtime.exs index 72c02fb50b18..833ed58e4e49 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -569,6 +569,10 @@ config :explorer, Explorer.Migrator.SanitizeIncorrectNFTTokenTransfers, batch_size: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_NFT_BATCH_SIZE", 100), concurrency: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_NFT_CONCURRENCY", 1) +config :explorer, Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers, + batch_size: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_WETH_BATCH_SIZE", 100), + concurrency: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_WETH_CONCURRENCY", 1) + config :explorer, Explorer.Chain.BridgedToken, eth_omni_bridge_mediator: System.get_env("BRIDGED_TOKENS_ETH_OMNI_BRIDGE_MEDIATOR"), bsc_omni_bridge_mediator: System.get_env("BRIDGED_TOKENS_BSC_OMNI_BRIDGE_MEDIATOR"), @@ -579,6 +583,9 @@ config :explorer, Explorer.Chain.BridgedToken, config :explorer, Explorer.Utility.MissingBalanceOfToken, window_size: ConfigHelper.parse_integer_env_var("MISSING_BALANCE_OF_TOKENS_WINDOW_SIZE", 100) +config :explorer, Explorer.Chain.TokenTransfer, + whitelisted_weth_contracts: ConfigHelper.parse_list_env_var("WHITELISTED_WETH_CONTRACTS", "") + ############### ### Indexer ### ############### From 44bee1ef0b0ed119c9e23dbe2a01f85824e3d25e Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Thu, 6 Jun 2024 19:14:35 +0300 Subject: [PATCH 065/150] feat: Set dynamic ttl of cache modules derived from MapCache (#10109) * Set dynamic ttl of cache modules derived from MapCache * Update apps/explorer/lib/explorer/chain/cache/helper.ex 
Co-authored-by: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> --------- Co-authored-by: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> --- .../lib/explorer/chain/cache/address_sum.ex | 5 +-- .../chain/cache/address_sum_minus_burnt.ex | 5 +-- .../lib/explorer/chain/cache/block.ex | 4 +-- .../lib/explorer/chain/cache/gas_usage.ex | 5 +-- .../lib/explorer/chain/cache/helper.ex | 33 +++++++++++++++++++ .../lib/explorer/chain/cache/transaction.ex | 4 +-- 6 files changed, 46 insertions(+), 10 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/cache/address_sum.ex b/apps/explorer/lib/explorer/chain/cache/address_sum.ex index dc430400ad5b..16da91ba0b9a 100644 --- a/apps/explorer/lib/explorer/chain/cache/address_sum.ex +++ b/apps/explorer/lib/explorer/chain/cache/address_sum.ex @@ -10,9 +10,10 @@ defmodule Explorer.Chain.Cache.AddressSum do key: :sum, key: :async_task, ttl_check_interval: Application.get_env(:explorer, __MODULE__)[:ttl_check_interval], - global_ttl: Application.get_env(:explorer, __MODULE__)[:global_ttl], + global_ttl: :infinity, callback: &async_task_on_deletion(&1) + alias Explorer.Chain.Cache.Helper alias Explorer.Etherscan defp handle_fallback(:sum) do @@ -31,7 +32,7 @@ defmodule Explorer.Chain.Cache.AddressSum do try do result = Etherscan.fetch_sum_coin_total_supply() - set_sum(result) + set_sum(%ConCache.Item{ttl: Helper.ttl(__MODULE__, "CACHE_ADDRESS_SUM_PERIOD"), value: result}) rescue e -> Logger.debug([ diff --git a/apps/explorer/lib/explorer/chain/cache/address_sum_minus_burnt.ex b/apps/explorer/lib/explorer/chain/cache/address_sum_minus_burnt.ex index 0fb0d7800bda..3af99f580a83 100644 --- a/apps/explorer/lib/explorer/chain/cache/address_sum_minus_burnt.ex +++ b/apps/explorer/lib/explorer/chain/cache/address_sum_minus_burnt.ex @@ -10,10 +10,11 @@ defmodule Explorer.Chain.Cache.AddressSumMinusBurnt do key: :sum_minus_burnt, key: :async_task, ttl_check_interval: Application.get_env(:explorer, 
__MODULE__)[:ttl_check_interval], - global_ttl: Application.get_env(:explorer, __MODULE__)[:global_ttl], + global_ttl: :infinity, callback: &async_task_on_deletion(&1) alias Explorer.{Chain, Etherscan} + alias Explorer.Chain.Cache.Helper defp handle_fallback(:sum_minus_burnt) do # This will get the task PID if one exists and launch a new task if not @@ -38,7 +39,7 @@ defmodule Explorer.Chain.Cache.AddressSumMinusBurnt do Chain.upsert_last_fetched_counter(params) - set_sum_minus_burnt(result) + set_sum_minus_burnt(%ConCache.Item{ttl: Helper.ttl(__MODULE__, "CACHE_ADDRESS_SUM_PERIOD"), value: result}) rescue e -> Logger.debug([ diff --git a/apps/explorer/lib/explorer/chain/cache/block.ex b/apps/explorer/lib/explorer/chain/cache/block.ex index 0dd216a71bee..672f80f76e2a 100644 --- a/apps/explorer/lib/explorer/chain/cache/block.ex +++ b/apps/explorer/lib/explorer/chain/cache/block.ex @@ -12,7 +12,7 @@ defmodule Explorer.Chain.Cache.Block do name: :block_count, key: :count, key: :async_task, - global_ttl: Application.get_env(:explorer, __MODULE__)[:global_ttl], + global_ttl: :infinity, ttl_check_interval: :timer.seconds(1), callback: &async_task_on_deletion(&1) @@ -78,7 +78,7 @@ defmodule Explorer.Chain.Cache.Block do Chain.upsert_last_fetched_counter(params) - set_count(result) + set_count(%ConCache.Item{ttl: Helper.ttl(__MODULE__, "CACHE_BLOCK_COUNT_PERIOD"), value: result}) rescue e -> Logger.debug([ diff --git a/apps/explorer/lib/explorer/chain/cache/gas_usage.ex b/apps/explorer/lib/explorer/chain/cache/gas_usage.ex index 65bb75b24f08..f671f2bf5377 100644 --- a/apps/explorer/lib/explorer/chain/cache/gas_usage.ex +++ b/apps/explorer/lib/explorer/chain/cache/gas_usage.ex @@ -17,10 +17,11 @@ defmodule Explorer.Chain.Cache.GasUsage do name: :gas_usage, key: :sum, key: :async_task, - global_ttl: Application.get_env(:explorer, __MODULE__)[:global_ttl], + global_ttl: :infinity, ttl_check_interval: :timer.seconds(1), callback: &async_task_on_deletion(&1) + alias 
Explorer.Chain.Cache.Helper alias Explorer.Chain.Transaction alias Explorer.Repo @@ -52,7 +53,7 @@ defmodule Explorer.Chain.Cache.GasUsage do try do result = fetch_sum_gas_used() - set_sum(result) + set_sum(%ConCache.Item{ttl: Helper.ttl(__MODULE__, "CACHE_TOTAL_GAS_USAGE_PERIOD"), value: result}) rescue e -> Logger.debug([ diff --git a/apps/explorer/lib/explorer/chain/cache/helper.ex b/apps/explorer/lib/explorer/chain/cache/helper.ex index f82925fe10f1..a3845f9b478e 100644 --- a/apps/explorer/lib/explorer/chain/cache/helper.ex +++ b/apps/explorer/lib/explorer/chain/cache/helper.ex @@ -4,6 +4,10 @@ defmodule Explorer.Chain.Cache.Helper do """ alias Explorer.Chain + @block_number_threshold_1 10_000 + @block_number_threshold_2 50_000 + @block_number_threshold_3 150_000 + @doc """ Estimates the row count of a given table using PostgreSQL system catalogs. @@ -28,4 +32,33 @@ defmodule Explorer.Chain.Cache.Helper do count end + + @doc """ + Calculates the time-to-live (TTL) for a given module in the cache. + + ## Parameters + + * `module` - The module for which to calculate the TTL. + * `management_variable` - The management environment variable. + + ## Returns + + The TTL for the module. 
+ + """ + @spec ttl(atom, String.t()) :: non_neg_integer() + def ttl(module, management_variable) do + min_blockchain_block_number = Application.get_env(:indexer, :first_block) + max_block_number = Chain.fetch_max_block_number() + blocks_amount = max_block_number - min_blockchain_block_number + global_ttl_from_var = Application.get_env(:explorer, module)[:global_ttl] + + cond do + System.get_env(management_variable) not in ["", nil] -> global_ttl_from_var + blocks_amount < @block_number_threshold_1 -> :timer.seconds(10) + blocks_amount >= @block_number_threshold_1 and blocks_amount < @block_number_threshold_2 -> :timer.seconds(30) + blocks_amount >= @block_number_threshold_2 and blocks_amount < @block_number_threshold_3 -> :timer.minutes(2) + true -> global_ttl_from_var + end + end end diff --git a/apps/explorer/lib/explorer/chain/cache/transaction.ex b/apps/explorer/lib/explorer/chain/cache/transaction.ex index dd909502a4d3..bad41fe32e1e 100644 --- a/apps/explorer/lib/explorer/chain/cache/transaction.ex +++ b/apps/explorer/lib/explorer/chain/cache/transaction.ex @@ -7,7 +7,7 @@ defmodule Explorer.Chain.Cache.Transaction do name: :transaction_count, key: :count, key: :async_task, - global_ttl: Application.get_env(:explorer, __MODULE__)[:global_ttl], + global_ttl: :infinity, ttl_check_interval: :timer.seconds(1), callback: &async_task_on_deletion(&1) @@ -51,7 +51,7 @@ defmodule Explorer.Chain.Cache.Transaction do try do result = Repo.aggregate(Transaction, :count, :hash, timeout: :infinity) - set_count(result) + set_count(%ConCache.Item{ttl: Helper.ttl(__MODULE__, "CACHE_TXS_COUNT_PERIOD"), value: result}) rescue e -> Logger.debug([ From 85a51007f21e62bc94cce91fa9eee1fe040746d4 Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Fri, 7 Jun 2024 12:06:49 +0400 Subject: [PATCH 066/150] fix: excessive logging for Arbitrum batches confirmations (#10205) * revisited logging to explore start block of confirmation range * improvement of logging --- 
.../fetcher/arbitrum/workers/new_batches.ex | 2 +- .../arbitrum/workers/new_confirmations.ex | 131 +++++++++++++----- 2 files changed, 99 insertions(+), 34 deletions(-) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex index 1f5c2bf58511..649cede3c15b 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex @@ -690,7 +690,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do blocks_to_batches = unwrap_rollup_block_ranges(batches) required_blocks_numbers = Map.keys(blocks_to_batches) - log_info("Identified #{length(required_blocks_numbers)} rollup blocks") + log_debug("Identified #{length(required_blocks_numbers)} rollup blocks") {blocks_to_import_map, txs_to_import_list} = get_rollup_blocks_and_txs_from_db(required_blocks_numbers, blocks_to_batches) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex index 35279518332b..ea8816ba0ccb 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex @@ -44,6 +44,16 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do require Logger + @typedoc """ + A map containing list of transaction logs for a specific block range. 
+ - the key is the tuple with the start and end of the block range + - the value is the list of transaction logs received for the block range + """ + @type cached_logs :: %{{non_neg_integer(), non_neg_integer()} => [%{String.t() => any()}]} + + @logs_per_report 10 + @zero_counters %{pairs_counter: 1, capped_logs_counter: 0, report?: false} + # keccak256("SendRootUpdated(bytes32,bytes32)") @send_root_updated_event "0xb4df3847300f076a369cd76d2314b470a1194d9e8a6bb97f1860aee88a5f6748" @@ -732,33 +742,48 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do "Use L1 blocks #{batch.commitment_transaction.block_number}..#{confirmation_desc.l1_block_num - 1} to look for a rollup block confirmation within #{batch.start_block}..#{batch.end_block} of ##{batch.number}" ) - l1_blocks_pairs_to_get_logs( - batch.commitment_transaction.block_number, - confirmation_desc.l1_block_num - 1, - l1_outbox_config.logs_block_range - ) - |> Enum.reduce_while({:ok, nil, cache}, fn {log_start, log_end}, {_, _, updated_cache} -> - # credo:disable-for-previous-line Credo.Check.Refactor.PipeChainStart - {status, latest_block_confirmed, new_cache} = - do_check_if_batch_confirmed( - {batch.start_block, batch.end_block}, - {log_start, log_end}, - l1_outbox_config, - updated_cache - ) + block_pairs = + l1_blocks_pairs_to_get_logs( + batch.commitment_transaction.block_number, + confirmation_desc.l1_block_num - 1, + l1_outbox_config.logs_block_range + ) - case {status, latest_block_confirmed} do - {:error, _} -> - {:halt, {:error, nil, new_cache}} + block_pairs_length = length(block_pairs) + + {status, block, new_cache, _} = + block_pairs + |> Enum.reduce_while({:ok, nil, cache, @zero_counters}, fn {log_start, log_end}, + {_, _, updated_cache, counters} -> + {status, latest_block_confirmed, new_cache, logs_amount} = + do_check_if_batch_confirmed( + {batch.start_block, batch.end_block}, + {log_start, log_end}, + l1_outbox_config, + updated_cache + ) - {_, nil} -> - {:cont, {:ok, nil, 
new_cache}} + case {status, latest_block_confirmed} do + {:error, _} -> + {:halt, {:error, nil, new_cache, @zero_counters}} - {_, previous_confirmed_rollup_block} -> - log_info("Confirmed block ##{previous_confirmed_rollup_block} for the batch found") - {:halt, {:ok, previous_confirmed_rollup_block, new_cache}} - end - end) + {_, nil} -> + next_counters = next_counters(counters, logs_amount) + + # credo:disable-for-lines:3 Credo.Check.Refactor.Nesting + if next_counters.report? and block_pairs_length != next_counters.pairs_counter do + log_info("Examined #{next_counters.pairs_counter - 1} of #{block_pairs_length} L1 block ranges") + end + + {:cont, {:ok, nil, new_cache, next_counters}} + + {_, previous_confirmed_rollup_block} -> + log_info("Confirmed block ##{previous_confirmed_rollup_block} for the batch found") + {:halt, {:ok, previous_confirmed_rollup_block, new_cache, @zero_counters}} + end + end) + + {status, block, new_cache} end # Generates descending order pairs of start and finish block numbers, ensuring @@ -766,6 +791,9 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do # Examples: # l1_blocks_pairs_to_get_logs(1, 10, 3) -> [{8, 10}, {5, 7}, {2, 4}, {1, 1}] # l1_blocks_pairs_to_get_logs(5, 10, 3) -> [{8, 10}, {5, 7}] + @spec l1_blocks_pairs_to_get_logs(non_neg_integer(), non_neg_integer(), non_neg_integer()) :: [ + {non_neg_integer(), non_neg_integer()} + ] defp l1_blocks_pairs_to_get_logs(start, finish, max_range) do # credo:disable-for-lines:9 Credo.Check.Refactor.PipeChainStart Stream.unfold(finish, fn cur_finish -> @@ -794,14 +822,26 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do # - `cache`: A cache of previously fetched logs to reduce `eth_getLogs` calls. # # ## Returns - # - A tuple `{:ok, latest_block_confirmed, new_cache}`: + # - A tuple `{:ok, latest_block_confirmed, new_cache, logs_length}`: # - `latest_block_confirmed` is the highest rollup block number confirmed within # the specified range. 
- # - A tuple `{:ok, nil, new_cache}` if no rollup blocks within the specified range - # are confirmed. - # - A tuple `{:error, nil, new_cache}` if during parsing logs a rollup block with - # given hash is not being found in the database. + # - A tuple `{:ok, nil, new_cache, logs_length}` if no rollup blocks within the + # specified range are confirmed. + # - A tuple `{:error, nil, new_cache, logs_length}` if during parsing logs a rollup + # block with given hash is not being found in the database. # For all three cases the `new_cache` contains the updated logs cache. + @spec do_check_if_batch_confirmed( + {non_neg_integer(), non_neg_integer()}, + {non_neg_integer(), non_neg_integer()}, + %{ + :outbox_address => String.t(), + :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments(), + optional(any()) => any() + }, + __MODULE__.cached_logs() + ) :: + {:ok, nil | non_neg_integer(), __MODULE__.cached_logs(), non_neg_integer()} + | {:error, nil, __MODULE__.cached_logs(), non_neg_integer()} defp do_check_if_batch_confirmed( {rollup_start_block, rollup_end_block}, {log_start, log_end}, @@ -824,7 +864,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do {status, latest_block_confirmed} = logs |> Enum.reduce_while({:ok, nil}, fn event, _acc -> - log_info("Examining the transaction #{event["transactionHash"]}") + log_debug("Examining the transaction #{event["transactionHash"]}") rollup_block_hash = send_root_updated_event_parse(event) rollup_block_num = Db.rollup_block_hash_to_num(rollup_block_hash) @@ -835,16 +875,34 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do {:halt, {:error, nil}} value when value >= rollup_start_block and value <= rollup_end_block -> - log_info("The rollup block ##{rollup_block_num} within the range") + log_debug("The rollup block ##{rollup_block_num} within the range") {:halt, {:ok, rollup_block_num}} _ -> - log_info("The rollup block ##{rollup_block_num} outside of the range") + log_debug("The rollup 
block ##{rollup_block_num} outside of the range") {:cont, {:ok, nil}} end end) - {status, latest_block_confirmed, new_cache} + {status, latest_block_confirmed, new_cache, length(logs)} + end + + # Simplifies the process of updating counters for the `eth_getLogs` requests + # to be used for logging purposes. + @spec next_counters( + %{:pairs_counter => non_neg_integer(), :capped_logs_counter => non_neg_integer(), optional(any()) => any()}, + non_neg_integer() + ) :: %{ + :pairs_counter => non_neg_integer(), + :capped_logs_counter => non_neg_integer(), + :report? => boolean() + } + defp next_counters(%{pairs_counter: pairs_counter, capped_logs_counter: capped_logs_counter}, logs_amount) do + %{ + pairs_counter: pairs_counter + 1, + capped_logs_counter: rem(capped_logs_counter + logs_amount, @logs_per_report), + report?: div(capped_logs_counter + logs_amount, @logs_per_report) > 0 + } end # Retrieves logs for `SendRootUpdated` events between specified blocks, @@ -868,6 +926,13 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do # - A tuple containing: # - The list of logs corresponding to `SendRootUpdated` events. # - The updated cache with the newly fetched logs. 
+ @spec get_logs_new_confirmations( + non_neg_integer(), + non_neg_integer(), + binary(), + EthereumJSONRPC.json_rpc_named_arguments(), + __MODULE__.cached_logs() + ) :: {[%{String.t() => any()}], __MODULE__.cached_logs()} defp get_logs_new_confirmations(start_block, end_block, outbox_address, json_rpc_named_arguments, cache \\ %{}) when start_block <= end_block do # TODO: consider to have a persistent cache in DB to reduce the number of getLogs requests From a8e2e127b54e9b1e9a99cdf89d93736a8335716d Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Fri, 7 Jun 2024 13:05:13 +0300 Subject: [PATCH 067/150] feat: Batch read methods requests (#10192) * feat: Batch read methods requests * Fix tests * Process review comments --- .../api/v2/smart_contract_controller_test.exs | 18 ++- .../lib/explorer/smart_contract/reader.ex | 105 +++++++++++++----- 2 files changed, 90 insertions(+), 33 deletions(-) diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs index 99f30a208555..c0e19f0b9c34 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs @@ -1677,23 +1677,33 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do target_contract = insert(:smart_contract, abi: abi) - blockchain_eth_call_mock() + address_hash = to_string(target_contract.address_hash) expect( EthereumJSONRPC.Mox, :json_rpc, fn [ %{ - id: id, + id: id_1, method: "eth_call", - params: [%{to: _address_hash, from: "0xBb36c792B9B45Aaf8b848A1392B0d6559202729E"}, _] + params: [%{to: ^address_hash, from: "0xBb36c792B9B45Aaf8b848A1392B0d6559202729E", data: "0x2e64cec1"}, _] + }, + %{ + id: id_2, + method: "eth_call", + params: [%{to: ^address_hash, from: 
"0xBb36c792B9B45Aaf8b848A1392B0d6559202729E", data: "0xab470f05"}, _] } ], _opts -> {:ok, [ %{ - id: id, + id: id_2, + jsonrpc: "2.0", + result: "0x000000000000000000000000fffffffffffffffffffffffffffffffffffffffe" + }, + %{ + id: id_1, jsonrpc: "2.0", result: "0x0000000000000000000000000000000000000000000000000000000000000020fe6a43fa23a0269092cbf97cb908e1d5a49a18fd6942baf2467fb5b221e39ab200000000000000000000000000000000000000000000000000000000000003e8fe6a43fa23a0269092cbf97cb908e1d5a49a18fd6942baf2467fb5b221e39ab2000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000bb36c792b9b45aaf8b848a1392b0d6559202729e000000000000000000000000bb36c792b9b45aaf8b848a1392b0d6559202729e000000000000000000000000000000000000000000000000000000000001e0f30000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000bb36c792b9b45aaf8b848a1392b0d6559202729e000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000003307830000000000000000000000000000000000000000000000000000000000030783030313132323333000000000000000000000000000000000000000000003078303031313232333331323300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c3078303030303132333132330000000000000000000000000000000000000000000000000000000000000000bb36c792b9b45aaf8b848a1392b0d6559202729e000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000003307830000
000000000000000000000000000000000000000000000000000000030783030313132323333000000000000000000000000000000000000000000003078303031313232333331323300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c3078303030303132333132330000000000000000000000000000000000000000000000000000000000000000bb36c792b9b45aaf8b848a1392b0d6559202729e000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000003307830000000000000000000000000000000000000000000000000000000000030783030313132323333000000000000000000000000000000000000000000003078303031313232333331323300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c3078303030303132333132330000000000000000000000000000000000000000" diff --git a/apps/explorer/lib/explorer/smart_contract/reader.ex b/apps/explorer/lib/explorer/smart_contract/reader.ex index 2b7d5e7cc647..804e27ff69b2 100644 --- a/apps/explorer/lib/explorer/smart_contract/reader.ex +++ b/apps/explorer/lib/explorer/smart_contract/reader.ex @@ -291,9 +291,7 @@ defmodule Explorer.SmartContract.Reader do abi_with_method_id |> Enum.filter(&Helper.queriable_method?(&1)) - |> Enum.map( - &fetch_current_value_from_blockchain(&1, abi_with_method_id, contract_address_hash, false, options, from) - ) + |> fetch_current_values_from_blockchain(abi_with_method_id, contract_address_hash, false, options, from) end def read_only_functions_from_abi_with_sender(_, _, _, _), do: [] @@ -356,39 +354,88 @@ defmodule Explorer.SmartContract.Reader do "tuple[#{tuple_types}]" end - def fetch_current_value_from_blockchain( - function, + @spec fetch_current_values_from_blockchain( + any(), + [%{optional(binary()) => any()}], + Explorer.Chain.Hash.t(), + boolean(), + keyword(), + nil | binary() + ) :: [SmartContract.function_description()] + def fetch_current_values_from_blockchain( + 
functions, abi, contract_address_hash, leave_error_as_map, options, from \\ nil ) do - case function do - %{"inputs" => []} -> - method_id = function["method_id"] - args = function["inputs"] - - %{output: outputs, names: names} = - query_function_with_names( - contract_address_hash, - %{method_id: method_id, args: args}, - :regular, - from, - abi, - leave_error_as_map, - options - ) - - function - |> Map.replace!("outputs", outputs) - |> Map.put("abi_outputs", Map.get(function, "outputs", [])) - |> Map.put("names", names) + initial_methods_id_order = Enum.map(functions, &Map.get(&1, "method_id")) + + %{to_be_fetched: to_be_fetched, method_id_to_outputs: method_id_to_outputs, unchanged: unchanged} = + Enum.reduce( + functions, + %{to_be_fetched: %{}, method_id_to_outputs: %{}, unchanged: %{}}, + fn function, + %{ + to_be_fetched: to_be_fetched, + unchanged: unchanged, + method_id_to_outputs: method_id_to_outputs + } -> + case function do + %{"inputs" => []} -> + [%ABI.FunctionSelector{returns: returns, method_id: _method_id}] = ABI.parse_specification([function]) + + outputs = extract_outputs(returns) + + %{ + to_be_fetched: Map.put(to_be_fetched, function["method_id"], function), + unchanged: unchanged, + method_id_to_outputs: Map.put(method_id_to_outputs, function["method_id"], {outputs, function}) + } + + _ -> + %{ + to_be_fetched: to_be_fetched, + unchanged: + Map.put( + unchanged, + function["method_id"], + Map.put(function, "abi_outputs", Map.get(function, "outputs", [])) + ), + method_id_to_outputs: method_id_to_outputs + } + end + end + ) - _ -> - function - |> Map.put("abi_outputs", Map.get(function, "outputs", [])) - end + methods = to_be_fetched |> Enum.map(fn {method_id, _function} -> {method_id, []} end) |> Enum.into(%{}) + + res = + contract_address_hash + |> query_verified_contract(methods, from, leave_error_as_map, abi, options) + + method_id_to_abi_with_fetched_value = + res + |> Enum.map(fn {method_id, _result} -> + {outputs, function} = 
method_id_to_outputs[method_id] + + names = outputs_to_list(function["outputs"]) + + outputs = link_outputs_and_values(res, outputs, method_id) + function = to_be_fetched[method_id] + + {method_id, + function + |> Map.replace!("outputs", outputs) + |> Map.put("abi_outputs", Map.get(function, "outputs", [])) + |> Map.put("names", names)} + end) + |> Enum.into(%{}) + + Enum.map(initial_methods_id_order, fn method_id -> + unchanged[method_id] || method_id_to_abi_with_fetched_value[method_id] + end) end @doc """ From eddaeadc053ae51fcb526eab8e17cf74d5345909 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Fri, 7 Jun 2024 14:16:37 +0300 Subject: [PATCH 068/150] feat: Add feature toggle for WETH filtering (#10208) * feat: Add feature toggle for WETH filtering * Add new envs to docker-compose/envs/common-blockscout.env * Fix tests --- .github/workflows/config.yml | 3 +++ .../lib/explorer/chain/token_transfer.ex | 15 +++++++----- ...sanitize_incorrect_weth_token_transfers.ex | 24 ++++++++++++++----- config/runtime.exs | 3 ++- docker-compose/envs/common-blockscout.env | 4 ++++ 5 files changed, 36 insertions(+), 13 deletions(-) diff --git a/.github/workflows/config.yml b/.github/workflows/config.yml index 420c0827048b..bcf5b142e0de 100644 --- a/.github/workflows/config.yml +++ b/.github/workflows/config.yml @@ -581,6 +581,7 @@ jobs: ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.Mox" ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox" CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }} + WETH_TOKEN_TRANSFERS_FILTERING_ENABLED: "true" test_nethermind_mox_indexer: strategy: fail-fast: false @@ -647,6 +648,7 @@ jobs: ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.Mox" ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox" CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }} + WETH_TOKEN_TRANSFERS_FILTERING_ENABLED: "true" 
test_nethermind_mox_block_scout_web: strategy: fail-fast: false @@ -747,3 +749,4 @@ jobs: ACCOUNT_REDIS_URL: "redis://localhost:6379" SOURCIFY_INTEGRATION_ENABLED: "true" CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }} + WETH_TOKEN_TRANSFERS_FILTERING_ENABLED: "true" diff --git a/apps/explorer/lib/explorer/chain/token_transfer.ex b/apps/explorer/lib/explorer/chain/token_transfer.ex index 69f309410d29..12edc67d89de 100644 --- a/apps/explorer/lib/explorer/chain/token_transfer.ex +++ b/apps/explorer/lib/explorer/chain/token_transfer.ex @@ -534,10 +534,13 @@ defmodule Explorer.Chain.TokenTransfer do WHITELISTED_WETH_CONTRACTS env is the list of whitelisted WETH contracts addresses. """ @spec whitelisted_weth_contract?(any()) :: boolean() - def whitelisted_weth_contract?(contract_address_hash), - do: - (contract_address_hash |> to_string() |> String.downcase()) in Application.get_env( - :explorer, - Explorer.Chain.TokenTransfer - )[:whitelisted_weth_contracts] + def whitelisted_weth_contract?(contract_address_hash) do + env = Application.get_env(:explorer, Explorer.Chain.TokenTransfer) + + if env[:weth_token_transfers_filtering_enabled] do + (contract_address_hash |> to_string() |> String.downcase()) in env[:whitelisted_weth_contracts] + else + true + end + end end diff --git a/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex index 5759f913368e..038ce5fd6d7b 100644 --- a/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex +++ b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex @@ -31,25 +31,37 @@ defmodule Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers do _ -> MigrationStatus.set_status(@migration_name, "started") schedule_batch_migration() - {:ok, %{step: :delete_not_whitelisted_weth_transfers}} + {:ok, %{step: :delete_duplicates}} end end @impl true def 
handle_info(:migrate_batch, %{step: step} = state) do + if step == :delete_not_whitelisted_weth_transfers and + !Application.get_env(:explorer, Explorer.Chain.TokenTransfer)[:weth_token_transfers_filtering_enabled] do + {:stop, :normal, state} + else + process_batch(state) + end + end + + defp process_batch(%{step: step} = state) do case last_unprocessed_identifiers(step) do [] -> case step do - :delete_not_whitelisted_weth_transfers -> + :delete_duplicates -> Logger.info( - "SanitizeIncorrectWETHTokenTransfers deletion of not whitelisted weth transfers finished, continuing with duplicates deletion" + "SanitizeIncorrectWETHTokenTransfers deletion of duplicates finished, continuing with deletion of not whitelisted weth transfers" ) schedule_batch_migration() - {:noreply, %{step: :delete_duplicates}} + {:noreply, %{step: :delete_not_whitelisted_weth_transfers}} + + :delete_not_whitelisted_weth_transfers -> + Logger.info( + "SanitizeIncorrectWETHTokenTransfers deletion of not whitelisted weth transfers finished. Sanitizing is completed." 
+ ) - :delete_duplicates -> - Logger.info("SanitizeIncorrectWETHTokenTransfers migration finished") MigrationStatus.set_status(@migration_name, "completed") {:stop, :normal, state} end diff --git a/config/runtime.exs b/config/runtime.exs index 833ed58e4e49..cd2072166e89 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -584,7 +584,8 @@ config :explorer, Explorer.Utility.MissingBalanceOfToken, window_size: ConfigHelper.parse_integer_env_var("MISSING_BALANCE_OF_TOKENS_WINDOW_SIZE", 100) config :explorer, Explorer.Chain.TokenTransfer, - whitelisted_weth_contracts: ConfigHelper.parse_list_env_var("WHITELISTED_WETH_CONTRACTS", "") + whitelisted_weth_contracts: ConfigHelper.parse_list_env_var("WHITELISTED_WETH_CONTRACTS", ""), + weth_token_transfers_filtering_enabled: ConfigHelper.parse_bool_env_var("WETH_TOKEN_TRANSFERS_FILTERING_ENABLED") ############### ### Indexer ### diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index 08a2e64ba0e6..1cc45e89d0a7 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -379,3 +379,7 @@ TENDERLY_CHAIN_PATH= # MUD_INDEXER_ENABLED= # MUD_DATABASE_URL= # MUD_POOL_SIZE=50 +# WETH_TOKEN_TRANSFERS_FILTERING_ENABLED=false +# WHITELISTED_WETH_CONTRACTS= +# SANITIZE_INCORRECT_WETH_BATCH_SIZE=100 +# SANITIZE_INCORRECT_WETH_CONCURRENCY=1 \ No newline at end of file From 3bea0e842e4bbd32a2608ed8669ea3d6b2ec97a6 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Fri, 7 Jun 2024 14:20:55 +0300 Subject: [PATCH 069/150] perf: replace individual queries with ecto preload (#10203) --- .../controllers/api/v2/address_controller.ex | 5 ++- .../api/v2/smart_contract_controller.ex | 7 +++- .../controllers/api/v2/token_controller.ex | 2 +- .../views/account/api/v2/user_view.ex | 6 ++- .../block_scout_web/views/api/v2/helper.ex | 39 ++++++++++--------- .../api/v2/address_controller_test.exs | 3 +- 
apps/explorer/lib/explorer/chain/address.ex | 4 +- .../proxy/models/implementation.ex | 8 ++++ 8 files changed, 49 insertions(+), 25 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex index 10c0d44a18d9..8170bbb9994e 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex @@ -38,7 +38,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do necessity_by_association: %{ [created_contract_address: :names] => :optional, [from_address: :names] => :optional, - [to_address: :names] => :optional, + [to_address: [:names, :proxy_implementations]] => :optional, :block => :optional, [created_contract_address: :smart_contract] => :optional, [from_address: :smart_contract] => :optional, @@ -71,7 +71,8 @@ defmodule BlockScoutWeb.API.V2.AddressController do @contract_address_preloads [ :smart_contract, :contracts_creation_internal_transaction, - :contracts_creation_transaction + :contracts_creation_transaction, + :proxy_implementations ] @nft_necessity_by_association [ diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex index a1c7d00a5f9c..03d33589e1d8 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex @@ -235,7 +235,12 @@ defmodule BlockScoutWeb.API.V2.SmartContractController do def smart_contracts_list(conn, params) do full_options = - [necessity_by_association: %{[address: :token] => :optional, [address: :names] => :optional, address: :required}] + [ + necessity_by_association: %{ + [address: [:token, :names, 
:proxy_implementations]] => :optional, + address: :required + } + ] |> Keyword.merge(paging_options(params)) |> Keyword.merge(current_filter(params)) |> Keyword.merge(search_query(params)) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex index 6d212582423a..53180c925798 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex @@ -134,7 +134,7 @@ defmodule BlockScoutWeb.API.V2.TokenController do {:not_found, false} <- {:not_found, Chain.erc_20_token?(token)}, {:format, {:ok, holder_address_hash}} <- {:format, Chain.string_to_address_hash(holder_address_hash_string)}, {:ok, false} <- AccessHelper.restricted_access?(holder_address_hash_string, params) do - holder_address = Repo.get_by(Address, hash: holder_address_hash) + holder_address = %Address{Repo.get_by(Address, hash: holder_address_hash) | proxy_implementations: nil} results_plus_one = Instance.token_instances_by_holder_address_hash( diff --git a/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/user_view.ex b/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/user_view.ex index fb86dd26bcdc..bf6f869a224c 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/user_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/user_view.ex @@ -186,7 +186,11 @@ defmodule BlockScoutWeb.Account.Api.V2.UserView do end defp get_address(address_hash) do - case Chain.hash_to_address(address_hash, [necessity_by_association: %{:smart_contract => :optional}], false) do + case Chain.hash_to_address( + address_hash, + [necessity_by_association: %{smart_contract: :optional, proxy_implementations: :optional}], + false + ) do {:ok, address} -> address _ -> nil end diff --git 
a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index 15adc47e935e..2ad947c0412a 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -6,7 +6,6 @@ defmodule BlockScoutWeb.API.V2.Helper do alias Ecto.Association.NotLoaded alias Explorer.Chain alias Explorer.Chain.{Address, Hash} - alias Explorer.Chain.SmartContract.Proxy.Models.Implementation alias Explorer.Chain.Transaction.History.TransactionStats import BlockScoutWeb.Account.AuthController, only: [current_user: 1] @@ -53,30 +52,34 @@ defmodule BlockScoutWeb.API.V2.Helper do @doc """ Gets address with the additional info for api v2 """ - @spec address_with_info(any(), any()) :: nil | %{optional(<<_::32, _::_*8>>) => any()} + @spec address_with_info(any(), any()) :: nil | %{optional(String.t()) => any()} + def address_with_info( + %Address{proxy_implementations: %NotLoaded{}, contract_code: contract_code} = _address, + _address_hash + ) + when not is_nil(contract_code) do + raise "proxy_implementations is not loaded for address" + end + def address_with_info(%Address{} = address, _address_hash) do smart_contract? = Address.smart_contract?(address) - {implementation_address_hashes, implementation_names, implementation_address, implementation_name, - proxy_implementations} = - if smart_contract? 
do - proxy_implementations = Implementation.get_proxy_implementations(address.hash) + {proxy_implementations, implementation_address_hashes, implementation_names, implementation_address, + implementation_name} = + case address.proxy_implementations do + %NotLoaded{} -> + {nil, [], [], nil, nil} - implementation_address_hashes = (proxy_implementations && proxy_implementations.address_hashes) || [] - implementation_names = (proxy_implementations && proxy_implementations.names) || [] + nil -> + {nil, [], [], nil, nil} - implementation_address = - (Enum.count(implementation_address_hashes) > 0 && - implementation_address_hashes - |> Enum.at(0) - |> Address.checksum()) || nil + proxy_implementations -> + address_hashes = proxy_implementations.address_hashes + names = proxy_implementations.names - implementation_name = implementation_names |> Enum.at(0) + address_hash = Enum.at(address_hashes, 0) && address_hashes |> Enum.at(0) |> Address.checksum() - {implementation_address_hashes, implementation_names, implementation_address, implementation_name, - proxy_implementations} - else - {[], [], nil, nil, nil} + {proxy_implementations, address_hashes, names, address_hash, Enum.at(names, 0)} end %{ diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs index e727e6af7e2b..612156a8db6b 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs @@ -162,7 +162,8 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do {:ok, implementation_contract_address_hash} = Chain.string_to_address_hash("0x" <> implementation_contract_address_hash_string) - checksummed_implementation_contract_address_hash = Address.checksum(implementation_contract_address_hash) + checksummed_implementation_contract_address_hash = + 
implementation_contract_address_hash && Address.checksum(implementation_contract_address_hash) insert(:proxy_implementation, proxy_address_hash: proxy_address.hash, diff --git a/apps/explorer/lib/explorer/chain/address.ex b/apps/explorer/lib/explorer/chain/address.ex index 2c3077df75e9..95ce08038f29 100644 --- a/apps/explorer/lib/explorer/chain/address.ex +++ b/apps/explorer/lib/explorer/chain/address.ex @@ -26,6 +26,7 @@ defmodule Explorer.Chain.Address do } alias Explorer.Chain.Cache.{Accounts, NetVersion} + alias Explorer.Chain.SmartContract.Proxy.Models.Implementation @optional_attrs ~w(contract_code fetched_coin_balance fetched_coin_balance_block_number nonce decompiled verified gas_used transactions_count token_transfers_count)a @required_attrs ~w(hash)a @@ -96,6 +97,7 @@ defmodule Explorer.Chain.Address do has_one(:smart_contract, SmartContract, references: :hash) has_one(:token, Token, foreign_key: :contract_address_hash, references: :hash) + has_one(:proxy_implementations, Implementation, foreign_key: :proxy_address_hash, references: :hash) has_one( :contracts_creation_internal_transaction, @@ -359,7 +361,7 @@ defmodule Explorer.Chain.Address do from(a in Address, where: a.fetched_coin_balance > ^0, order_by: [desc: a.fetched_coin_balance, asc: a.hash], - preload: [:names, :smart_contract], + preload: [:names, :smart_contract, :proxy_implementations], select: {a, a.transactions_count} ) diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex index 0bb3fdbd748f..91496e870b91 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/models/implementation.ex @@ -51,6 +51,14 @@ defmodule Explorer.Chain.SmartContract.Proxy.Models.Implementation do field(:address_hashes, {:array, Hash.Address}, null: false) field(:names, {:array, :string}, null: false) + 
belongs_to( + :address, + Address, + foreign_key: :proxy_address_hash, + references: :hash, + define_field: false + ) + timestamps() end From 653f91248ba15dfe41a9f853841ac6f3976994f6 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Fri, 7 Jun 2024 15:21:44 +0400 Subject: [PATCH 070/150] feat: Push relevant entries to the front of bound queue (#10193) * feat: Push relevant entries to the front of bound queue * Refactor realtime? parameter in async_import_remaining_block_data --- .../lib/indexer/block/catchup/fetcher.ex | 34 +++++----- apps/indexer/lib/indexer/block/fetcher.ex | 48 +++++++------- .../lib/indexer/block/realtime/fetcher.ex | 34 +++++----- apps/indexer/lib/indexer/bound_queue.ex | 18 +++++ apps/indexer/lib/indexer/buffered_task.ex | 65 ++++++++++++++----- .../lib/indexer/fetcher/beacon/blob.ex | 4 +- .../lib/indexer/fetcher/block_reward.ex | 6 +- .../indexer/fetcher/coin_balance/catchup.ex | 2 +- .../indexer/fetcher/coin_balance/realtime.ex | 2 +- .../lib/indexer/fetcher/contract_code.ex | 7 +- .../indexer/fetcher/internal_transaction.ex | 6 +- .../pending_block_operations_sanitizer.ex | 2 +- .../fetcher/polygon_zkevm/bridge_l1_tokens.ex | 2 +- .../indexer/fetcher/replaced_transaction.ex | 21 +++--- apps/indexer/lib/indexer/fetcher/token.ex | 6 +- .../lib/indexer/fetcher/token_balance.ex | 25 +++---- .../fetcher/token_instance/realtime.ex | 6 +- .../lib/indexer/fetcher/uncle_block.ex | 9 ++- apps/indexer/lib/indexer/token_balances.ex | 2 +- .../indexer/block/catchup/fetcher_test.exs | 6 +- .../test/indexer/buffered_task_test.exs | 16 ++--- .../test/indexer/fetcher/beacon/blob_test.exs | 2 +- .../indexer/fetcher/block_reward_test.exs | 18 ++--- .../indexer/fetcher/contract_code_test.exs | 9 ++- .../fetcher/replaced_transaction_test.exs | 17 +++-- 25 files changed, 218 insertions(+), 149 deletions(-) diff --git a/apps/indexer/lib/indexer/block/catchup/fetcher.ex 
b/apps/indexer/lib/indexer/block/catchup/fetcher.ex index c8fa267f6dc8..67ab266f98a4 100644 --- a/apps/indexer/lib/indexer/block/catchup/fetcher.ex +++ b/apps/indexer/lib/indexer/block/catchup/fetcher.ex @@ -9,16 +9,16 @@ defmodule Indexer.Block.Catchup.Fetcher do import Indexer.Block.Fetcher, only: [ - async_import_blobs: 1, - async_import_block_rewards: 1, + async_import_blobs: 2, + async_import_block_rewards: 2, async_import_coin_balances: 2, - async_import_created_contract_codes: 1, - async_import_internal_transactions: 1, - async_import_replaced_transactions: 1, - async_import_tokens: 1, - async_import_token_balances: 1, + async_import_created_contract_codes: 2, + async_import_internal_transactions: 2, + async_import_replaced_transactions: 2, + async_import_tokens: 2, + async_import_token_balances: 2, async_import_token_instances: 1, - async_import_uncles: 1, + async_import_uncles: 2, fetch_and_import_range: 2 ] @@ -127,16 +127,18 @@ defmodule Indexer.Block.Catchup.Fetcher do imported, %{block_rewards: %{errors: block_reward_errors}} = options ) do - async_import_block_rewards(block_reward_errors) + realtime? = false + + async_import_block_rewards(block_reward_errors, realtime?) async_import_coin_balances(imported, options) - async_import_created_contract_codes(imported) - async_import_internal_transactions(imported) - async_import_tokens(imported) - async_import_token_balances(imported) - async_import_uncles(imported) - async_import_replaced_transactions(imported) + async_import_created_contract_codes(imported, realtime?) + async_import_internal_transactions(imported, realtime?) + async_import_tokens(imported, realtime?) + async_import_token_balances(imported, realtime?) + async_import_uncles(imported, realtime?) + async_import_replaced_transactions(imported, realtime?) async_import_token_instances(imported) - async_import_blobs(imported) + async_import_blobs(imported, realtime?) 
end defp stream_fetch_and_import(state, ranges) do diff --git a/apps/indexer/lib/indexer/block/fetcher.ex b/apps/indexer/lib/indexer/block/fetcher.ex index 44fa80684839..ff0a34288148 100644 --- a/apps/indexer/lib/indexer/block/fetcher.ex +++ b/apps/indexer/lib/indexer/block/fetcher.ex @@ -364,25 +364,25 @@ defmodule Indexer.Block.Fetcher do def async_import_token_instances(_), do: :ok - def async_import_blobs(%{blocks: blocks}) do + def async_import_blobs(%{blocks: blocks}, realtime?) do timestamps = blocks |> Enum.filter(fn block -> block |> Map.get(:blob_gas_used, 0) > 0 end) |> Enum.map(&Map.get(&1, :timestamp)) if not Enum.empty?(timestamps) do - Blob.async_fetch(timestamps) + Blob.async_fetch(timestamps, realtime?) end end - def async_import_blobs(_), do: :ok + def async_import_blobs(_, _), do: :ok - def async_import_block_rewards([]), do: :ok + def async_import_block_rewards([], _realtime?), do: :ok - def async_import_block_rewards(errors) when is_list(errors) do + def async_import_block_rewards(errors, realtime?) when is_list(errors) do errors |> block_reward_errors_to_block_numbers() - |> BlockReward.async_fetch() + |> BlockReward.async_fetch(realtime?) end def async_import_coin_balances(%{addresses: addresses}, %{ @@ -404,7 +404,7 @@ defmodule Indexer.Block.Fetcher do def async_import_realtime_coin_balances(_), do: :ok - def async_import_created_contract_codes(%{transactions: transactions}) do + def async_import_created_contract_codes(%{transactions: transactions}, realtime?) 
do transactions |> Enum.flat_map(fn %Transaction{ @@ -418,40 +418,40 @@ defmodule Indexer.Block.Fetcher do %Transaction{created_contract_address_hash: nil} -> [] end) - |> ContractCode.async_fetch(10_000) + |> ContractCode.async_fetch(realtime?, 10_000) end - def async_import_created_contract_codes(_), do: :ok + def async_import_created_contract_codes(_, _), do: :ok - def async_import_internal_transactions(%{blocks: blocks}) do + def async_import_internal_transactions(%{blocks: blocks}, realtime?) do blocks |> Enum.map(fn %Block{number: block_number} -> block_number end) - |> InternalTransaction.async_fetch(10_000) + |> InternalTransaction.async_fetch(realtime?, 10_000) end - def async_import_internal_transactions(_), do: :ok + def async_import_internal_transactions(_, _), do: :ok - def async_import_tokens(%{tokens: tokens}) do + def async_import_tokens(%{tokens: tokens}, realtime?) do tokens |> Enum.map(& &1.contract_address_hash) - |> Token.async_fetch() + |> Token.async_fetch(realtime?) end - def async_import_tokens(_), do: :ok + def async_import_tokens(_, _), do: :ok - def async_import_token_balances(%{address_token_balances: token_balances}) do - TokenBalance.async_fetch(token_balances) + def async_import_token_balances(%{address_token_balances: token_balances}, realtime?) do + TokenBalance.async_fetch(token_balances, realtime?) end - def async_import_token_balances(_), do: :ok + def async_import_token_balances(_, _), do: :ok - def async_import_uncles(%{block_second_degree_relations: block_second_degree_relations}) do - UncleBlock.async_fetch_blocks(block_second_degree_relations) + def async_import_uncles(%{block_second_degree_relations: block_second_degree_relations}, realtime?) do + UncleBlock.async_fetch_blocks(block_second_degree_relations, realtime?) 
end - def async_import_uncles(_), do: :ok + def async_import_uncles(_, _), do: :ok - def async_import_replaced_transactions(%{transactions: transactions}) do + def async_import_replaced_transactions(%{transactions: transactions}, realtime?) do transactions |> Enum.flat_map(fn %Transaction{block_hash: %Hash{} = block_hash, nonce: nonce, from_address_hash: %Hash{} = from_address_hash} -> @@ -460,10 +460,10 @@ defmodule Indexer.Block.Fetcher do %Transaction{block_hash: nil} -> [] end) - |> ReplacedTransaction.async_fetch(10_000) + |> ReplacedTransaction.async_fetch(realtime?, 10_000) end - def async_import_replaced_transactions(_), do: :ok + def async_import_replaced_transactions(_, _), do: :ok @doc """ Fills a buffer of L1 token addresses to handle it asynchronously in diff --git a/apps/indexer/lib/indexer/block/realtime/fetcher.ex b/apps/indexer/lib/indexer/block/realtime/fetcher.ex index 0229acf348c3..85c12f4d0fcf 100644 --- a/apps/indexer/lib/indexer/block/realtime/fetcher.ex +++ b/apps/indexer/lib/indexer/block/realtime/fetcher.ex @@ -14,15 +14,15 @@ defmodule Indexer.Block.Realtime.Fetcher do import Indexer.Block.Fetcher, only: [ async_import_realtime_coin_balances: 1, - async_import_blobs: 1, - async_import_block_rewards: 1, - async_import_created_contract_codes: 1, - async_import_internal_transactions: 1, - async_import_replaced_transactions: 1, - async_import_tokens: 1, - async_import_token_balances: 1, + async_import_blobs: 2, + async_import_block_rewards: 2, + async_import_created_contract_codes: 2, + async_import_internal_transactions: 2, + async_import_replaced_transactions: 2, + async_import_tokens: 2, + async_import_token_balances: 2, async_import_token_instances: 1, - async_import_uncles: 1, + async_import_uncles: 2, async_import_polygon_zkevm_bridge_l1_tokens: 1, fetch_and_import_range: 2 ] @@ -452,16 +452,18 @@ defmodule Indexer.Block.Realtime.Fetcher do imported, %{block_rewards: %{errors: block_reward_errors}} ) do + realtime? 
= true + async_import_realtime_coin_balances(imported) - async_import_block_rewards(block_reward_errors) - async_import_created_contract_codes(imported) - async_import_internal_transactions(imported) - async_import_tokens(imported) - async_import_token_balances(imported) + async_import_block_rewards(block_reward_errors, realtime?) + async_import_created_contract_codes(imported, realtime?) + async_import_internal_transactions(imported, realtime?) + async_import_tokens(imported, realtime?) + async_import_token_balances(imported, realtime?) async_import_token_instances(imported) - async_import_uncles(imported) - async_import_replaced_transactions(imported) - async_import_blobs(imported) + async_import_uncles(imported, realtime?) + async_import_replaced_transactions(imported, realtime?) + async_import_blobs(imported, realtime?) async_import_polygon_zkevm_bridge_l1_tokens(imported) end end diff --git a/apps/indexer/lib/indexer/bound_queue.ex b/apps/indexer/lib/indexer/bound_queue.ex index bdce80382e2f..44c1f13ec733 100644 --- a/apps/indexer/lib/indexer/bound_queue.ex +++ b/apps/indexer/lib/indexer/bound_queue.ex @@ -103,6 +103,24 @@ defmodule Indexer.BoundQueue do end end + @doc """ + `push_front/2` items from `items` into `bound_queue` until it is full. + """ + def push_front_until_maximum_size( + %__MODULE__{size: maximum_size, maximum_size: maximum_size} = bound_queue, + remaining + ), + do: {bound_queue, remaining} + + def push_front_until_maximum_size(%__MODULE__{} = bound_queue, [] = remaining), do: {bound_queue, remaining} + + def push_front_until_maximum_size(%__MODULE__{} = bound_queue, [head | tail] = remaining) do + case push_front(bound_queue, head) do + {:ok, new_bound_queue} -> push_front_until_maximum_size(new_bound_queue, tail) + {:error, :maximum_size} -> {bound_queue, remaining} + end + end + @doc """ Shrinks the queue to half its current `size` and sets that as its new `max_size`. 
""" diff --git a/apps/indexer/lib/indexer/buffered_task.ex b/apps/indexer/lib/indexer/buffered_task.ex index 51b9d995afe7..bf6a9dfc820a 100644 --- a/apps/indexer/lib/indexer/buffered_task.ex +++ b/apps/indexer/lib/indexer/buffered_task.ex @@ -74,6 +74,7 @@ defmodule Indexer.BufferedTask do poll: true, metadata: [], current_buffer: [], + current_front_buffer: [], bound_queue: %BoundQueue{}, task_ref_to_batch: %{} @@ -155,9 +156,9 @@ defmodule Indexer.BufferedTask do @doc """ Buffers list of entries for future async execution. """ - @spec buffer(GenServer.name(), entries(), timeout()) :: :ok - def buffer(server, entries, timeout \\ 5000) when is_list(entries) do - GenServer.call(server, {:buffer, entries}, timeout) + @spec buffer(GenServer.name(), entries(), boolean(), timeout()) :: :ok + def buffer(server, entries, front?, timeout \\ 5000) when is_list(entries) do + GenServer.call(server, {:buffer, entries, front?}, timeout) end def child_spec([init_arguments]) do @@ -277,12 +278,12 @@ defmodule Indexer.BufferedTask do {:noreply, drop_task_and_retry(state, ref)} end - def handle_info({:buffer, entries}, state) do - {:noreply, buffer_entries(state, entries)} + def handle_info({:buffer, entries, front?}, state) do + {:noreply, buffer_entries(state, entries, front?)} end - def handle_call({:buffer, entries}, _from, state) do - {:reply, :ok, buffer_entries(state, entries)} + def handle_call({:buffer, entries, front?}, _from, state) do + {:reply, :ok, buffer_entries(state, entries, front?)} end def handle_call( @@ -290,12 +291,13 @@ defmodule Indexer.BufferedTask do _from, %BufferedTask{ current_buffer: current_buffer, + current_front_buffer: current_front_buffer, bound_queue: bound_queue, max_batch_size: max_batch_size, task_ref_to_batch: task_ref_to_batch } = state ) do - count = length(current_buffer) + Enum.count(bound_queue) * max_batch_size + count = length(current_buffer) + length(current_front_buffer) + Enum.count(bound_queue) * max_batch_size {:reply, %{buffer: 
count, tasks: Enum.count(task_ref_to_batch)}, state} end @@ -317,6 +319,15 @@ defmodule Indexer.BufferedTask do {:reply, :ok, new_state} end + def handle_call({:push_front, entries}, _from, state) when is_list(entries) do + new_state = + state + |> push_front(entries) + |> spawn_next_batch() + + {:reply, :ok, new_state} + end + def handle_call(:shrink, _from, %__MODULE__{bound_queue: bound_queue} = state) do {reply, shrunk_state} = case BoundQueue.shrink(bound_queue) do @@ -350,9 +361,13 @@ defmodule Indexer.BufferedTask do |> push_back(new_batch || batch) end - defp buffer_entries(state, []), do: state + defp buffer_entries(state, [], _front?), do: state - defp buffer_entries(state, entries) do + defp buffer_entries(state, entries, true) do + %{state | current_front_buffer: [entries | state.current_front_buffer]} + end + + defp buffer_entries(state, entries, false) do %{state | current_buffer: [entries | state.current_buffer]} end @@ -408,9 +423,17 @@ defmodule Indexer.BufferedTask do GenServer.call(pid, {:push_back, entries}) end - defp push_back(%BufferedTask{bound_queue: bound_queue} = state, entries) when is_list(entries) do + defp push_back(%BufferedTask{} = state, entries), do: push(state, entries, false) + + defp push_front(pid, entries) when is_pid(pid) and is_list(entries) do + GenServer.call(pid, {:push_front, entries}) + end + + defp push_front(%BufferedTask{} = state, entries), do: push(state, entries, true) + + defp push(%BufferedTask{bound_queue: bound_queue} = state, entries, front?) when is_list(entries) do new_bound_queue = - case BoundQueue.push_back_until_maximum_size(bound_queue, entries) do + case push_until_maximum_size(bound_queue, entries, front?) 
do {new_bound_queue, []} -> new_bound_queue @@ -433,6 +456,12 @@ defmodule Indexer.BufferedTask do %BufferedTask{state | bound_queue: new_bound_queue} end + defp push_until_maximum_size(bound_queue, entries, true), + do: BoundQueue.push_front_until_maximum_size(bound_queue, entries) + + defp push_until_maximum_size(bound_queue, entries, false), + do: BoundQueue.push_back_until_maximum_size(bound_queue, entries) + defp take_batch(%BufferedTask{bound_queue: bound_queue, max_batch_size: max_batch_size} = state) do {batch, new_bound_queue} = take_batch(bound_queue, max_batch_size) {batch, %BufferedTask{state | bound_queue: new_bound_queue}} @@ -525,17 +554,19 @@ defmodule Indexer.BufferedTask do callback_module.run(batch, callback_module_state) end - defp flush(%BufferedTask{current_buffer: []} = state) do + defp flush(%BufferedTask{current_buffer: [], current_front_buffer: []} = state) do state |> spawn_next_batch() |> schedule_next() end - defp flush(%BufferedTask{current_buffer: current} = state) do - entries = List.flatten(current) + defp flush(%BufferedTask{current_buffer: buffer, current_front_buffer: front_buffer} = state) do + back_entries = List.flatten(buffer) + front_entries = List.flatten(front_buffer) - %BufferedTask{state | current_buffer: []} - |> push_back(entries) + %BufferedTask{state | current_buffer: [], current_front_buffer: []} + |> push_back(back_entries) + |> push_front(front_entries) |> flush() end end diff --git a/apps/indexer/lib/indexer/fetcher/beacon/blob.ex b/apps/indexer/lib/indexer/fetcher/beacon/blob.ex index d35a680a7d13..aab82cff9ace 100644 --- a/apps/indexer/lib/indexer/fetcher/beacon/blob.ex +++ b/apps/indexer/lib/indexer/fetcher/beacon/blob.ex @@ -25,11 +25,11 @@ defmodule Indexer.Fetcher.Beacon.Blob do @doc """ Asynchronously fetches blobs for given `block_timestamp`. """ - def async_fetch(block_timestamps) do + def async_fetch(block_timestamps, realtime?) 
do if BlobSupervisor.disabled?() do :ok else - BufferedTask.buffer(__MODULE__, block_timestamps |> Enum.map(&entry/1)) + BufferedTask.buffer(__MODULE__, Enum.map(block_timestamps, &entry/1), realtime?) end end diff --git a/apps/indexer/lib/indexer/fetcher/block_reward.ex b/apps/indexer/lib/indexer/fetcher/block_reward.ex index 4928cef1c811..bbe62314b44b 100644 --- a/apps/indexer/lib/indexer/fetcher/block_reward.ex +++ b/apps/indexer/lib/indexer/fetcher/block_reward.ex @@ -31,12 +31,12 @@ defmodule Indexer.Fetcher.BlockReward do @doc """ Asynchronously fetches block rewards for each `t:Explorer.Chain.Explorer.block_number/0`` in `block_numbers`. """ - @spec async_fetch([Block.block_number()]) :: :ok - def async_fetch(block_numbers) when is_list(block_numbers) do + @spec async_fetch([Block.block_number()], boolean()) :: :ok + def async_fetch(block_numbers, realtime?) when is_list(block_numbers) do if BlockRewardSupervisor.disabled?() do :ok else - BufferedTask.buffer(__MODULE__, block_numbers) + BufferedTask.buffer(__MODULE__, block_numbers, realtime?) 
end end diff --git a/apps/indexer/lib/indexer/fetcher/coin_balance/catchup.ex b/apps/indexer/lib/indexer/fetcher/coin_balance/catchup.ex index 48369138963a..8487c7b0d187 100644 --- a/apps/indexer/lib/indexer/fetcher/coin_balance/catchup.ex +++ b/apps/indexer/lib/indexer/fetcher/coin_balance/catchup.ex @@ -30,7 +30,7 @@ defmodule Indexer.Fetcher.CoinBalance.Catchup do else entries = Enum.map(balance_fields, &Helper.entry/1) - BufferedTask.buffer(__MODULE__, entries) + BufferedTask.buffer(__MODULE__, entries, false) end end diff --git a/apps/indexer/lib/indexer/fetcher/coin_balance/realtime.ex b/apps/indexer/lib/indexer/fetcher/coin_balance/realtime.ex index d8c8feb71bf1..ebd055e2f150 100644 --- a/apps/indexer/lib/indexer/fetcher/coin_balance/realtime.ex +++ b/apps/indexer/lib/indexer/fetcher/coin_balance/realtime.ex @@ -24,7 +24,7 @@ defmodule Indexer.Fetcher.CoinBalance.Realtime do def async_fetch_balances(balance_fields) when is_list(balance_fields) do entries = Enum.map(balance_fields, &Helper.entry/1) - BufferedTask.buffer(__MODULE__, entries) + BufferedTask.buffer(__MODULE__, entries, true) end def child_spec(params) do diff --git a/apps/indexer/lib/indexer/fetcher/contract_code.ex b/apps/indexer/lib/indexer/fetcher/contract_code.ex index 891b52676fe9..bd2809da52e0 100644 --- a/apps/indexer/lib/indexer/fetcher/contract_code.ex +++ b/apps/indexer/lib/indexer/fetcher/contract_code.ex @@ -29,11 +29,12 @@ defmodule Indexer.Fetcher.ContractCode do metadata: [fetcher: :code] ] - @spec async_fetch([%{required(:block_number) => Block.block_number(), required(:hash) => Hash.Full.t()}]) :: :ok - def async_fetch(transactions_fields, timeout \\ 5000) when is_list(transactions_fields) do + @spec async_fetch([%{required(:block_number) => Block.block_number(), required(:hash) => Hash.Full.t()}], boolean()) :: + :ok + def async_fetch(transactions_fields, realtime?, timeout \\ 5000) when is_list(transactions_fields) do entries = Enum.map(transactions_fields, &entry/1) - 
BufferedTask.buffer(__MODULE__, entries, timeout) + BufferedTask.buffer(__MODULE__, entries, realtime?, timeout) end @doc false diff --git a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex index 6562d6dc8411..f847af2ebc86 100644 --- a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex +++ b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex @@ -41,12 +41,12 @@ defmodule Indexer.Fetcher.InternalTransaction do *Note*: The internal transactions for individual transactions cannot be paginated, so the total number of internal transactions that could be produced is unknown. """ - @spec async_fetch([Block.block_number()]) :: :ok - def async_fetch(block_numbers, timeout \\ 5000) when is_list(block_numbers) do + @spec async_fetch([Block.block_number()], boolean()) :: :ok + def async_fetch(block_numbers, realtime?, timeout \\ 5000) when is_list(block_numbers) do if InternalTransactionSupervisor.disabled?() do :ok else - BufferedTask.buffer(__MODULE__, block_numbers, timeout) + BufferedTask.buffer(__MODULE__, block_numbers, realtime?, timeout) end end diff --git a/apps/indexer/lib/indexer/fetcher/pending_block_operations_sanitizer.ex b/apps/indexer/lib/indexer/fetcher/pending_block_operations_sanitizer.ex index ead377e20702..61f023bf8a94 100644 --- a/apps/indexer/lib/indexer/fetcher/pending_block_operations_sanitizer.ex +++ b/apps/indexer/lib/indexer/fetcher/pending_block_operations_sanitizer.ex @@ -60,7 +60,7 @@ defmodule Indexer.Fetcher.PendingBlockOperationsSanitizer do |> update([pbo, po, b], set: [block_number: b.number]) |> Repo.update_all([], timeout: @timeout) - InternalTransaction.async_fetch(block_numbers) + InternalTransaction.async_fetch(block_numbers, false) block_numbers end diff --git a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1_tokens.ex b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1_tokens.ex index 034298174f78..c208b9f6c7b0 100644 --- 
a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1_tokens.ex +++ b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1_tokens.ex @@ -63,7 +63,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL1Tokens do |> Enum.map(fn operation -> operation.l1_token_address end) |> Enum.uniq() - BufferedTask.buffer(__MODULE__, l1_token_addresses) + BufferedTask.buffer(__MODULE__, l1_token_addresses, true) end defp defaults do diff --git a/apps/indexer/lib/indexer/fetcher/replaced_transaction.ex b/apps/indexer/lib/indexer/fetcher/replaced_transaction.ex index 88eba340c7c5..152e719340d6 100644 --- a/apps/indexer/lib/indexer/fetcher/replaced_transaction.ex +++ b/apps/indexer/lib/indexer/fetcher/replaced_transaction.ex @@ -25,19 +25,22 @@ defmodule Indexer.Fetcher.ReplacedTransaction do metadata: [fetcher: :replaced_transaction] ] - @spec async_fetch([ - %{ - required(:nonce) => non_neg_integer, - required(:from_address_hash) => Hash.Address.t(), - required(:block_hash) => Hash.Full.t() - } - ]) :: :ok - def async_fetch(transactions_fields, timeout \\ 5000) when is_list(transactions_fields) do + @spec async_fetch( + [ + %{ + required(:nonce) => non_neg_integer, + required(:from_address_hash) => Hash.Address.t(), + required(:block_hash) => Hash.Full.t() + } + ], + boolean() + ) :: :ok + def async_fetch(transactions_fields, realtime?, timeout \\ 5000) when is_list(transactions_fields) do if ReplacedTransactionSupervisor.disabled?() do :ok else entries = Enum.map(transactions_fields, &entry/1) - BufferedTask.buffer(__MODULE__, entries, timeout) + BufferedTask.buffer(__MODULE__, entries, realtime?, timeout) end end diff --git a/apps/indexer/lib/indexer/fetcher/token.ex b/apps/indexer/lib/indexer/fetcher/token.ex index cf92773a5aff..8af89c559617 100644 --- a/apps/indexer/lib/indexer/fetcher/token.ex +++ b/apps/indexer/lib/indexer/fetcher/token.ex @@ -60,9 +60,9 @@ defmodule Indexer.Fetcher.Token do @doc """ Fetches token data asynchronously given a list of 
`t:Explorer.Chain.Token.t/0`s. """ - @spec async_fetch([Address.t()]) :: :ok - def async_fetch(token_contract_addresses) do - BufferedTask.buffer(__MODULE__, token_contract_addresses) + @spec async_fetch([Address.t()], boolean()) :: :ok + def async_fetch(token_contract_addresses, realtime?) do + BufferedTask.buffer(__MODULE__, token_contract_addresses, realtime?) end defp catalog_token(%Token{contract_address_hash: contract_address_hash} = token) do diff --git a/apps/indexer/lib/indexer/fetcher/token_balance.ex b/apps/indexer/lib/indexer/fetcher/token_balance.ex index be3bfafb2eb9..643ed9fdc00a 100644 --- a/apps/indexer/lib/indexer/fetcher/token_balance.ex +++ b/apps/indexer/lib/indexer/fetcher/token_balance.ex @@ -34,22 +34,25 @@ defmodule Indexer.Fetcher.TokenBalance do @max_retries 3 - @spec async_fetch([ - %{ - token_contract_address_hash: Hash.Address.t(), - address_hash: Hash.Address.t(), - block_number: non_neg_integer(), - token_type: String.t(), - token_id: non_neg_integer() - } - ]) :: :ok - def async_fetch(token_balances) do + @spec async_fetch( + [ + %{ + token_contract_address_hash: Hash.Address.t(), + address_hash: Hash.Address.t(), + block_number: non_neg_integer(), + token_type: String.t(), + token_id: non_neg_integer() + } + ], + boolean() + ) :: :ok + def async_fetch(token_balances, realtime?) 
do if TokenBalanceSupervisor.disabled?() do :ok else formatted_params = Enum.map(token_balances, &entry/1) - BufferedTask.buffer(__MODULE__, formatted_params, :infinity) + BufferedTask.buffer(__MODULE__, formatted_params, realtime?, :infinity) end end diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex b/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex index 3c5eabc0b400..57b095458148 100644 --- a/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex +++ b/apps/indexer/lib/indexer/fetcher/token_instance/realtime.ex @@ -74,11 +74,11 @@ defmodule Indexer.Fetcher.TokenInstance.Realtime do |> List.flatten() |> Enum.uniq() - BufferedTask.buffer(__MODULE__, data) + BufferedTask.buffer(__MODULE__, data, true) end def async_fetch(data, _disabled?) do - BufferedTask.buffer(__MODULE__, data) + BufferedTask.buffer(__MODULE__, data, true) end @spec retry_some_instances([map()], boolean(), map()) :: any() @@ -105,7 +105,7 @@ defmodule Indexer.Fetcher.TokenInstance.Realtime do if token_instances_to_refetch != [] do timeout = Application.get_env(:indexer, Indexer.Fetcher.TokenInstance.Realtime)[:retry_timeout] - Process.send_after(__MODULE__, {:buffer, token_instances_to_refetch}, timeout) + Process.send_after(__MODULE__, {:buffer, token_instances_to_refetch, false}, timeout) end end diff --git a/apps/indexer/lib/indexer/fetcher/uncle_block.ex b/apps/indexer/lib/indexer/fetcher/uncle_block.ex index 564d428f7519..1b65dc2966fb 100644 --- a/apps/indexer/lib/indexer/fetcher/uncle_block.ex +++ b/apps/indexer/lib/indexer/fetcher/uncle_block.ex @@ -34,13 +34,16 @@ defmodule Indexer.Fetcher.UncleBlock do Asynchronously fetches `t:Explorer.Chain.Block.t/0` for the given `nephew_hash` and `index` and updates `t:Explorer.Chain.Block.SecondDegreeRelation.t/0` `block_fetched_at`. 
""" - @spec async_fetch_blocks([%{required(:nephew_hash) => Hash.Full.t(), required(:index) => non_neg_integer()}]) :: :ok - def async_fetch_blocks(relations) when is_list(relations) do + @spec async_fetch_blocks( + [%{required(:nephew_hash) => Hash.Full.t(), required(:index) => non_neg_integer()}], + boolean() + ) :: :ok + def async_fetch_blocks(relations, realtime? \\ false) when is_list(relations) do if UncleBlockSupervisor.disabled?() do :ok else entries = Enum.map(relations, &entry/1) - BufferedTask.buffer(__MODULE__, entries) + BufferedTask.buffer(__MODULE__, entries, realtime?) end end diff --git a/apps/indexer/lib/indexer/token_balances.ex b/apps/indexer/lib/indexer/token_balances.ex index da6ecb56e36d..5348bd23826b 100644 --- a/apps/indexer/lib/indexer/token_balances.ex +++ b/apps/indexer/lib/indexer/token_balances.ex @@ -137,7 +137,7 @@ defmodule Indexer.TokenBalances do block_number: token_balance.block_number }) end) - |> TokenBalance.async_fetch() + |> TokenBalance.async_fetch(false) end defp ignore_request_with_errors(%{value: nil, value_fetched_at: nil, error: _error}), do: false diff --git a/apps/indexer/test/indexer/block/catchup/fetcher_test.exs b/apps/indexer/test/indexer/block/catchup/fetcher_test.exs index dc3d820e4b79..fcec1ef1998e 100644 --- a/apps/indexer/test/indexer/block/catchup/fetcher_test.exs +++ b/apps/indexer/test/indexer/block/catchup/fetcher_test.exs @@ -59,7 +59,7 @@ defmodule Indexer.Block.Catchup.FetcherTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, uncles}} -> + {:"$gen_call", from, {:buffer, uncles, _front?}} -> GenServer.reply(from, :ok) send(parent, {:uncles, uncles}) end @@ -434,7 +434,7 @@ defmodule Indexer.Block.Catchup.FetcherTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, block_numbers}} -> + {:"$gen_call", from, {:buffer, block_numbers, _front?}} -> GenServer.reply(from, :ok) send(parent, {:block_numbers, block_numbers}) end @@ -584,7 +584,7 @@ defmodule 
Indexer.Block.Catchup.FetcherTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, block_numbers}} -> + {:"$gen_call", from, {:buffer, block_numbers, _front?}} -> GenServer.reply(from, :ok) send(parent, {:block_numbers, block_numbers}) end diff --git a/apps/indexer/test/indexer/buffered_task_test.exs b/apps/indexer/test/indexer/buffered_task_test.exs index 2d41b5d7a070..5c996cbf1297 100644 --- a/apps/indexer/test/indexer/buffered_task_test.exs +++ b/apps/indexer/test/indexer/buffered_task_test.exs @@ -74,13 +74,13 @@ defmodule Indexer.BufferedTaskTest do refute_receive _ - BufferedTask.buffer(buffer, ~w(12 13 14 15 16)) + BufferedTask.buffer(buffer, ~w(12 13 14 15 16), false) assert_receive {:run, ~w(12 13)}, @assert_receive_timeout assert_receive {:run, ~w(14 15)}, @assert_receive_timeout assert_receive {:run, ~w(16)}, @assert_receive_timeout refute_receive _ - BufferedTask.buffer(buffer, ~w(17)) + BufferedTask.buffer(buffer, ~w(17), false) assert_receive {:run, ~w(17)}, @assert_receive_timeout refute_receive _ end @@ -90,7 +90,7 @@ defmodule Indexer.BufferedTaskTest do {:ok, buffer} = start_buffer(EmptyTask) refute_receive _ - BufferedTask.buffer(buffer, ~w(some more entries)) + BufferedTask.buffer(buffer, ~w(some more entries), false) assert_receive {:run, ~w(some more)}, @assert_receive_timeout assert_receive {:run, ~w(entries)}, @assert_receive_timeout @@ -113,7 +113,7 @@ defmodule Indexer.BufferedTaskTest do Process.register(self(), RetryableTask) {:ok, buffer} = start_buffer(RetryableTask) - BufferedTask.buffer(buffer, [:boom]) + BufferedTask.buffer(buffer, [:boom], false) assert_receive {:run, {0, [:boom]}}, @assert_receive_timeout assert_receive {:run, {1, [:boom]}}, @assert_receive_timeout refute_receive _ @@ -150,7 +150,7 @@ defmodule Indexer.BufferedTaskTest do Process.register(self(), RetryableTask) {:ok, buffer} = start_buffer(RetryableTask) - BufferedTask.buffer(buffer, [1, 2, 3]) + BufferedTask.buffer(buffer, [1, 2, 3], false) 
assert_receive {:run, {0, [1, 2]}}, @assert_receive_timeout assert_receive {:run, {0, [3]}}, @assert_receive_timeout assert_receive {:run, {1, [1, 2]}}, @assert_receive_timeout @@ -172,9 +172,9 @@ defmodule Indexer.BufferedTaskTest do assert %{buffer: 0, tasks: 0} = BufferedTask.debug_count(buffer) - BufferedTask.buffer(buffer, [{:sleep, 1_000}]) - BufferedTask.buffer(buffer, [{:sleep, 1_000}]) - BufferedTask.buffer(buffer, [{:sleep, 1_000}]) + BufferedTask.buffer(buffer, [{:sleep, 1_000}], false) + BufferedTask.buffer(buffer, [{:sleep, 1_000}], false) + BufferedTask.buffer(buffer, [{:sleep, 1_000}], false) Process.sleep(200) assert %{buffer: buffer, tasks: tasks} = BufferedTask.debug_count(buffer) diff --git a/apps/indexer/test/indexer/fetcher/beacon/blob_test.exs b/apps/indexer/test/indexer/fetcher/beacon/blob_test.exs index b744b1423aee..c0816d017831 100644 --- a/apps/indexer/test/indexer/fetcher/beacon/blob_test.exs +++ b/apps/indexer/test/indexer/fetcher/beacon/blob_test.exs @@ -148,7 +148,7 @@ defmodule Indexer.Fetcher.Beacon.BlobTest do BlobSupervisor.Case.start_supervised!() - assert :ok = Indexer.Fetcher.Beacon.Blob.async_fetch([block_a.timestamp]) + assert :ok = Indexer.Fetcher.Beacon.Blob.async_fetch([block_a.timestamp], false) wait_for_results(fn -> Repo.one!(from(blob in Blob, where: blob.hash == ^blob_hash_a)) diff --git a/apps/indexer/test/indexer/fetcher/block_reward_test.exs b/apps/indexer/test/indexer/fetcher/block_reward_test.exs index 82bb612feed2..10b8f250dbde 100644 --- a/apps/indexer/test/indexer/fetcher/block_reward_test.exs +++ b/apps/indexer/test/indexer/fetcher/block_reward_test.exs @@ -126,7 +126,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, balance_fields}} -> + {:"$gen_call", from, {:buffer, balance_fields, _front?}} -> GenServer.reply(from, :ok) send(parent, {:balance_fields, balance_fields}) end @@ -134,7 +134,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do 
Process.register(pid, Indexer.Fetcher.CoinBalance.Catchup) - assert :ok = BlockReward.async_fetch([block_number]) + assert :ok = BlockReward.async_fetch([block_number], false) wait_for_tasks(BlockReward) @@ -199,7 +199,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, balance_fields}} -> + {:"$gen_call", from, {:buffer, balance_fields, _front?}} -> GenServer.reply(from, :ok) send(parent, {:balance_fields, balance_fields}) end @@ -207,7 +207,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do Process.register(pid, Indexer.Fetcher.CoinBalance.Catchup) - assert :ok = BlockReward.async_fetch([block_number]) + assert :ok = BlockReward.async_fetch([block_number], false) wait_for_tasks(BlockReward) @@ -260,7 +260,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do } end) - assert :ok = BlockReward.async_fetch([block_number]) + assert :ok = BlockReward.async_fetch([block_number], false) wait_for_tasks(BlockReward) @@ -334,7 +334,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, balance_fields}} -> + {:"$gen_call", from, {:buffer, balance_fields, _front?}} -> GenServer.reply(from, :ok) send(parent, {:balance_fields, balance_fields}) end @@ -424,7 +424,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, balance_fields}} -> + {:"$gen_call", from, {:buffer, balance_fields, _front?}} -> GenServer.reply(from, :ok) send(parent, {:balance_fields, balance_fields}) end @@ -508,7 +508,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, balance_fields}} -> + {:"$gen_call", from, {:buffer, balance_fields, _front?}} -> GenServer.reply(from, :ok) send(parent, {:balance_fields, balance_fields}) end @@ -645,7 +645,7 @@ defmodule Indexer.Fetcher.BlockRewardTest do pid = spawn_link(fn -> receive do - {:"$gen_call", from, {:buffer, 
balance_fields}} -> + {:"$gen_call", from, {:buffer, balance_fields, _front?}} -> GenServer.reply(from, :ok) send(parent, {:balance_fields, balance_fields}) end diff --git a/apps/indexer/test/indexer/fetcher/contract_code_test.exs b/apps/indexer/test/indexer/fetcher/contract_code_test.exs index a269ee997b6e..b29568ea43fa 100644 --- a/apps/indexer/test/indexer/fetcher/contract_code_test.exs +++ b/apps/indexer/test/indexer/fetcher/contract_code_test.exs @@ -86,9 +86,12 @@ defmodule Indexer.Fetcher.ContractCodeTest do ContractCode.Supervisor.Case.start_supervised!(json_rpc_named_arguments: json_rpc_named_arguments) assert :ok = - ContractCode.async_fetch([ - %{created_contract_address_hash: address, block_number: block_number, hash: hash} - ]) + ContractCode.async_fetch( + [ + %{created_contract_address_hash: address, block_number: block_number, hash: hash} + ], + false + ) fetched_address = wait(fn -> diff --git a/apps/indexer/test/indexer/fetcher/replaced_transaction_test.exs b/apps/indexer/test/indexer/fetcher/replaced_transaction_test.exs index ab3da8d1e6af..ea4ac2d2194c 100644 --- a/apps/indexer/test/indexer/fetcher/replaced_transaction_test.exs +++ b/apps/indexer/test/indexer/fetcher/replaced_transaction_test.exs @@ -71,13 +71,16 @@ defmodule Indexer.Fetcher.ReplacedTransactionTest do ReplacedTransaction.Supervisor.Case.start_supervised!() assert :ok = - ReplacedTransaction.async_fetch([ - %{ - block_hash: mined_transaction.block_hash, - nonce: mined_transaction.nonce, - from_address_hash: mined_transaction.from_address_hash - } - ]) + ReplacedTransaction.async_fetch( + [ + %{ + block_hash: mined_transaction.block_hash, + nonce: mined_transaction.nonce, + from_address_hash: mined_transaction.from_address_hash + } + ], + false + ) found_replaced_transaction = wait(fn -> From e29f3e2a527acd144c113796f8ecd0bd46035187 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Fri, 7 Jun 2024 15:01:54 +0300 Subject: [PATCH 071/150] feat: Get ERC-1155 token name from 
contractURI getter fallback (#10187) * Get ERC-1155 token name from contractURI getter fallback * Process review comments * Change doc and spec for stream_cataloged_tokens function --- apps/explorer/lib/explorer/chain.ex | 8 +- apps/explorer/lib/explorer/chain/token.ex | 1 - .../lib/explorer/token/metadata_retriever.ex | 504 ++++++++++++++++-- .../test/explorer/chain/token_test.exs | 8 +- apps/explorer/test/explorer/chain_test.exs | 16 +- .../token/metadata_retriever_test.exs | 495 ++++++++++++++++- apps/indexer/lib/indexer/fetcher/token.ex | 6 +- .../indexer/fetcher/token_instance/helper.ex | 2 +- .../token_instance/metadata_retriever.ex | 366 ------------- .../lib/indexer/fetcher/token_updater.ex | 3 +- .../metadata_retriever_test.exs | 485 ----------------- 11 files changed, 983 insertions(+), 911 deletions(-) delete mode 100644 apps/indexer/lib/indexer/fetcher/token_instance/metadata_retriever.ex delete mode 100644 apps/indexer/test/indexer/fetcher/token_instance/metadata_retriever_test.exs diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 8f283b4d792e..80bb57d31c2a 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -3722,16 +3722,16 @@ defmodule Explorer.Chain do end @doc """ - Streams a list of token contract addresses that have been cataloged. + Streams a list of tokens that have been cataloged. """ - @spec stream_cataloged_token_contract_address_hashes( + @spec stream_cataloged_tokens( initial :: accumulator, - reducer :: (entry :: Hash.Address.t(), accumulator -> accumulator), + reducer :: (entry :: Token.t(), accumulator -> accumulator), some_time_ago_updated :: integer(), limited? :: boolean() ) :: {:ok, accumulator} when accumulator: term() - def stream_cataloged_token_contract_address_hashes(initial, reducer, some_time_ago_updated \\ 2880, limited? \\ false) + def stream_cataloged_tokens(initial, reducer, some_time_ago_updated \\ 2880, limited? 
\\ false) when is_function(reducer, 2) do some_time_ago_updated |> Token.cataloged_tokens() diff --git a/apps/explorer/lib/explorer/chain/token.ex b/apps/explorer/lib/explorer/chain/token.ex index 9ac2db994bc1..d5d8838d06df 100644 --- a/apps/explorer/lib/explorer/chain/token.ex +++ b/apps/explorer/lib/explorer/chain/token.ex @@ -177,7 +177,6 @@ defmodule Explorer.Chain.Token do from( token in __MODULE__, - select: token.contract_address_hash, where: token.cataloged == true and token.updated_at <= ^some_time_ago_date ) end diff --git a/apps/explorer/lib/explorer/token/metadata_retriever.ex b/apps/explorer/lib/explorer/token/metadata_retriever.ex index 2ce09b959327..b8ab4afaa510 100644 --- a/apps/explorer/lib/explorer/token/metadata_retriever.ex +++ b/apps/explorer/lib/explorer/token/metadata_retriever.ex @@ -7,7 +7,21 @@ defmodule Explorer.Token.MetadataRetriever do alias Explorer.{Chain, Repo} alias Explorer.Chain.{Hash, Token} + alias Explorer.Helper, as: ExplorerHelper alias Explorer.SmartContract.Reader + alias HTTPoison.{Error, Response} + + @no_uri_error "no uri" + @vm_execution_error "VM execution error" + @ipfs_protocol "ipfs://" + @invalid_base64_data "invalid data:application/json;base64" + + # https://eips.ethereum.org/EIPS/eip-1155#metadata + @erc1155_token_id_placeholder "{id}" + + @max_error_length 255 + + @ignored_hosts ["localhost", "127.0.0.1", "0.0.0.0", "", nil] @contract_abi [ %{ @@ -84,33 +98,61 @@ defmodule Explorer.Token.MetadataRetriever do ], "payable" => false, "type" => "function" + }, + %{ + "name" => "contractURI", + "type" => "function", + "inputs" => [], + "outputs" => [ + %{ + "name" => "", + "type" => "string", + "internalType" => "string" + } + ], + "stateMutability" => "view" } ] - # 18160ddd = keccak256(totalSupply()) # 313ce567 = keccak256(decimals()) + @decimals_signature "313ce567" # 06fdde03 = keccak256(name()) + @name_signature "06fdde03" # 95d89b41 = keccak256(symbol()) + @symbol_signature "95d89b41" + # 18160ddd = 
keccak256(totalSupply()) + @total_supply_signature "18160ddd" @contract_functions %{ - "18160ddd" => [], - "313ce567" => [], - "06fdde03" => [], - "95d89b41" => [] + @decimals_signature => [], + @name_signature => [], + @symbol_signature => [], + @total_supply_signature => [] + } + + # e8a3d485 = keccak256(contractURI()) + @erc1155_contract_uri_signature "e8a3d485" + @erc1155_contract_uri_function %{ + @erc1155_contract_uri_signature => [] } - # 18160ddd = keccak256(totalSupply()) @total_supply_function %{ - "18160ddd" => [] + @total_supply_signature => [] } @doc """ - Read functions below in the Smart Contract given the Contract's address hash. + Read functions below in the token's smart contract given the contract's address hash. * totalSupply * decimals * name * symbol + if a token is of ERC-1155 type: + + * contractURI + + is added. + This function will return a map with functions that were read in the Smart Contract, for instance: * Given that all functions were read: @@ -130,17 +172,19 @@ defmodule Explorer.Token.MetadataRetriever do It will retry to fetch each function in the Smart Contract according to :token_functions_reader_max_retries configured in the application env case one of them raised error. 
""" - @spec get_functions_of([String.t()] | Hash.t() | String.t()) :: map() | {:ok, [map()]} - def get_functions_of(hashes) when is_list(hashes) do + @spec get_functions_of([Token.t()] | Token.t()) :: map() | {:ok, [map()]} + def get_functions_of(tokens) when is_list(tokens) do requests = - hashes - |> Enum.flat_map(fn hash -> + tokens + |> Enum.flat_map(fn token -> @contract_functions |> Enum.map(fn {method_id, args} -> - %{contract_address: hash, method_id: method_id, args: args} + %{contract_address: token.contract_address_hash, method_id: method_id, args: args} end) end) + hashes = Enum.map(tokens, fn token -> token.contract_address_hash end) + updated_at = DateTime.utc_now() fetched_result = @@ -150,7 +194,7 @@ defmodule Explorer.Token.MetadataRetriever do |> Enum.zip(hashes) |> Enum.map(fn {result, hash} -> formatted_result = - ["name", "totalSupply", "decimals", "symbol"] + [@name_signature, @total_supply_signature, @decimals_signature, @symbol_signature] |> Enum.zip(result) |> format_contract_functions_result(hash) @@ -159,22 +203,40 @@ defmodule Explorer.Token.MetadataRetriever do |> Map.put(:updated_at, updated_at) end) - {:ok, fetched_result} - end - - def get_functions_of(%Hash{byte_count: unquote(Hash.Address.byte_count())} = address) do - address_string = Hash.to_string(address) + erc_1155_tokens = tokens |> Enum.filter(fn token -> token.type == "ERC-1155" end) + + processed_result = + if Enum.empty?(erc_1155_tokens) do + fetched_result + else + result = + fetched_result + |> Enum.filter(fn token -> + !Map.has_key?(token, :name) && + Enum.any?(erc_1155_tokens, fn erc_1155_token -> + erc_1155_token.contract_address_hash == token.contract_address_hash + end) + end) + + result + |> Enum.map(fn token -> + try_to_fetch_erc_1155_name(%{}, token.contract_address_hash, "ERC-1155") + end) + |> Enum.filter(fn result -> result != %{} end) + end - get_functions_of(address_string) + {:ok, processed_result} end - def get_functions_of(contract_address_hash) when 
is_binary(contract_address_hash) do - res = + def get_functions_of(%Token{contract_address_hash: contract_address_hash, type: type}) do + base_metadata = contract_address_hash |> fetch_functions_from_contract(@contract_functions) |> format_contract_functions_result(contract_address_hash) - if res == %{} do + metadata = try_to_fetch_erc_1155_name(base_metadata, contract_address_hash, type) + + if metadata == %{} do token_to_update = Token |> Repo.get_by(contract_address_hash: contract_address_hash) @@ -182,7 +244,36 @@ defmodule Explorer.Token.MetadataRetriever do set_skip_metadata(token_to_update) end - res + metadata + end + + defp try_to_fetch_erc_1155_name(base_metadata, contract_address_hash, token_type) do + if token_type == "ERC-1155" && !Map.has_key?(base_metadata, :name) do + erc_1155_name_uri = + contract_address_hash + |> fetch_functions_from_contract(@erc1155_contract_uri_function) + |> format_contract_functions_result(contract_address_hash) + + case erc_1155_name_uri do + %{:name => name} when is_binary(name) -> + uri = {:ok, [name]} + + with {:ok, %{metadata: metadata}} <- fetch_json(uri, nil, nil, false), + true <- Map.has_key?(metadata, "name"), + false <- is_nil(metadata["name"]) do + name_metadata = %{:name => metadata["name"]} + + Map.merge(base_metadata, name_metadata) + else + _ -> base_metadata + end + + _ -> + base_metadata + end + else + base_metadata + end end def set_skip_metadata(token_to_update) do @@ -267,14 +358,11 @@ defmodule Explorer.Token.MetadataRetriever do |> handle_large_strings end - defp atomized_key("decimals"), do: :decimals - defp atomized_key("name"), do: :name - defp atomized_key("symbol"), do: :symbol - defp atomized_key("totalSupply"), do: :total_supply - defp atomized_key("313ce567"), do: :decimals - defp atomized_key("06fdde03"), do: :name - defp atomized_key("95d89b41"), do: :symbol - defp atomized_key("18160ddd"), do: :total_supply + defp atomized_key(@name_signature), do: :name + defp 
atomized_key(@symbol_signature), do: :symbol + defp atomized_key(@decimals_signature), do: :decimals + defp atomized_key(@total_supply_signature), do: :total_supply + defp atomized_key(@erc1155_contract_uri_signature), do: :name # It's a temp fix to store tokens that have names and/or symbols with characters that the database # doesn't accept. See https://github.com/blockscout/blockscout/issues/669 for more info. @@ -315,8 +403,10 @@ defmodule Explorer.Token.MetadataRetriever do end end + @spec format_according_contract_address_hash(Hash.Address.t()) :: binary defp format_according_contract_address_hash(contract_address_hash) do - String.slice(contract_address_hash, 0, 6) + contract_address_hash_string = Hash.to_string(contract_address_hash) + String.slice(contract_address_hash_string, 0, 6) end defp handle_large_strings(%{name: name, symbol: symbol} = contract_functions) do @@ -350,4 +440,352 @@ defmodule Explorer.Token.MetadataRetriever do defp remove_null_bytes(string) do String.replace(string, "\0", "") end + + @spec ipfs_link(uid :: any()) :: String.t() + defp ipfs_link(uid) do + base_url = + :indexer + |> Application.get_env(:ipfs) + |> Keyword.get(:gateway_url) + |> String.trim_trailing("/") + + url = base_url <> "/" <> uid + + ipfs_params = Application.get_env(:indexer, :ipfs) + + if ipfs_params[:gateway_url_param_location] == :query do + gateway_url_param_key = ipfs_params[:gateway_url_param_key] + gateway_url_param_value = ipfs_params[:gateway_url_param_value] + + if gateway_url_param_key && gateway_url_param_value do + url <> "?#{gateway_url_param_key}=#{gateway_url_param_value}" + else + url + end + else + url + end + end + + @spec ipfs_headers() :: [{binary(), binary()}] + defp ipfs_headers do + ipfs_params = Application.get_env(:indexer, :ipfs) + + if ipfs_params[:gateway_url_param_location] == :header do + gateway_url_param_key = ipfs_params[:gateway_url_param_key] + gateway_url_param_value = ipfs_params[:gateway_url_param_value] + + if 
gateway_url_param_key && gateway_url_param_value do + [{gateway_url_param_key, gateway_url_param_value}] + else + [] + end + else + [] + end + end + + @doc """ + Fetch/parse metadata using smart-contract's response + """ + @spec fetch_json(any, binary() | nil, binary() | nil, boolean) :: + {:error, binary} | {:error_code, any} | {:ok, %{metadata: any}} + def fetch_json(uri, token_id \\ nil, hex_token_id \\ nil, from_base_uri? \\ false) + + def fetch_json({:ok, [""]}, _token_id, _hex_token_id, _from_base_uri?) do + {:error, @no_uri_error} + end + + def fetch_json(uri, token_id, hex_token_id, from_base_uri?) do + fetch_json_from_uri(uri, false, token_id, hex_token_id, from_base_uri?) + end + + defp fetch_json_from_uri(_uri, _ipfs?, _token_id, _hex_token_id, _from_base_uri?) + + defp fetch_json_from_uri({:error, error}, _ipfs?, _token_id, _hex_token_id, _from_base_uri?) do + error = to_string(error) + + if error =~ "execution reverted" or error =~ @vm_execution_error do + {:error, @vm_execution_error} + else + Logger.warn(["Unknown metadata format error #{inspect(error)}."], fetcher: :token_instances) + + # truncate error since it will be stored in DB + {:error, truncate_error(error)} + end + end + + # CIDv0 IPFS links # https://docs.ipfs.tech/concepts/content-addressing/#version-0-v0 + defp fetch_json_from_uri({:ok, ["Qm" <> _ = result]}, _, token_id, hex_token_id, from_base_uri?) do + if String.length(result) == 46 do + ipfs? = true + fetch_json_from_uri({:ok, [ipfs_link(result)]}, ipfs?, token_id, hex_token_id, from_base_uri?) + else + Logger.warn(["Unknown metadata format result #{inspect(result)}."], fetcher: :token_instances) + + {:error, truncate_error(result)} + end + end + + defp fetch_json_from_uri({:ok, ["'" <> token_uri]}, ipfs?, token_id, hex_token_id, from_base_uri?) do + token_uri = token_uri |> String.split("'") |> List.first() + fetch_metadata_inner(token_uri, ipfs?, token_id, hex_token_id, from_base_uri?) 
+ end + + defp fetch_json_from_uri({:ok, ["http://" <> _ = token_uri]}, ipfs?, token_id, hex_token_id, from_base_uri?) do + fetch_metadata_inner(token_uri, ipfs?, token_id, hex_token_id, from_base_uri?) + end + + defp fetch_json_from_uri({:ok, ["https://" <> _ = token_uri]}, ipfs?, token_id, hex_token_id, from_base_uri?) do + fetch_metadata_inner(token_uri, ipfs?, token_id, hex_token_id, from_base_uri?) + end + + defp fetch_json_from_uri( + {:ok, [type = "data:application/json;utf8," <> json]}, + ipfs?, + token_id, + hex_token_id, + from_base_uri? + ) do + fetch_json_from_json_string(json, ipfs?, token_id, hex_token_id, from_base_uri?, type) + end + + defp fetch_json_from_uri( + {:ok, [type = "data:application/json," <> json]}, + ipfs?, + token_id, + hex_token_id, + from_base_uri? + ) do + fetch_json_from_json_string(json, ipfs?, token_id, hex_token_id, from_base_uri?, type) + end + + defp fetch_json_from_uri( + {:ok, ["data:application/json;base64," <> base64_encoded_json]}, + ipfs?, + token_id, + hex_token_id, + from_base_uri? + ) do + case Base.decode64(base64_encoded_json) do + {:ok, base64_decoded} -> + fetch_json_from_uri({:ok, [base64_decoded]}, ipfs?, token_id, hex_token_id, from_base_uri?) + + _ -> + {:error, @invalid_base64_data} + end + rescue + e -> + Logger.warn( + [ + "Unknown metadata format base64 #{inspect(base64_encoded_json)}.", + Exception.format(:error, e, __STACKTRACE__) + ], + fetcher: :token_instances + ) + + {:error, @invalid_base64_data} + end + + defp fetch_json_from_uri({:ok, ["#{@ipfs_protocol}ipfs/" <> right]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) do + fetch_from_ipfs(right, hex_token_id) + end + + defp fetch_json_from_uri({:ok, ["ipfs/" <> right]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) do + fetch_from_ipfs(right, hex_token_id) + end + + defp fetch_json_from_uri({:ok, [@ipfs_protocol <> right]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) 
do + fetch_from_ipfs(right, hex_token_id) + end + + defp fetch_json_from_uri({:ok, [json]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) do + json = ExplorerHelper.decode_json(json, true) + + check_type(json, hex_token_id) + rescue + e -> + Logger.warn(["Unknown metadata format #{inspect(json)}.", Exception.format(:error, e, __STACKTRACE__)], + fetcher: :token_instances + ) + + {:error, "invalid json"} + end + + defp fetch_json_from_uri(uri, _ipfs?, _token_id, _hex_token_id, _from_base_uri?) do + Logger.warn(["Unknown metadata uri format #{inspect(uri)}."], fetcher: :token_instances) + + {:error, "unknown metadata uri format"} + end + + defp fetch_json_from_json_string(json, ipfs?, token_id, hex_token_id, from_base_uri?, type) do + decoded_json = URI.decode(json) + + fetch_json_from_uri({:ok, [decoded_json]}, ipfs?, token_id, hex_token_id, from_base_uri?) + rescue + e -> + Logger.warn(["Unknown metadata format #{inspect(json)}.", Exception.format(:error, e, __STACKTRACE__)], + fetcher: :token_instances + ) + + {:error, "invalid #{type}"} + end + + defp fetch_from_ipfs(ipfs_uid, hex_token_id) do + ipfs_url = ipfs_link(ipfs_uid) + ipfs? = true + fetch_metadata_inner(ipfs_url, ipfs?, nil, hex_token_id) + end + + defp fetch_metadata_inner(uri, ipfs?, token_id, hex_token_id, from_base_uri? \\ false) + + defp fetch_metadata_inner(uri, ipfs?, token_id, hex_token_id, from_base_uri?) do + prepared_uri = substitute_token_id_to_token_uri(uri, token_id, hex_token_id, from_base_uri?) 
+ fetch_metadata_from_uri(prepared_uri, ipfs?, hex_token_id) + rescue + e -> + Logger.warn( + ["Could not prepare token uri #{inspect(uri)}.", Exception.format(:error, e, __STACKTRACE__)], + fetcher: :token_instances + ) + + {:error, "preparation error"} + end + + def fetch_metadata_from_uri(uri, ipfs?, hex_token_id \\ nil) do + case Mix.env() != :test && URI.parse(uri) do + %URI{host: host} when host in @ignored_hosts -> + {:error, "ignored host #{host}"} + + _ -> + fetch_metadata_from_uri_request(uri, hex_token_id, ipfs?) + end + end + + defp fetch_metadata_from_uri_request(uri, hex_token_id, ipfs?) do + headers = if ipfs?, do: ipfs_headers(), else: [] + + case Application.get_env(:explorer, :http_adapter).get(uri, headers, + recv_timeout: 30_000, + follow_redirect: true, + hackney: [pool: :token_instance_fetcher] + ) do + {:ok, %Response{body: body, status_code: 200, headers: response_headers}} -> + content_type = get_content_type_from_headers(response_headers) + + check_content_type(content_type, uri, hex_token_id, body) + + {:ok, %Response{body: body, status_code: code}} -> + Logger.debug( + ["Request to token uri: #{inspect(uri)} failed with code #{code}. 
Body:", inspect(body)], + fetcher: :token_instances + ) + + {:error_code, code} + + {:error, %Error{reason: reason}} -> + Logger.warn( + ["Request to token uri failed: #{inspect(uri)}.", inspect(reason)], + fetcher: :token_instances + ) + + {:error, reason |> inspect() |> truncate_error()} + end + rescue + e -> + Logger.warn( + ["Could not send request to token uri #{inspect(uri)}.", Exception.format(:error, e, __STACKTRACE__)], + fetcher: :token_instances + ) + + {:error, "request error"} + end + + defp check_content_type(content_type, uri, hex_token_id, body) do + image = image?(content_type) + video = video?(content_type) + + if content_type && (image || video) do + json = if image, do: %{"image" => uri}, else: %{"animation_url" => uri} + + check_type(json, nil) + else + json = ExplorerHelper.decode_json(body, true) + + check_type(json, hex_token_id) + end + end + + defp get_content_type_from_headers(headers) do + {_, content_type} = + Enum.find(headers, fn {header_name, _header_value} -> + header_name == "Content-Type" + end) || {nil, nil} + + content_type + end + + defp image?(content_type) do + content_type && String.starts_with?(content_type, "image/") + end + + defp video?(content_type) do + content_type && String.starts_with?(content_type, "video/") + end + + defp check_type(json, nil) when is_map(json) do + {:ok, %{metadata: json}} + end + + defp check_type(json, hex_token_id) when is_map(json) do + metadata = + case json + |> Jason.encode!() + |> String.replace(@erc1155_token_id_placeholder, hex_token_id) + |> Jason.decode() do + {:ok, map} -> + map + + _ -> + json + end + + {:ok, %{metadata: metadata}} + end + + defp check_type(_, _) do + {:error, "wrong metadata type"} + end + + defp substitute_token_id_to_token_uri(base_uri, nil, _empty_token_id, true) do + base_uri + end + + defp substitute_token_id_to_token_uri(base_uri, token_id, _empty_token_id, true) do + if String.ends_with?(base_uri, "/") do + base_uri <> to_string(token_id) + else + base_uri 
<> "/" <> to_string(token_id) + end + end + + defp substitute_token_id_to_token_uri(token_uri, _token_id, empty_token_id, _from_base_uri?) + when empty_token_id in [nil, ""], + do: token_uri + + defp substitute_token_id_to_token_uri(token_uri, _token_id, hex_token_id, _from_base_uri?) do + String.replace(token_uri, @erc1155_token_id_placeholder, hex_token_id) + end + + @doc """ + Truncate error string to @max_error_length symbols + """ + @spec truncate_error(binary()) :: binary() + def truncate_error(error) do + if String.length(error) > @max_error_length - 2 do + String.slice(error, 0, @max_error_length - 3) <> "..." + else + error + end + end end diff --git a/apps/explorer/test/explorer/chain/token_test.exs b/apps/explorer/test/explorer/chain/token_test.exs index 42eb7fecad49..d58a7912c2e4 100644 --- a/apps/explorer/test/explorer/chain/token_test.exs +++ b/apps/explorer/test/explorer/chain/token_test.exs @@ -13,7 +13,9 @@ defmodule Explorer.Chain.TokenTest do token = insert(:token, cataloged: true, updated_at: hours_ago_date) insert(:token, cataloged: false) - assert Repo.all(Token.cataloged_tokens()) == [token.contract_address_hash] + [token_from_db] = Repo.all(Token.cataloged_tokens()) + + assert token_from_db.contract_address_hash == token.contract_address_hash end test "filter tokens by updated_at field" do @@ -23,7 +25,9 @@ defmodule Explorer.Chain.TokenTest do token = insert(:token, cataloged: true, updated_at: hours_ago_date) insert(:token, cataloged: true) - assert Repo.all(Token.cataloged_tokens()) == [token.contract_address_hash] + [token_from_db] = Repo.all(Token.cataloged_tokens()) + + assert token_from_db.contract_address_hash == token.contract_address_hash end end end diff --git a/apps/explorer/test/explorer/chain_test.exs b/apps/explorer/test/explorer/chain_test.exs index 2b71b6ef9ec8..eca2a97a09ac 100644 --- a/apps/explorer/test/explorer/chain_test.exs +++ b/apps/explorer/test/explorer/chain_test.exs @@ -3491,13 +3491,14 @@ defmodule 
Explorer.ChainTest do assert Chain.stream_uncataloged_token_contract_address_hashes([], &[&1 | &2]) == {:ok, [uncatalog_address]} end - describe "stream_cataloged_token_contract_address_hashes/2" do + describe "stream_cataloged_tokens/2" do test "reduces with given reducer and accumulator" do today = DateTime.utc_now() yesterday = Timex.shift(today, days: -1) - %Token{contract_address_hash: catalog_address} = insert(:token, cataloged: true, updated_at: yesterday) + token = insert(:token, cataloged: true, updated_at: yesterday) insert(:token, cataloged: false) - assert Chain.stream_cataloged_token_contract_address_hashes([], &[&1 | &2], 1) == {:ok, [catalog_address]} + {:ok, [token_from_func]} = Chain.stream_cataloged_tokens([], &[&1 | &2], 1) + assert token_from_func.contract_address_hash == token.contract_address_hash end test "sorts the tokens by updated_at in ascending order" do @@ -3513,7 +3514,14 @@ defmodule Explorer.ChainTest do |> Enum.sort(&(Timex.to_unix(&1.updated_at) < Timex.to_unix(&2.updated_at))) |> Enum.map(& &1.contract_address_hash) - assert Chain.stream_cataloged_token_contract_address_hashes([], &(&2 ++ [&1]), 12) == {:ok, expected_response} + {:ok, response} = Chain.stream_cataloged_tokens([], &(&2 ++ [&1]), 12) + + formatted_response = + response + |> Enum.sort(&(Timex.to_unix(&1.updated_at) < Timex.to_unix(&2.updated_at))) + |> Enum.map(& &1.contract_address_hash) + + assert formatted_response == expected_response end end diff --git a/apps/explorer/test/explorer/token/metadata_retriever_test.exs b/apps/explorer/test/explorer/token/metadata_retriever_test.exs index 404ae713bb9f..97d59bd1dd26 100644 --- a/apps/explorer/test/explorer/token/metadata_retriever_test.exs +++ b/apps/explorer/test/explorer/token/metadata_retriever_test.exs @@ -3,6 +3,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do use Explorer.DataCase alias Explorer.Token.MetadataRetriever + alias Plug.Conn import Mox @@ -56,7 +57,7 @@ defmodule 
Explorer.Token.MetadataRetrieverTest do decimals: 18 } - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected end test "returns results for multiple coins" do @@ -112,7 +113,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do total_supply: 1_000_000_000_000_000_000, decimals: 18 } - ]} = MetadataRetriever.get_functions_of([token.contract_address_hash, token.contract_address_hash]) + ]} = MetadataRetriever.get_functions_of([token, token]) end test "returns only the functions that were read without error" do @@ -184,7 +185,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do total_supply: 1_000_000_000_000_000_000 } - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected end test "considers the contract address formatted hash when it is an invalid string" do @@ -234,7 +235,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do symbol: "BNT" } - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected end test "considers the symbol nil when it is an invalid string" do @@ -283,7 +284,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do symbol: nil } - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected Application.put_env(:explorer, :token_functions_reader_max_retries, original) end @@ -337,7 +338,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do symbol: "BNT" } - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected end test "shortens strings larger than 255 characters with unicode graphemes" do @@ -389,7 +390,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do symbol: "BNT" } - assert 
MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected end test "retries when some function gave error" do @@ -455,7 +456,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do decimals: 18 } - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected end test "retries according to the configured number" do @@ -526,7 +527,7 @@ defmodule Explorer.Token.MetadataRetrieverTest do end ) - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == %{ + assert MetadataRetriever.get_functions_of(token) == %{ name: "Bancor", total_supply: 1_000_000_000_000_000_000 } @@ -579,6 +580,480 @@ defmodule Explorer.Token.MetadataRetrieverTest do total_supply: 1_000_000_000_000_000_000_000_000 } - assert MetadataRetriever.get_functions_of(token.contract_address_hash) == expected + assert MetadataRetriever.get_functions_of(token) == expected + end + + describe "fetch_json/4" do + setup do + bypass = Bypass.open() + + {:ok, bypass: bypass} + end + + test "returns {:error, @no_uri_error} when empty uri is passed" do + error = {:error, "no uri"} + token_id = "TOKEN_ID" + hex_token_id = "HEX_TOKEN_ID" + from_base_uri = true + + result = MetadataRetriever.fetch_json({:ok, [""]}, token_id, hex_token_id, from_base_uri) + + assert result == error + end + + test "returns {:error, @vm_execution_error} when 'execution reverted' error passed in uri" do + uri_error = {:error, "something happened: execution reverted"} + token_id = "TOKEN_ID" + hex_token_id = "HEX_TOKEN_ID" + from_base_uri = true + result_error = {:error, "VM execution error"} + + result = MetadataRetriever.fetch_json(uri_error, token_id, hex_token_id, from_base_uri) + + assert result == result_error + end + + test "returns {:error, @vm_execution_error} when 'VM execution error' error passed in uri" do + error = {:error, "VM execution error"} + 
token_id = "TOKEN_ID" + hex_token_id = "HEX_TOKEN_ID" + from_base_uri = true + + result = MetadataRetriever.fetch_json(error, token_id, hex_token_id, from_base_uri) + + assert result == error + end + + test "returns {:error, error} when all other errors passed in uri" do + error = {:error, "Some error"} + token_id = "TOKEN_ID" + hex_token_id = "HEX_TOKEN_ID" + from_base_uri = true + + result = MetadataRetriever.fetch_json(error, token_id, hex_token_id, from_base_uri) + + assert result == error + end + + test "returns {:error, truncated_error} when long error passed in uri" do + error = + {:error, + "ERROR: Unable to establish a connection to the database server. The database server may be offline, or there could be a network issue preventing access. Please ensure that the database server is running and that the network configuration is correct. Additionally, check the database credentials and permissions to ensure they are valid. If the issue persists, contact your system administrator for further assistance. Error code: DB_CONN_FAILED_101234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890"} + + token_id = "TOKEN_ID" + hex_token_id = "HEX_TOKEN_ID" + from_base_uri = true + + truncated_error = + {:error, + "ERROR: Unable to establish a connection to the database server. The database server may be offline, or there could be a network issue preventing access. 
Please ensure that the database server is running and that the network configuration is correct. Ad..."} + + result = MetadataRetriever.fetch_json(error, token_id, hex_token_id, from_base_uri) + + assert result == truncated_error + end + + test "Constructs IPFS link with query param" do + configuration = Application.get_env(:indexer, :ipfs) + + Application.put_env(:indexer, :ipfs, + gateway_url: Keyword.get(configuration, :gateway_url), + gateway_url_param_location: :query, + gateway_url_param_key: "x-apikey", + gateway_url_param_value: "mykey" + ) + + data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" + + result = %{ + "name" => "asda", + "description" => "asda", + "salePrice" => 34, + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "collectionId" => "1871_1665123820823" + } + + Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) + + Explorer.Mox.HTTPoison + |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP?x-apikey=mykey", + _headers, + _options -> + {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} + end) + + assert {:ok, + %{ + metadata: %{ + "collectionId" => "1871_1665123820823", + "description" => "asda", + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "name" => "asda", + "salePrice" => 34 + } + }} == MetadataRetriever.fetch_json({:ok, [data]}) + + Application.put_env(:explorer, :http_adapter, HTTPoison) + Application.put_env(:indexer, :ipfs, configuration) + end + + test "Constructs IPFS link with no query param, if gateway_url_param_location is invalid" do + configuration = Application.get_env(:indexer, :ipfs) + + Application.put_env(:indexer, :ipfs, + gateway_url: Keyword.get(configuration, :gateway_url), + gateway_url_param_location: :query2, + gateway_url_param_key: "x-apikey", + gateway_url_param_value: "mykey" + ) + + data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" + + result = %{ + "name" => "asda", + "description" => 
"asda", + "salePrice" => 34, + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "collectionId" => "1871_1665123820823" + } + + Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) + + Explorer.Mox.HTTPoison + |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP", _headers, _options -> + {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} + end) + + assert {:ok, + %{ + metadata: %{ + "collectionId" => "1871_1665123820823", + "description" => "asda", + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "name" => "asda", + "salePrice" => 34 + } + }} == MetadataRetriever.fetch_json({:ok, [data]}) + + Application.put_env(:explorer, :http_adapter, HTTPoison) + Application.put_env(:indexer, :ipfs, configuration) + end + + test "Constructs IPFS link with additional header" do + configuration = Application.get_env(:indexer, :ipfs) + + Application.put_env(:indexer, :ipfs, + gateway_url: Keyword.get(configuration, :gateway_url), + gateway_url_param_location: :header, + gateway_url_param_key: "x-apikey", + gateway_url_param_value: "mykey" + ) + + data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" + + result = %{ + "name" => "asda", + "description" => "asda", + "salePrice" => 34, + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "collectionId" => "1871_1665123820823" + } + + Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) + + Explorer.Mox.HTTPoison + |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP", + [{"x-apikey", "mykey"}], + _options -> + {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} + end) + + assert {:ok, + %{ + metadata: %{ + "collectionId" => "1871_1665123820823", + "description" => "asda", + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "name" => "asda", + "salePrice" => 34 + } + }} == MetadataRetriever.fetch_json({:ok, [data]}) + + 
Application.put_env(:explorer, :http_adapter, HTTPoison) + Application.put_env(:indexer, :ipfs, configuration) + end + + test "fetches json with latin1 encoding", %{bypass: bypass} do + path = "/api/card/55265" + + json = """ + { + "name": "Sérgio Mendonça" + } + """ + + Bypass.expect(bypass, "GET", path, fn conn -> + Conn.resp(conn, 200, json) + end) + + assert {:ok, %{metadata: %{"name" => "Sérgio Mendonça"}}} == + MetadataRetriever.fetch_json({:ok, ["http://localhost:#{bypass.port}#{path}"]}) + end + + test "fetches json metadata when HTTP status 301", %{bypass: bypass} do + path = "/1302" + + attributes = """ + [ + {"trait_type": "Mouth", "value": "Discomfort"}, + {"trait_type": "Background", "value": "Army Green"}, + {"trait_type": "Eyes", "value": "Wide Eyed"}, + {"trait_type": "Fur", "value": "Black"}, + {"trait_type": "Earring", "value": "Silver Hoop"}, + {"trait_type": "Hat", "value": "Sea Captain's Hat"} + ] + """ + + json = """ + { + "attributes": #{attributes} + } + """ + + Bypass.expect(bypass, "GET", path, fn conn -> + Conn.resp(conn, 200, json) + end) + + {:ok, %{metadata: metadata}} = + MetadataRetriever.fetch_metadata_from_uri("http://localhost:#{bypass.port}#{path}", []) + + assert Map.get(metadata, "attributes") == Jason.decode!(attributes) + end + + test "decodes json file in tokenURI" do + data = + {:ok, + [ + 
"data:application/json,{\"name\":\"Home%20Address%20-%200x0000000000C1A6066c6c8B9d63e9B6E8865dC117\",\"description\":\"This%20NFT%20can%20be%20redeemed%20on%20HomeWork%20to%20grant%20a%20controller%20the%20exclusive%20right%20to%20deploy%20contracts%20with%20arbitrary%20bytecode%20to%20the%20designated%20home%20address.\",\"image\":\"data:image/svg+xml;charset=utf-8;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNDQgNzIiPjxzdHlsZT48IVtDREFUQVsuQntzdHJva2UtbGluZWpvaW46cm91bmR9LkN7c3Ryb2tlLW1pdGVybGltaXQ6MTB9LkR7c3Ryb2tlLXdpZHRoOjJ9LkV7ZmlsbDojOWI5YjlhfS5Ge3N0cm9rZS1saW5lY2FwOnJvdW5kfV1dPjwvc3R5bGU+PGcgdHJhbnNmb3JtPSJtYXRyaXgoMS4wMiAwIDAgMS4wMiA4LjEgMCkiPjxwYXRoIGZpbGw9IiNmZmYiIGQ9Ik0xOSAzMmgzNHYyNEgxOXoiLz48ZyBzdHJva2U9IiMwMDAiIGNsYXNzPSJCIEMgRCI+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI1IDQwaDl2MTZoLTl6Ii8+PHBhdGggZmlsbD0iIzkyZDNmNSIgZD0iTTQwIDQwaDh2N2gtOHoiLz48cGF0aCBmaWxsPSIjZWE1YTQ3IiBkPSJNNTMgMzJIMTl2LTFsMTYtMTYgMTggMTZ6Ii8+PHBhdGggZmlsbD0ibm9uZSIgZD0iTTE5IDMyaDM0djI0SDE5eiIvPjxwYXRoIGZpbGw9IiNlYTVhNDciIGQ9Ik0yOSAyMWwtNSA1di05aDV6Ii8+PC9nPjwvZz48ZyB0cmFuc2Zvcm09Im1hdHJpeCguODQgMCAwIC44NCA2NSA1KSI+PHBhdGggZD0iTTkuNSAyMi45bDQuOCA2LjRhMy4xMiAzLjEyIDAgMCAxLTMgMi4ybC00LjgtNi40Yy4zLTEuNCAxLjYtMi40IDMtMi4yeiIgZmlsbD0iI2QwY2ZjZSIvPjxwYXRoIGZpbGw9IiMwMTAxMDEiIGQ9Ik00MS43IDM4LjVsNS4xLTYuNSIvPjxwYXRoIGQ9Ik00Mi45IDI3LjhMMTguNCA1OC4xIDI0IDYybDIxLjgtMjcuMyAyLjMtMi44eiIgY2xhc3M9IkUiLz48cGF0aCBmaWxsPSIjMDEwMTAxIiBkPSJNNDMuNCAyOS4zbC00LjcgNS44Ii8+PHBhdGggZD0iTTQ2LjggMzJjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uNy0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4ycy0zLjYgOS45LS4zIDEyLjUiIGNsYXNzPSJFIi8+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI3LjMgMjZsMTEuOCAxNS43IDMuNCAyLjQgOS4xIDE0LjQtMy4yIDIuMy0xIC43LTEwLjItMTMuNi0xLjMtMy45LTExLjgtMTUuN3oiLz48cGF0aCBkPSJNMTIgMTkuOWw1LjkgNy45IDEwLjItNy42LTMuNC00LjVzNi44LTUuMSAxMC43LTQuNWMwIDAtNi42LTMtMTMuMyAxLjFTMTIgMTkuOSAxMiAxOS45eiIgY2xhc3M9IkUiLz48ZyBmaWxsPSJub25lIiBzdHJva2U9IiMwMDAiIGNs
YXNzPSJCIEMgRCI+PHBhdGggZD0iTTUyIDU4LjlMNDAuOSA0My4ybC0zLjEtMi4zLTEwLjYtMTQuNy0yLjkgMi4yIDEwLjYgMTQuNyAxLjEgMy42IDExLjUgMTUuNXpNMTIuNSAxOS44bDUuOCA4IDEwLjMtNy40LTMuMy00LjZzNi45LTUgMTAuOC00LjNjMCAwLTYuNi0zLjEtMTMuMy45cy0xMC4zIDcuNC0xMC4zIDcuNHptLTIuNiAyLjlsNC43IDYuNWMtLjUgMS4zLTEuNyAyLjEtMyAyLjJsLTQuNy02LjVjLjMtMS40IDEuNi0yLjQgMy0yLjJ6Ii8+PHBhdGggZD0iTTQxLjMgMzguNWw1LjEtNi41bS0zLjUtMi43bC00LjYgNS44bTguMS0zLjFjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uOC0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4yLTMuNCA0LjMtMy42IDkuOS0uMyAxMi41IiBjbGFzcz0iRiIvPjxwYXRoIGQ9Ik0zMC44IDQ0LjRMMTkgNTguOWw0IDMgMTAtMTIuNyIgY2xhc3M9IkYiLz48L2c+PC9nPjwvc3ZnPg==\"}" + ]} + + assert MetadataRetriever.fetch_json(data) == + {:ok, + %{ + metadata: %{ + "description" => + "This NFT can be redeemed on HomeWork to grant a controller the exclusive right to deploy contracts with arbitrary bytecode to the designated home address.", + "image" => + "data:image/svg+xml;charset=utf-8;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNDQgNzIiPjxzdHlsZT48IVtDREFUQVsuQntzdHJva2UtbGluZWpvaW46cm91bmR9LkN7c3Ryb2tlLW1pdGVybGltaXQ6MTB9LkR7c3Ryb2tlLXdpZHRoOjJ9LkV7ZmlsbDojOWI5YjlhfS5Ge3N0cm9rZS1saW5lY2FwOnJvdW5kfV1dPjwvc3R5bGU+PGcgdHJhbnNmb3JtPSJtYXRyaXgoMS4wMiAwIDAgMS4wMiA4LjEgMCkiPjxwYXRoIGZpbGw9IiNmZmYiIGQ9Ik0xOSAzMmgzNHYyNEgxOXoiLz48ZyBzdHJva2U9IiMwMDAiIGNsYXNzPSJCIEMgRCI+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI1IDQwaDl2MTZoLTl6Ii8+PHBhdGggZmlsbD0iIzkyZDNmNSIgZD0iTTQwIDQwaDh2N2gtOHoiLz48cGF0aCBmaWxsPSIjZWE1YTQ3IiBkPSJNNTMgMzJIMTl2LTFsMTYtMTYgMTggMTZ6Ii8+PHBhdGggZmlsbD0ibm9uZSIgZD0iTTE5IDMyaDM0djI0SDE5eiIvPjxwYXRoIGZpbGw9IiNlYTVhNDciIGQ9Ik0yOSAyMWwtNSA1di05aDV6Ii8+PC9nPjwvZz48ZyB0cmFuc2Zvcm09Im1hdHJpeCguODQgMCAwIC44NCA2NSA1KSI+PHBhdGggZD0iTTkuNSAyMi45bDQuOCA2LjRhMy4xMiAzLjEyIDAgMCAxLTMgMi4ybC00LjgtNi40Yy4zLTEuNCAxLjYtMi40IDMtMi4yeiIgZmlsbD0iI2QwY2ZjZSIvPjxwYXRoIGZpbGw9IiMwMTAxMDEiIGQ9Ik00MS43IDM4LjVsNS4xLTYuNSIvPjxwYXRoIGQ9Ik00Mi45IDI3LjhMMTguNCA1OC4xID
I0IDYybDIxLjgtMjcuMyAyLjMtMi44eiIgY2xhc3M9IkUiLz48cGF0aCBmaWxsPSIjMDEwMTAxIiBkPSJNNDMuNCAyOS4zbC00LjcgNS44Ii8+PHBhdGggZD0iTTQ2LjggMzJjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uNy0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4ycy0zLjYgOS45LS4zIDEyLjUiIGNsYXNzPSJFIi8+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI3LjMgMjZsMTEuOCAxNS43IDMuNCAyLjQgOS4xIDE0LjQtMy4yIDIuMy0xIC43LTEwLjItMTMuNi0xLjMtMy45LTExLjgtMTUuN3oiLz48cGF0aCBkPSJNMTIgMTkuOWw1LjkgNy45IDEwLjItNy42LTMuNC00LjVzNi44LTUuMSAxMC43LTQuNWMwIDAtNi42LTMtMTMuMyAxLjFTMTIgMTkuOSAxMiAxOS45eiIgY2xhc3M9IkUiLz48ZyBmaWxsPSJub25lIiBzdHJva2U9IiMwMDAiIGNsYXNzPSJCIEMgRCI+PHBhdGggZD0iTTUyIDU4LjlMNDAuOSA0My4ybC0zLjEtMi4zLTEwLjYtMTQuNy0yLjkgMi4yIDEwLjYgMTQuNyAxLjEgMy42IDExLjUgMTUuNXpNMTIuNSAxOS44bDUuOCA4IDEwLjMtNy40LTMuMy00LjZzNi45LTUgMTAuOC00LjNjMCAwLTYuNi0zLjEtMTMuMy45cy0xMC4zIDcuNC0xMC4zIDcuNHptLTIuNiAyLjlsNC43IDYuNWMtLjUgMS4zLTEuNyAyLjEtMyAyLjJsLTQuNy02LjVjLjMtMS40IDEuNi0yLjQgMy0yLjJ6Ii8+PHBhdGggZD0iTTQxLjMgMzguNWw1LjEtNi41bS0zLjUtMi43bC00LjYgNS44bTguMS0zLjFjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uOC0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4yLTMuNCA0LjMtMy42IDkuOS0uMyAxMi41IiBjbGFzcz0iRiIvPjxwYXRoIGQ9Ik0zMC44IDQ0LjRMMTkgNTguOWw0IDMgMTAtMTIuNyIgY2xhc3M9IkYiLz48L2c+PC9nPjwvc3ZnPg==", + "name" => "Home Address - 0x0000000000C1A6066c6c8B9d63e9B6E8865dC117" + } + }} + end + + test "decodes base64 encoded json file in tokenURI" do + data = + {:ok, + [ + 
"data:application/json;base64,eyJuYW1lIjogIi54ZGFpIiwgImRlc2NyaXB0aW9uIjogIlB1bmsgRG9tYWlucyBkaWdpdGFsIGlkZW50aXR5LiBWaXNpdCBodHRwczovL3B1bmsuZG9tYWlucy8iLCAiaW1hZ2UiOiAiZGF0YTppbWFnZS9zdmcreG1sO2Jhc2U2NCxQSE4yWnlCNGJXeHVjejBpYUhSMGNEb3ZMM2QzZHk1M015NXZjbWN2TWpBd01DOXpkbWNpSUhacFpYZENiM2c5SWpBZ01DQTFNREFnTlRBd0lpQjNhV1IwYUQwaU5UQXdJaUJvWldsbmFIUTlJalV3TUNJK1BHUmxabk0rUEd4cGJtVmhja2R5WVdScFpXNTBJR2xrUFNKbmNtRmtJaUI0TVQwaU1DVWlJSGt4UFNJd0pTSWdlREk5SWpFd01DVWlJSGt5UFNJd0pTSStQSE4wYjNBZ2IyWm1jMlYwUFNJd0pTSWdjM1I1YkdVOUluTjBiM0F0WTI5c2IzSTZjbWRpS0RVNExERTNMREV4TmlrN2MzUnZjQzF2Y0dGamFYUjVPakVpSUM4K1BITjBiM0FnYjJabWMyVjBQU0l4TURBbElpQnpkSGxzWlQwaWMzUnZjQzFqYjJ4dmNqcHlaMklvTVRFMkxESTFMREUzS1R0emRHOXdMVzl3WVdOcGRIazZNU0lnTHo0OEwyeHBibVZoY2tkeVlXUnBaVzUwUGp3dlpHVm1jejQ4Y21WamRDQjRQU0l3SWlCNVBTSXdJaUIzYVdSMGFEMGlOVEF3SWlCb1pXbG5hSFE5SWpVd01DSWdabWxzYkQwaWRYSnNLQ05uY21Ga0tTSXZQangwWlhoMElIZzlJalV3SlNJZ2VUMGlOVEFsSWlCa2IyMXBibUZ1ZEMxaVlYTmxiR2x1WlQwaWJXbGtaR3hsSWlCbWFXeHNQU0ozYUdsMFpTSWdkR1Y0ZEMxaGJtTm9iM0k5SW0xcFpHUnNaU0lnWm05dWRDMXphWHBsUFNKNExXeGhjbWRsSWo0dWVHUmhhVHd2ZEdWNGRENDhkR1Y0ZENCNFBTSTFNQ1VpSUhrOUlqY3dKU0lnWkc5dGFXNWhiblF0WW1GelpXeHBibVU5SW0xcFpHUnNaU0lnWm1sc2JEMGlkMmhwZEdVaUlIUmxlSFF0WVc1amFHOXlQU0p0YVdSa2JHVWlQbkIxYm1zdVpHOXRZV2x1Y3p3dmRHVjRkRDQ4TDNOMlp6ND0ifQ==" + ]} + + assert MetadataRetriever.fetch_json(data) == + {:ok, + %{ + metadata: %{ + "name" => ".xdai", + "description" => "Punk Domains digital identity. 
Visit https://punk.domains/", + "image" => + "data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCA1MDAgNTAwIiB3aWR0aD0iNTAwIiBoZWlnaHQ9IjUwMCI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJncmFkIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIwJSI+PHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6cmdiKDU4LDE3LDExNik7c3RvcC1vcGFjaXR5OjEiIC8+PHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjpyZ2IoMTE2LDI1LDE3KTtzdG9wLW9wYWNpdHk6MSIgLz48L2xpbmVhckdyYWRpZW50PjwvZGVmcz48cmVjdCB4PSIwIiB5PSIwIiB3aWR0aD0iNTAwIiBoZWlnaHQ9IjUwMCIgZmlsbD0idXJsKCNncmFkKSIvPjx0ZXh0IHg9IjUwJSIgeT0iNTAlIiBkb21pbmFudC1iYXNlbGluZT0ibWlkZGxlIiBmaWxsPSJ3aGl0ZSIgdGV4dC1hbmNob3I9Im1pZGRsZSIgZm9udC1zaXplPSJ4LWxhcmdlIj4ueGRhaTwvdGV4dD48dGV4dCB4PSI1MCUiIHk9IjcwJSIgZG9taW5hbnQtYmFzZWxpbmU9Im1pZGRsZSIgZmlsbD0id2hpdGUiIHRleHQtYW5jaG9yPSJtaWRkbGUiPnB1bmsuZG9tYWluczwvdGV4dD48L3N2Zz4=" + } + }} + end + + test "decodes base64 encoded json file (with unicode string) in tokenURI" do + data = + {:ok, + [ + "data:application/json;base64,eyJkZXNjcmlwdGlvbiI6ICJQdW5rIERvbWFpbnMgZGlnaXRhbCBpZGVudGl0eSDDry4gVmlzaXQgaHR0cHM6Ly9wdW5rLmRvbWFpbnMvIn0=" + ]} + + assert MetadataRetriever.fetch_json(data) == + {:ok, + %{ + metadata: %{ + "description" => "Punk Domains digital identity ï. 
Visit https://punk.domains/" + } + }} + end + + test "fetches image from ipfs link directly", %{bypass: bypass} do + path = "/ipfs/bafybeig6nlmyzui7llhauc52j2xo5hoy4lzp6442lkve5wysdvjkizxonu" + + json = """ + { + "image": "https://ipfs.io/ipfs/bafybeig6nlmyzui7llhauc52j2xo5hoy4lzp6442lkve5wysdvjkizxonu" + } + """ + + Bypass.expect(bypass, "GET", path, fn conn -> + Conn.resp(conn, 200, json) + end) + + data = + {:ok, + [ + "http://localhost:#{bypass.port}#{path}" + ]} + + assert {:ok, + %{ + metadata: %{ + "image" => "https://ipfs.io/ipfs/bafybeig6nlmyzui7llhauc52j2xo5hoy4lzp6442lkve5wysdvjkizxonu" + } + }} == MetadataRetriever.fetch_json(data) + end + + test "Fetches metadata from ipfs", %{bypass: bypass} do + path = "/ipfs/bafybeid4ed2ua7fwupv4nx2ziczr3edhygl7ws3yx6y2juon7xakgj6cfm/51.json" + + json = """ + { + "image": "ipfs://bafybeihxuj3gxk7x5p36amzootyukbugmx3pw7dyntsrohg3se64efkuga/51.png" + } + """ + + Bypass.expect(bypass, "GET", path, fn conn -> + Conn.resp(conn, 200, json) + end) + + data = + {:ok, + [ + "http://localhost:#{bypass.port}#{path}" + ]} + + {:ok, + %{ + metadata: metadata + }} = MetadataRetriever.fetch_json(data) + + assert "ipfs://bafybeihxuj3gxk7x5p36amzootyukbugmx3pw7dyntsrohg3se64efkuga/51.png" == Map.get(metadata, "image") + end + + test "Fetches metadata from '${url}'", %{bypass: bypass} do + path = "/data/8/8578.json" + + data = + {:ok, + [ + "'http://localhost:#{bypass.port}#{path}'" + ]} + + json = """ + { + "attributes": [ + {"trait_type": "Character", "value": "Blue Suit Boxing Glove"}, + {"trait_type": "Face", "value": "Wink"}, + {"trait_type": "Hat", "value": "Blue"}, + {"trait_type": "Background", "value": "Red Carpet"} + ], + "image": "https://cards.collecttrumpcards.com/cards/0c68b1ab6.jpg", + "name": "Trump Digital Trading Card #8578", + "tokeId": 8578 + } + """ + + Bypass.expect(bypass, "GET", path, fn conn -> + Conn.resp(conn, 200, json) + end) + + assert {:ok, + %{ + metadata: Jason.decode!(json) + }} == 
MetadataRetriever.fetch_json(data) + end + + test "Process custom execution reverted" do + data = + {:error, + "(3) execution reverted: Nonexistent token (0x08c379a0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000114e6f6e6578697374656e7420746f6b656e000000000000000000000000000000)"} + + assert {:error, "VM execution error"} == MetadataRetriever.fetch_json(data) + end + + test "Process CIDv0 IPFS links" do + data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" + + result = %{ + "name" => "asda", + "description" => "asda", + "salePrice" => 34, + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "collectionId" => "1871_1665123820823" + } + + Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) + + Explorer.Mox.HTTPoison + |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP", _headers, _options -> + {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} + end) + + assert {:ok, + %{ + metadata: %{ + "collectionId" => "1871_1665123820823", + "description" => "asda", + "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", + "name" => "asda", + "salePrice" => 34 + } + }} == MetadataRetriever.fetch_json({:ok, [data]}) + + Application.put_env(:explorer, :http_adapter, HTTPoison) + end + + test "Process URI directly from link", %{bypass: bypass} do + path = "/api/dejobio/v1/nftproduct/1" + + json = """ + { + "image": "https:\/\/cdn.discordapp.com\/attachments\/1008567215739650078\/1080111780858187796\/savechives_a_dragon_playing_football_in_a_city_full_of_flowers__0739cc42-aae1-4909-a964-3f9c0ed1a9ed.png", + "external_url": "https:\/\/dejob.io\/blue-reign-the-dragon-football-champion-of-the-floral-city\/", + "name": "Blue Reign: The Dragon Football Champion of the Floral City", + "description": "Test", + "attributes": [ + { + "trait_type": "Product Type", + "value": "Book" + }, + { + "display_type": 
"number", + "trait_type": "Total Sold", + "value": "0" + }, + { + "display_type": "number", + "trait_type": "Success Sold", + "value": "0" + }, + { + "max_value": "100", + "trait_type": "Success Rate", + "value": "0" + } + ] + } + """ + + Bypass.expect(bypass, "GET", path, fn conn -> + Conn.resp(conn, 200, json) + end) + + assert {:ok, + %{ + metadata: Jason.decode!(json) + }} == + MetadataRetriever.fetch_json({:ok, ["http://localhost:#{bypass.port}#{path}"]}) + end end end diff --git a/apps/indexer/lib/indexer/fetcher/token.ex b/apps/indexer/lib/indexer/fetcher/token.ex index 8af89c559617..9d4236b0231d 100644 --- a/apps/indexer/lib/indexer/fetcher/token.ex +++ b/apps/indexer/lib/indexer/fetcher/token.ex @@ -65,11 +65,11 @@ defmodule Indexer.Fetcher.Token do BufferedTask.buffer(__MODULE__, token_contract_addresses, realtime?) end - defp catalog_token(%Token{contract_address_hash: contract_address_hash} = token) do + defp catalog_token(token) do token_params = - contract_address_hash + token |> MetadataRetriever.get_functions_of() - |> Map.put(:cataloged, true) + |> (&if(&1 == %{}, do: &1, else: Map.put(&1, :cataloged, true))).() {:ok, _} = Chain.update_token(token, token_params) :ok diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex index 2608d377aca6..cdc4254864ff 100644 --- a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex +++ b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex @@ -4,7 +4,7 @@ defmodule Indexer.Fetcher.TokenInstance.Helper do """ alias Explorer.Chain alias Explorer.SmartContract.Reader - alias Indexer.Fetcher.TokenInstance.MetadataRetriever + alias Explorer.Token.MetadataRetriever require Logger diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/metadata_retriever.ex b/apps/indexer/lib/indexer/fetcher/token_instance/metadata_retriever.ex deleted file mode 100644 index a74a2579096c..000000000000 --- 
a/apps/indexer/lib/indexer/fetcher/token_instance/metadata_retriever.ex +++ /dev/null @@ -1,366 +0,0 @@ -defmodule Indexer.Fetcher.TokenInstance.MetadataRetriever do - @moduledoc """ - Fetches ERC-721/ERC-1155/ERC-404 token instance metadata. - """ - - require Logger - - alias Explorer.Helper, as: ExplorerHelper - alias HTTPoison.{Error, Response} - - @no_uri_error "no uri" - @vm_execution_error "VM execution error" - @ipfs_protocol "ipfs://" - @invalid_base64_data "invalid data:application/json;base64" - - # https://eips.ethereum.org/EIPS/eip-1155#metadata - @erc1155_token_id_placeholder "{id}" - - @max_error_length 255 - - @ignored_hosts ["localhost", "127.0.0.1", "0.0.0.0", "", nil] - - @spec ipfs_link(uid :: any()) :: String.t() - defp ipfs_link(uid) do - base_url = - :indexer - |> Application.get_env(:ipfs) - |> Keyword.get(:gateway_url) - |> String.trim_trailing("/") - - url = base_url <> "/" <> uid - - ipfs_params = Application.get_env(:indexer, :ipfs) - - if ipfs_params[:gateway_url_param_location] == :query do - gateway_url_param_key = ipfs_params[:gateway_url_param_key] - gateway_url_param_value = ipfs_params[:gateway_url_param_value] - - if gateway_url_param_key && gateway_url_param_value do - url <> "?#{gateway_url_param_key}=#{gateway_url_param_value}" - else - url - end - else - url - end - end - - @spec ipfs_headers() :: [{binary(), binary()}] - defp ipfs_headers do - ipfs_params = Application.get_env(:indexer, :ipfs) - - if ipfs_params[:gateway_url_param_location] == :header do - gateway_url_param_key = ipfs_params[:gateway_url_param_key] - gateway_url_param_value = ipfs_params[:gateway_url_param_value] - - if gateway_url_param_key && gateway_url_param_value do - [{gateway_url_param_key, gateway_url_param_value}] - else - [] - end - else - [] - end - end - - @doc """ - Fetch/parse metadata using smart-contract's response - """ - @spec fetch_json(any, binary() | nil, binary() | nil, boolean) :: - {:error, binary} | {:error_code, any} | {:ok, 
%{metadata: any}} - def fetch_json(uri, token_id \\ nil, hex_token_id \\ nil, from_base_uri? \\ false) - - def fetch_json({:ok, [""]}, _token_id, _hex_token_id, _from_base_uri?) do - {:error, @no_uri_error} - end - - def fetch_json(uri, token_id, hex_token_id, from_base_uri?) do - fetch_json_from_uri(uri, false, token_id, hex_token_id, from_base_uri?) - end - - defp fetch_json_from_uri(_uri, _ipfs?, _token_id, _hex_token_id, _from_base_uri?) - - defp fetch_json_from_uri({:error, error}, _ipfs?, _token_id, _hex_token_id, _from_base_uri?) do - error = to_string(error) - - if error =~ "execution reverted" or error =~ @vm_execution_error do - {:error, @vm_execution_error} - else - Logger.warn(["Unknown metadata format error #{inspect(error)}."], fetcher: :token_instances) - - # truncate error since it will be stored in DB - {:error, truncate_error(error)} - end - end - - # CIDv0 IPFS links # https://docs.ipfs.tech/concepts/content-addressing/#version-0-v0 - defp fetch_json_from_uri({:ok, ["Qm" <> _ = result]}, _, token_id, hex_token_id, from_base_uri?) do - if String.length(result) == 46 do - ipfs? = true - fetch_json_from_uri({:ok, [ipfs_link(result)]}, ipfs?, token_id, hex_token_id, from_base_uri?) - else - Logger.warn(["Unknown metadata format result #{inspect(result)}."], fetcher: :token_instances) - - {:error, truncate_error(result)} - end - end - - defp fetch_json_from_uri({:ok, ["'" <> token_uri]}, ipfs?, token_id, hex_token_id, from_base_uri?) do - token_uri = token_uri |> String.split("'") |> List.first() - fetch_metadata_inner(token_uri, ipfs?, token_id, hex_token_id, from_base_uri?) - end - - defp fetch_json_from_uri({:ok, ["http://" <> _ = token_uri]}, ipfs?, token_id, hex_token_id, from_base_uri?) do - fetch_metadata_inner(token_uri, ipfs?, token_id, hex_token_id, from_base_uri?) - end - - defp fetch_json_from_uri({:ok, ["https://" <> _ = token_uri]}, ipfs?, token_id, hex_token_id, from_base_uri?) 
do - fetch_metadata_inner(token_uri, ipfs?, token_id, hex_token_id, from_base_uri?) - end - - defp fetch_json_from_uri( - {:ok, [type = "data:application/json;utf8," <> json]}, - ipfs?, - token_id, - hex_token_id, - from_base_uri? - ) do - fetch_json_from_json_string(json, ipfs?, token_id, hex_token_id, from_base_uri?, type) - end - - defp fetch_json_from_uri( - {:ok, [type = "data:application/json," <> json]}, - ipfs?, - token_id, - hex_token_id, - from_base_uri? - ) do - fetch_json_from_json_string(json, ipfs?, token_id, hex_token_id, from_base_uri?, type) - end - - defp fetch_json_from_uri( - {:ok, ["data:application/json;base64," <> base64_encoded_json]}, - ipfs?, - token_id, - hex_token_id, - from_base_uri? - ) do - case Base.decode64(base64_encoded_json) do - {:ok, base64_decoded} -> - fetch_json_from_uri({:ok, [base64_decoded]}, ipfs?, token_id, hex_token_id, from_base_uri?) - - _ -> - {:error, @invalid_base64_data} - end - rescue - e -> - Logger.warn( - [ - "Unknown metadata format base64 #{inspect(base64_encoded_json)}.", - Exception.format(:error, e, __STACKTRACE__) - ], - fetcher: :token_instances - ) - - {:error, @invalid_base64_data} - end - - defp fetch_json_from_uri({:ok, ["#{@ipfs_protocol}ipfs/" <> right]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) do - fetch_from_ipfs(right, hex_token_id) - end - - defp fetch_json_from_uri({:ok, ["ipfs/" <> right]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) do - fetch_from_ipfs(right, hex_token_id) - end - - defp fetch_json_from_uri({:ok, [@ipfs_protocol <> right]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) do - fetch_from_ipfs(right, hex_token_id) - end - - defp fetch_json_from_uri({:ok, [json]}, _ipfs?, _token_id, hex_token_id, _from_base_uri?) 
do - json = ExplorerHelper.decode_json(json, true) - - check_type(json, hex_token_id) - rescue - e -> - Logger.warn(["Unknown metadata format #{inspect(json)}.", Exception.format(:error, e, __STACKTRACE__)], - fetcher: :token_instances - ) - - {:error, "invalid json"} - end - - defp fetch_json_from_uri(uri, _ipfs?, _token_id, _hex_token_id, _from_base_uri?) do - Logger.warn(["Unknown metadata uri format #{inspect(uri)}."], fetcher: :token_instances) - - {:error, "unknown metadata uri format"} - end - - defp fetch_json_from_json_string(json, ipfs?, token_id, hex_token_id, from_base_uri?, type) do - decoded_json = URI.decode(json) - - fetch_json_from_uri({:ok, [decoded_json]}, ipfs?, token_id, hex_token_id, from_base_uri?) - rescue - e -> - Logger.warn(["Unknown metadata format #{inspect(json)}.", Exception.format(:error, e, __STACKTRACE__)], - fetcher: :token_instances - ) - - {:error, "invalid #{type}"} - end - - defp fetch_from_ipfs(ipfs_uid, hex_token_id) do - ipfs_url = ipfs_link(ipfs_uid) - ipfs? = true - fetch_metadata_inner(ipfs_url, ipfs?, nil, hex_token_id) - end - - defp fetch_metadata_inner(uri, ipfs?, token_id, hex_token_id, from_base_uri? \\ false) - - defp fetch_metadata_inner(uri, ipfs?, token_id, hex_token_id, from_base_uri?) do - prepared_uri = substitute_token_id_to_token_uri(uri, token_id, hex_token_id, from_base_uri?) - fetch_metadata_from_uri(prepared_uri, ipfs?, hex_token_id) - rescue - e -> - Logger.warn( - ["Could not prepare token uri #{inspect(uri)}.", Exception.format(:error, e, __STACKTRACE__)], - fetcher: :token_instances - ) - - {:error, "preparation error"} - end - - def fetch_metadata_from_uri(uri, ipfs?, hex_token_id \\ nil) do - case Mix.env() != :test && URI.parse(uri) do - %URI{host: host} when host in @ignored_hosts -> - {:error, "ignored host #{host}"} - - _ -> - fetch_metadata_from_uri_request(uri, hex_token_id, ipfs?) - end - end - - defp fetch_metadata_from_uri_request(uri, hex_token_id, ipfs?) 
do - headers = if ipfs?, do: ipfs_headers(), else: [] - - case Application.get_env(:explorer, :http_adapter).get(uri, headers, - recv_timeout: 30_000, - follow_redirect: true, - hackney: [pool: :token_instance_fetcher] - ) do - {:ok, %Response{body: body, status_code: 200, headers: response_headers}} -> - content_type = get_content_type_from_headers(response_headers) - - check_content_type(content_type, uri, hex_token_id, body) - - {:ok, %Response{body: body, status_code: code}} -> - Logger.debug( - ["Request to token uri: #{inspect(uri)} failed with code #{code}. Body:", inspect(body)], - fetcher: :token_instances - ) - - {:error_code, code} - - {:error, %Error{reason: reason}} -> - Logger.warn( - ["Request to token uri failed: #{inspect(uri)}.", inspect(reason)], - fetcher: :token_instances - ) - - {:error, reason |> inspect() |> truncate_error()} - end - rescue - e -> - Logger.warn( - ["Could not send request to token uri #{inspect(uri)}.", Exception.format(:error, e, __STACKTRACE__)], - fetcher: :token_instances - ) - - {:error, "request error"} - end - - defp check_content_type(content_type, uri, hex_token_id, body) do - image = image?(content_type) - video = video?(content_type) - - if content_type && (image || video) do - json = if image, do: %{"image" => uri}, else: %{"animation_url" => uri} - - check_type(json, nil) - else - json = ExplorerHelper.decode_json(body, true) - - check_type(json, hex_token_id) - end - end - - defp get_content_type_from_headers(headers) do - {_, content_type} = - Enum.find(headers, fn {header_name, _header_value} -> - header_name == "Content-Type" - end) || {nil, nil} - - content_type - end - - defp image?(content_type) do - content_type && String.starts_with?(content_type, "image/") - end - - defp video?(content_type) do - content_type && String.starts_with?(content_type, "video/") - end - - defp check_type(json, nil) when is_map(json) do - {:ok, %{metadata: json}} - end - - defp check_type(json, hex_token_id) when is_map(json) 
do - metadata = - case json - |> Jason.encode!() - |> String.replace(@erc1155_token_id_placeholder, hex_token_id) - |> Jason.decode() do - {:ok, map} -> - map - - _ -> - json - end - - {:ok, %{metadata: metadata}} - end - - defp check_type(_, _) do - {:error, "wrong metadata type"} - end - - defp substitute_token_id_to_token_uri(base_uri, token_id, _empty_token_id, true) do - if String.ends_with?(base_uri, "/") do - base_uri <> to_string(token_id) - else - base_uri <> "/" <> to_string(token_id) - end - end - - defp substitute_token_id_to_token_uri(token_uri, _token_id, empty_token_id, _from_base_uri?) - when empty_token_id in [nil, ""], - do: token_uri - - defp substitute_token_id_to_token_uri(token_uri, _token_id, hex_token_id, _from_base_uri?) do - String.replace(token_uri, @erc1155_token_id_placeholder, hex_token_id) - end - - @doc """ - Truncate error string to @max_error_length symbols - """ - @spec truncate_error(binary()) :: binary() - def truncate_error(error) do - if String.length(error) > @max_error_length - 2 do - String.slice(error, 0, @max_error_length - 3) <> "..." 
- else - error - end - end -end diff --git a/apps/indexer/lib/indexer/fetcher/token_updater.ex b/apps/indexer/lib/indexer/fetcher/token_updater.ex index 3a7acba4fc8e..0aa42f1d6f24 100644 --- a/apps/indexer/lib/indexer/fetcher/token_updater.ex +++ b/apps/indexer/lib/indexer/fetcher/token_updater.ex @@ -52,7 +52,7 @@ defmodule Indexer.Fetcher.TokenUpdater do |> Duration.to_minutes() |> trunc() - {:ok, tokens} = Chain.stream_cataloged_token_contract_address_hashes(initial, reducer, interval_in_minutes, true) + {:ok, tokens} = Chain.stream_cataloged_tokens(initial, reducer, interval_in_minutes, true) tokens end @@ -62,7 +62,6 @@ defmodule Indexer.Fetcher.TokenUpdater do Logger.debug("updating tokens") entries - |> Enum.map(&to_string/1) |> MetadataRetriever.get_functions_of() |> case do {:ok, params} -> diff --git a/apps/indexer/test/indexer/fetcher/token_instance/metadata_retriever_test.exs b/apps/indexer/test/indexer/fetcher/token_instance/metadata_retriever_test.exs deleted file mode 100644 index 071fa859ac63..000000000000 --- a/apps/indexer/test/indexer/fetcher/token_instance/metadata_retriever_test.exs +++ /dev/null @@ -1,485 +0,0 @@ -defmodule Indexer.Fetcher.TokenInstance.MetadataRetrieverTest do - use EthereumJSONRPC.Case - - alias Indexer.Fetcher.TokenInstance.MetadataRetriever - alias Plug.Conn - - import Mox - - setup :verify_on_exit! 
- setup :set_mox_global - - describe "fetch_json/4" do - setup do - bypass = Bypass.open() - - {:ok, bypass: bypass} - end - - test "returns {:error, @no_uri_error} when empty uri is passed" do - error = {:error, "no uri"} - token_id = "TOKEN_ID" - hex_token_id = "HEX_TOKEN_ID" - from_base_uri = true - - result = MetadataRetriever.fetch_json({:ok, [""]}, token_id, hex_token_id, from_base_uri) - - assert result == error - end - - test "returns {:error, @vm_execution_error} when 'execution reverted' error passed in uri" do - uri_error = {:error, "something happened: execution reverted"} - token_id = "TOKEN_ID" - hex_token_id = "HEX_TOKEN_ID" - from_base_uri = true - result_error = {:error, "VM execution error"} - - result = MetadataRetriever.fetch_json(uri_error, token_id, hex_token_id, from_base_uri) - - assert result == result_error - end - - test "returns {:error, @vm_execution_error} when 'VM execution error' error passed in uri" do - error = {:error, "VM execution error"} - token_id = "TOKEN_ID" - hex_token_id = "HEX_TOKEN_ID" - from_base_uri = true - - result = MetadataRetriever.fetch_json(error, token_id, hex_token_id, from_base_uri) - - assert result == error - end - - test "returns {:error, error} when all other errors passed in uri" do - error = {:error, "Some error"} - token_id = "TOKEN_ID" - hex_token_id = "HEX_TOKEN_ID" - from_base_uri = true - - result = MetadataRetriever.fetch_json(error, token_id, hex_token_id, from_base_uri) - - assert result == error - end - - test "returns {:error, truncated_error} when long error passed in uri" do - error = - {:error, - "ERROR: Unable to establish a connection to the database server. The database server may be offline, or there could be a network issue preventing access. Please ensure that the database server is running and that the network configuration is correct. Additionally, check the database credentials and permissions to ensure they are valid. 
If the issue persists, contact your system administrator for further assistance. Error code: DB_CONN_FAILED_101234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890"} - - token_id = "TOKEN_ID" - hex_token_id = "HEX_TOKEN_ID" - from_base_uri = true - - truncated_error = - {:error, - "ERROR: Unable to establish a connection to the database server. The database server may be offline, or there could be a network issue preventing access. Please ensure that the database server is running and that the network configuration is correct. 
Ad..."} - - result = MetadataRetriever.fetch_json(error, token_id, hex_token_id, from_base_uri) - - assert result == truncated_error - end - - test "Constructs IPFS link with query param" do - configuration = Application.get_env(:indexer, :ipfs) - - Application.put_env(:indexer, :ipfs, - gateway_url: Keyword.get(configuration, :gateway_url), - gateway_url_param_location: :query, - gateway_url_param_key: "x-apikey", - gateway_url_param_value: "mykey" - ) - - data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" - - result = %{ - "name" => "asda", - "description" => "asda", - "salePrice" => 34, - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - "collectionId" => "1871_1665123820823" - } - - Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) - - Explorer.Mox.HTTPoison - |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP?x-apikey=mykey", - _headers, - _options -> - {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} - end) - - assert {:ok, - %{ - metadata: %{ - "collectionId" => "1871_1665123820823", - "description" => "asda", - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - "name" => "asda", - "salePrice" => 34 - } - }} == MetadataRetriever.fetch_json({:ok, [data]}) - - Application.put_env(:explorer, :http_adapter, HTTPoison) - Application.put_env(:indexer, :ipfs, configuration) - end - - test "Constructs IPFS link with no query param, if gateway_url_param_location is invalid" do - configuration = Application.get_env(:indexer, :ipfs) - - Application.put_env(:indexer, :ipfs, - gateway_url: Keyword.get(configuration, :gateway_url), - gateway_url_param_location: :query2, - gateway_url_param_key: "x-apikey", - gateway_url_param_value: "mykey" - ) - - data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" - - result = %{ - "name" => "asda", - "description" => "asda", - "salePrice" => 34, - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - 
"collectionId" => "1871_1665123820823" - } - - Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) - - Explorer.Mox.HTTPoison - |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP", _headers, _options -> - {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} - end) - - assert {:ok, - %{ - metadata: %{ - "collectionId" => "1871_1665123820823", - "description" => "asda", - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - "name" => "asda", - "salePrice" => 34 - } - }} == MetadataRetriever.fetch_json({:ok, [data]}) - - Application.put_env(:explorer, :http_adapter, HTTPoison) - Application.put_env(:indexer, :ipfs, configuration) - end - - test "Constructs IPFS link with additional header" do - configuration = Application.get_env(:indexer, :ipfs) - - Application.put_env(:indexer, :ipfs, - gateway_url: Keyword.get(configuration, :gateway_url), - gateway_url_param_location: :header, - gateway_url_param_key: "x-apikey", - gateway_url_param_value: "mykey" - ) - - data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" - - result = %{ - "name" => "asda", - "description" => "asda", - "salePrice" => 34, - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - "collectionId" => "1871_1665123820823" - } - - Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) - - Explorer.Mox.HTTPoison - |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP", - [{"x-apikey", "mykey"}], - _options -> - {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} - end) - - assert {:ok, - %{ - metadata: %{ - "collectionId" => "1871_1665123820823", - "description" => "asda", - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - "name" => "asda", - "salePrice" => 34 - } - }} == MetadataRetriever.fetch_json({:ok, [data]}) - - Application.put_env(:explorer, :http_adapter, HTTPoison) - Application.put_env(:indexer, :ipfs, 
configuration) - end - - test "fetches json with latin1 encoding", %{bypass: bypass} do - path = "/api/card/55265" - - json = """ - { - "name": "Sérgio Mendonça" - } - """ - - Bypass.expect(bypass, "GET", path, fn conn -> - Conn.resp(conn, 200, json) - end) - - assert {:ok, %{metadata: %{"name" => "Sérgio Mendonça"}}} == - MetadataRetriever.fetch_json({:ok, ["http://localhost:#{bypass.port}#{path}"]}) - end - - test "fetches json metadata when HTTP status 301", %{bypass: bypass} do - path = "/1302" - - attributes = """ - [ - {"trait_type": "Mouth", "value": "Discomfort"}, - {"trait_type": "Background", "value": "Army Green"}, - {"trait_type": "Eyes", "value": "Wide Eyed"}, - {"trait_type": "Fur", "value": "Black"}, - {"trait_type": "Earring", "value": "Silver Hoop"}, - {"trait_type": "Hat", "value": "Sea Captain's Hat"} - ] - """ - - json = """ - { - "attributes": #{attributes} - } - """ - - Bypass.expect(bypass, "GET", path, fn conn -> - Conn.resp(conn, 200, json) - end) - - {:ok, %{metadata: metadata}} = - MetadataRetriever.fetch_metadata_from_uri("http://localhost:#{bypass.port}#{path}", []) - - assert Map.get(metadata, "attributes") == Jason.decode!(attributes) - end - - test "decodes json file in tokenURI" do - data = - {:ok, - [ - 
"data:application/json,{\"name\":\"Home%20Address%20-%200x0000000000C1A6066c6c8B9d63e9B6E8865dC117\",\"description\":\"This%20NFT%20can%20be%20redeemed%20on%20HomeWork%20to%20grant%20a%20controller%20the%20exclusive%20right%20to%20deploy%20contracts%20with%20arbitrary%20bytecode%20to%20the%20designated%20home%20address.\",\"image\":\"data:image/svg+xml;charset=utf-8;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNDQgNzIiPjxzdHlsZT48IVtDREFUQVsuQntzdHJva2UtbGluZWpvaW46cm91bmR9LkN7c3Ryb2tlLW1pdGVybGltaXQ6MTB9LkR7c3Ryb2tlLXdpZHRoOjJ9LkV7ZmlsbDojOWI5YjlhfS5Ge3N0cm9rZS1saW5lY2FwOnJvdW5kfV1dPjwvc3R5bGU+PGcgdHJhbnNmb3JtPSJtYXRyaXgoMS4wMiAwIDAgMS4wMiA4LjEgMCkiPjxwYXRoIGZpbGw9IiNmZmYiIGQ9Ik0xOSAzMmgzNHYyNEgxOXoiLz48ZyBzdHJva2U9IiMwMDAiIGNsYXNzPSJCIEMgRCI+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI1IDQwaDl2MTZoLTl6Ii8+PHBhdGggZmlsbD0iIzkyZDNmNSIgZD0iTTQwIDQwaDh2N2gtOHoiLz48cGF0aCBmaWxsPSIjZWE1YTQ3IiBkPSJNNTMgMzJIMTl2LTFsMTYtMTYgMTggMTZ6Ii8+PHBhdGggZmlsbD0ibm9uZSIgZD0iTTE5IDMyaDM0djI0SDE5eiIvPjxwYXRoIGZpbGw9IiNlYTVhNDciIGQ9Ik0yOSAyMWwtNSA1di05aDV6Ii8+PC9nPjwvZz48ZyB0cmFuc2Zvcm09Im1hdHJpeCguODQgMCAwIC44NCA2NSA1KSI+PHBhdGggZD0iTTkuNSAyMi45bDQuOCA2LjRhMy4xMiAzLjEyIDAgMCAxLTMgMi4ybC00LjgtNi40Yy4zLTEuNCAxLjYtMi40IDMtMi4yeiIgZmlsbD0iI2QwY2ZjZSIvPjxwYXRoIGZpbGw9IiMwMTAxMDEiIGQ9Ik00MS43IDM4LjVsNS4xLTYuNSIvPjxwYXRoIGQ9Ik00Mi45IDI3LjhMMTguNCA1OC4xIDI0IDYybDIxLjgtMjcuMyAyLjMtMi44eiIgY2xhc3M9IkUiLz48cGF0aCBmaWxsPSIjMDEwMTAxIiBkPSJNNDMuNCAyOS4zbC00LjcgNS44Ii8+PHBhdGggZD0iTTQ2LjggMzJjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uNy0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4ycy0zLjYgOS45LS4zIDEyLjUiIGNsYXNzPSJFIi8+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI3LjMgMjZsMTEuOCAxNS43IDMuNCAyLjQgOS4xIDE0LjQtMy4yIDIuMy0xIC43LTEwLjItMTMuNi0xLjMtMy45LTExLjgtMTUuN3oiLz48cGF0aCBkPSJNMTIgMTkuOWw1LjkgNy45IDEwLjItNy42LTMuNC00LjVzNi44LTUuMSAxMC43LTQuNWMwIDAtNi42LTMtMTMuMyAxLjFTMTIgMTkuOSAxMiAxOS45eiIgY2xhc3M9IkUiLz48ZyBmaWxsPSJub25lIiBzdHJva2U9IiMwMDAiIGNs
YXNzPSJCIEMgRCI+PHBhdGggZD0iTTUyIDU4LjlMNDAuOSA0My4ybC0zLjEtMi4zLTEwLjYtMTQuNy0yLjkgMi4yIDEwLjYgMTQuNyAxLjEgMy42IDExLjUgMTUuNXpNMTIuNSAxOS44bDUuOCA4IDEwLjMtNy40LTMuMy00LjZzNi45LTUgMTAuOC00LjNjMCAwLTYuNi0zLjEtMTMuMy45cy0xMC4zIDcuNC0xMC4zIDcuNHptLTIuNiAyLjlsNC43IDYuNWMtLjUgMS4zLTEuNyAyLjEtMyAyLjJsLTQuNy02LjVjLjMtMS40IDEuNi0yLjQgMy0yLjJ6Ii8+PHBhdGggZD0iTTQxLjMgMzguNWw1LjEtNi41bS0zLjUtMi43bC00LjYgNS44bTguMS0zLjFjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uOC0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4yLTMuNCA0LjMtMy42IDkuOS0uMyAxMi41IiBjbGFzcz0iRiIvPjxwYXRoIGQ9Ik0zMC44IDQ0LjRMMTkgNTguOWw0IDMgMTAtMTIuNyIgY2xhc3M9IkYiLz48L2c+PC9nPjwvc3ZnPg==\"}" - ]} - - assert MetadataRetriever.fetch_json(data) == - {:ok, - %{ - metadata: %{ - "description" => - "This NFT can be redeemed on HomeWork to grant a controller the exclusive right to deploy contracts with arbitrary bytecode to the designated home address.", - "image" => - "data:image/svg+xml;charset=utf-8;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNDQgNzIiPjxzdHlsZT48IVtDREFUQVsuQntzdHJva2UtbGluZWpvaW46cm91bmR9LkN7c3Ryb2tlLW1pdGVybGltaXQ6MTB9LkR7c3Ryb2tlLXdpZHRoOjJ9LkV7ZmlsbDojOWI5YjlhfS5Ge3N0cm9rZS1saW5lY2FwOnJvdW5kfV1dPjwvc3R5bGU+PGcgdHJhbnNmb3JtPSJtYXRyaXgoMS4wMiAwIDAgMS4wMiA4LjEgMCkiPjxwYXRoIGZpbGw9IiNmZmYiIGQ9Ik0xOSAzMmgzNHYyNEgxOXoiLz48ZyBzdHJva2U9IiMwMDAiIGNsYXNzPSJCIEMgRCI+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI1IDQwaDl2MTZoLTl6Ii8+PHBhdGggZmlsbD0iIzkyZDNmNSIgZD0iTTQwIDQwaDh2N2gtOHoiLz48cGF0aCBmaWxsPSIjZWE1YTQ3IiBkPSJNNTMgMzJIMTl2LTFsMTYtMTYgMTggMTZ6Ii8+PHBhdGggZmlsbD0ibm9uZSIgZD0iTTE5IDMyaDM0djI0SDE5eiIvPjxwYXRoIGZpbGw9IiNlYTVhNDciIGQ9Ik0yOSAyMWwtNSA1di05aDV6Ii8+PC9nPjwvZz48ZyB0cmFuc2Zvcm09Im1hdHJpeCguODQgMCAwIC44NCA2NSA1KSI+PHBhdGggZD0iTTkuNSAyMi45bDQuOCA2LjRhMy4xMiAzLjEyIDAgMCAxLTMgMi4ybC00LjgtNi40Yy4zLTEuNCAxLjYtMi40IDMtMi4yeiIgZmlsbD0iI2QwY2ZjZSIvPjxwYXRoIGZpbGw9IiMwMTAxMDEiIGQ9Ik00MS43IDM4LjVsNS4xLTYuNSIvPjxwYXRoIGQ9Ik00Mi45IDI3LjhMMTguNCA1OC4xID
I0IDYybDIxLjgtMjcuMyAyLjMtMi44eiIgY2xhc3M9IkUiLz48cGF0aCBmaWxsPSIjMDEwMTAxIiBkPSJNNDMuNCAyOS4zbC00LjcgNS44Ii8+PHBhdGggZD0iTTQ2LjggMzJjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uNy0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4ycy0zLjYgOS45LS4zIDEyLjUiIGNsYXNzPSJFIi8+PHBhdGggZmlsbD0iI2E1NzkzOSIgZD0iTTI3LjMgMjZsMTEuOCAxNS43IDMuNCAyLjQgOS4xIDE0LjQtMy4yIDIuMy0xIC43LTEwLjItMTMuNi0xLjMtMy45LTExLjgtMTUuN3oiLz48cGF0aCBkPSJNMTIgMTkuOWw1LjkgNy45IDEwLjItNy42LTMuNC00LjVzNi44LTUuMSAxMC43LTQuNWMwIDAtNi42LTMtMTMuMyAxLjFTMTIgMTkuOSAxMiAxOS45eiIgY2xhc3M9IkUiLz48ZyBmaWxsPSJub25lIiBzdHJva2U9IiMwMDAiIGNsYXNzPSJCIEMgRCI+PHBhdGggZD0iTTUyIDU4LjlMNDAuOSA0My4ybC0zLjEtMi4zLTEwLjYtMTQuNy0yLjkgMi4yIDEwLjYgMTQuNyAxLjEgMy42IDExLjUgMTUuNXpNMTIuNSAxOS44bDUuOCA4IDEwLjMtNy40LTMuMy00LjZzNi45LTUgMTAuOC00LjNjMCAwLTYuNi0zLjEtMTMuMy45cy0xMC4zIDcuNC0xMC4zIDcuNHptLTIuNiAyLjlsNC43IDYuNWMtLjUgMS4zLTEuNyAyLjEtMyAyLjJsLTQuNy02LjVjLjMtMS40IDEuNi0yLjQgMy0yLjJ6Ii8+PHBhdGggZD0iTTQxLjMgMzguNWw1LjEtNi41bS0zLjUtMi43bC00LjYgNS44bTguMS0zLjFjMy4yIDIuNiA4LjcgMS4yIDEyLjEtMy4yczMuNi05LjkuMy0xMi41bC01LjEgNi41LTIuOC0uMS0uOC0yLjcgNS4xLTYuNWMtMy4yLTIuNi04LjctMS4yLTEyLjEgMy4yLTMuNCA0LjMtMy42IDkuOS0uMyAxMi41IiBjbGFzcz0iRiIvPjxwYXRoIGQ9Ik0zMC44IDQ0LjRMMTkgNTguOWw0IDMgMTAtMTIuNyIgY2xhc3M9IkYiLz48L2c+PC9nPjwvc3ZnPg==", - "name" => "Home Address - 0x0000000000C1A6066c6c8B9d63e9B6E8865dC117" - } - }} - end - - test "decodes base64 encoded json file in tokenURI" do - data = - {:ok, - [ - 
"data:application/json;base64,eyJuYW1lIjogIi54ZGFpIiwgImRlc2NyaXB0aW9uIjogIlB1bmsgRG9tYWlucyBkaWdpdGFsIGlkZW50aXR5LiBWaXNpdCBodHRwczovL3B1bmsuZG9tYWlucy8iLCAiaW1hZ2UiOiAiZGF0YTppbWFnZS9zdmcreG1sO2Jhc2U2NCxQSE4yWnlCNGJXeHVjejBpYUhSMGNEb3ZMM2QzZHk1M015NXZjbWN2TWpBd01DOXpkbWNpSUhacFpYZENiM2c5SWpBZ01DQTFNREFnTlRBd0lpQjNhV1IwYUQwaU5UQXdJaUJvWldsbmFIUTlJalV3TUNJK1BHUmxabk0rUEd4cGJtVmhja2R5WVdScFpXNTBJR2xrUFNKbmNtRmtJaUI0TVQwaU1DVWlJSGt4UFNJd0pTSWdlREk5SWpFd01DVWlJSGt5UFNJd0pTSStQSE4wYjNBZ2IyWm1jMlYwUFNJd0pTSWdjM1I1YkdVOUluTjBiM0F0WTI5c2IzSTZjbWRpS0RVNExERTNMREV4TmlrN2MzUnZjQzF2Y0dGamFYUjVPakVpSUM4K1BITjBiM0FnYjJabWMyVjBQU0l4TURBbElpQnpkSGxzWlQwaWMzUnZjQzFqYjJ4dmNqcHlaMklvTVRFMkxESTFMREUzS1R0emRHOXdMVzl3WVdOcGRIazZNU0lnTHo0OEwyeHBibVZoY2tkeVlXUnBaVzUwUGp3dlpHVm1jejQ4Y21WamRDQjRQU0l3SWlCNVBTSXdJaUIzYVdSMGFEMGlOVEF3SWlCb1pXbG5hSFE5SWpVd01DSWdabWxzYkQwaWRYSnNLQ05uY21Ga0tTSXZQangwWlhoMElIZzlJalV3SlNJZ2VUMGlOVEFsSWlCa2IyMXBibUZ1ZEMxaVlYTmxiR2x1WlQwaWJXbGtaR3hsSWlCbWFXeHNQU0ozYUdsMFpTSWdkR1Y0ZEMxaGJtTm9iM0k5SW0xcFpHUnNaU0lnWm05dWRDMXphWHBsUFNKNExXeGhjbWRsSWo0dWVHUmhhVHd2ZEdWNGRENDhkR1Y0ZENCNFBTSTFNQ1VpSUhrOUlqY3dKU0lnWkc5dGFXNWhiblF0WW1GelpXeHBibVU5SW0xcFpHUnNaU0lnWm1sc2JEMGlkMmhwZEdVaUlIUmxlSFF0WVc1amFHOXlQU0p0YVdSa2JHVWlQbkIxYm1zdVpHOXRZV2x1Y3p3dmRHVjRkRDQ4TDNOMlp6ND0ifQ==" - ]} - - assert MetadataRetriever.fetch_json(data) == - {:ok, - %{ - metadata: %{ - "name" => ".xdai", - "description" => "Punk Domains digital identity. 
Visit https://punk.domains/", - "image" => - "data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCA1MDAgNTAwIiB3aWR0aD0iNTAwIiBoZWlnaHQ9IjUwMCI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJncmFkIiB4MT0iMCUiIHkxPSIwJSIgeDI9IjEwMCUiIHkyPSIwJSI+PHN0b3Agb2Zmc2V0PSIwJSIgc3R5bGU9InN0b3AtY29sb3I6cmdiKDU4LDE3LDExNik7c3RvcC1vcGFjaXR5OjEiIC8+PHN0b3Agb2Zmc2V0PSIxMDAlIiBzdHlsZT0ic3RvcC1jb2xvcjpyZ2IoMTE2LDI1LDE3KTtzdG9wLW9wYWNpdHk6MSIgLz48L2xpbmVhckdyYWRpZW50PjwvZGVmcz48cmVjdCB4PSIwIiB5PSIwIiB3aWR0aD0iNTAwIiBoZWlnaHQ9IjUwMCIgZmlsbD0idXJsKCNncmFkKSIvPjx0ZXh0IHg9IjUwJSIgeT0iNTAlIiBkb21pbmFudC1iYXNlbGluZT0ibWlkZGxlIiBmaWxsPSJ3aGl0ZSIgdGV4dC1hbmNob3I9Im1pZGRsZSIgZm9udC1zaXplPSJ4LWxhcmdlIj4ueGRhaTwvdGV4dD48dGV4dCB4PSI1MCUiIHk9IjcwJSIgZG9taW5hbnQtYmFzZWxpbmU9Im1pZGRsZSIgZmlsbD0id2hpdGUiIHRleHQtYW5jaG9yPSJtaWRkbGUiPnB1bmsuZG9tYWluczwvdGV4dD48L3N2Zz4=" - } - }} - end - - test "decodes base64 encoded json file (with unicode string) in tokenURI" do - data = - {:ok, - [ - "data:application/json;base64,eyJkZXNjcmlwdGlvbiI6ICJQdW5rIERvbWFpbnMgZGlnaXRhbCBpZGVudGl0eSDDry4gVmlzaXQgaHR0cHM6Ly9wdW5rLmRvbWFpbnMvIn0=" - ]} - - assert MetadataRetriever.fetch_json(data) == - {:ok, - %{ - metadata: %{ - "description" => "Punk Domains digital identity ï. 
Visit https://punk.domains/" - } - }} - end - - test "fetches image from ipfs link directly", %{bypass: bypass} do - path = "/ipfs/bafybeig6nlmyzui7llhauc52j2xo5hoy4lzp6442lkve5wysdvjkizxonu" - - json = """ - { - "image": "https://ipfs.io/ipfs/bafybeig6nlmyzui7llhauc52j2xo5hoy4lzp6442lkve5wysdvjkizxonu" - } - """ - - Bypass.expect(bypass, "GET", path, fn conn -> - Conn.resp(conn, 200, json) - end) - - data = - {:ok, - [ - "http://localhost:#{bypass.port}#{path}" - ]} - - assert {:ok, - %{ - metadata: %{ - "image" => "https://ipfs.io/ipfs/bafybeig6nlmyzui7llhauc52j2xo5hoy4lzp6442lkve5wysdvjkizxonu" - } - }} == MetadataRetriever.fetch_json(data) - end - - test "Fetches metadata from ipfs", %{bypass: bypass} do - path = "/ipfs/bafybeid4ed2ua7fwupv4nx2ziczr3edhygl7ws3yx6y2juon7xakgj6cfm/51.json" - - json = """ - { - "image": "ipfs://bafybeihxuj3gxk7x5p36amzootyukbugmx3pw7dyntsrohg3se64efkuga/51.png" - } - """ - - Bypass.expect(bypass, "GET", path, fn conn -> - Conn.resp(conn, 200, json) - end) - - data = - {:ok, - [ - "http://localhost:#{bypass.port}#{path}" - ]} - - {:ok, - %{ - metadata: metadata - }} = MetadataRetriever.fetch_json(data) - - assert "ipfs://bafybeihxuj3gxk7x5p36amzootyukbugmx3pw7dyntsrohg3se64efkuga/51.png" == Map.get(metadata, "image") - end - - test "Fetches metadata from '${url}'", %{bypass: bypass} do - path = "/data/8/8578.json" - - data = - {:ok, - [ - "'http://localhost:#{bypass.port}#{path}'" - ]} - - json = """ - { - "attributes": [ - {"trait_type": "Character", "value": "Blue Suit Boxing Glove"}, - {"trait_type": "Face", "value": "Wink"}, - {"trait_type": "Hat", "value": "Blue"}, - {"trait_type": "Background", "value": "Red Carpet"} - ], - "image": "https://cards.collecttrumpcards.com/cards/0c68b1ab6.jpg", - "name": "Trump Digital Trading Card #8578", - "tokeId": 8578 - } - """ - - Bypass.expect(bypass, "GET", path, fn conn -> - Conn.resp(conn, 200, json) - end) - - assert {:ok, - %{ - metadata: Jason.decode!(json) - }} == 
MetadataRetriever.fetch_json(data) - end - - test "Process custom execution reverted" do - data = - {:error, - "(3) execution reverted: Nonexistent token (0x08c379a0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000114e6f6e6578697374656e7420746f6b656e000000000000000000000000000000)"} - - assert {:error, "VM execution error"} == MetadataRetriever.fetch_json(data) - end - - test "Process CIDv0 IPFS links" do - data = "QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP" - - result = %{ - "name" => "asda", - "description" => "asda", - "salePrice" => 34, - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - "collectionId" => "1871_1665123820823" - } - - Application.put_env(:explorer, :http_adapter, Explorer.Mox.HTTPoison) - - Explorer.Mox.HTTPoison - |> expect(:get, fn "https://ipfs.io/ipfs/QmT1Yz43R1PLn2RVovAnEM5dHQEvpTcnwgX8zftvY1FcjP", _headers, _options -> - {:ok, %HTTPoison.Response{status_code: 200, body: Jason.encode!(result)}} - end) - - assert {:ok, - %{ - metadata: %{ - "collectionId" => "1871_1665123820823", - "description" => "asda", - "img_hash" => "QmUfW3PVnh9GGuHcQgc3ZeNEbhwp5HE8rS5ac9MDWWQebz", - "name" => "asda", - "salePrice" => 34 - } - }} == MetadataRetriever.fetch_json({:ok, [data]}) - - Application.put_env(:explorer, :http_adapter, HTTPoison) - end - - test "Process URI directly from link", %{bypass: bypass} do - path = "/api/dejobio/v1/nftproduct/1" - - json = """ - { - "image": "https:\/\/cdn.discordapp.com\/attachments\/1008567215739650078\/1080111780858187796\/savechives_a_dragon_playing_football_in_a_city_full_of_flowers__0739cc42-aae1-4909-a964-3f9c0ed1a9ed.png", - "external_url": "https:\/\/dejob.io\/blue-reign-the-dragon-football-champion-of-the-floral-city\/", - "name": "Blue Reign: The Dragon Football Champion of the Floral City", - "description": "Test", - "attributes": [ - { - "trait_type": "Product Type", - "value": "Book" - }, - { - "display_type": 
"number", - "trait_type": "Total Sold", - "value": "0" - }, - { - "display_type": "number", - "trait_type": "Success Sold", - "value": "0" - }, - { - "max_value": "100", - "trait_type": "Success Rate", - "value": "0" - } - ] - } - """ - - Bypass.expect(bypass, "GET", path, fn conn -> - Conn.resp(conn, 200, json) - end) - - assert {:ok, - %{ - metadata: Jason.decode!(json) - }} == - MetadataRetriever.fetch_json({:ok, ["http://localhost:#{bypass.port}#{path}"]}) - end - end -end From b8730cdfe03233e55d6e60bba25933bb258dacb6 Mon Sep 17 00:00:00 2001 From: NBMXyeu <121313170+xyeuu@users.noreply.github.com> Date: Fri, 7 Jun 2024 15:30:36 +0200 Subject: [PATCH 072/150] feat: Adding Mobula price source (#9971) * Adding Mobula as data provider * push Mobula source * add chain setup * remove useless params * remove useless functions * add mobula price history provider * adding mobula history url * add possibility to fetch_market_data_for_token_addresses with Mobula on source.ex * update chain => chain_id * mix format * mix credo * removed useless alias Helper & Chain * Adding "Mobula" to cspell.json * Adding Mobula to config_helper * Fix dialyzer * mix format * Set Mobula as a default source if the EXCHANGE_RATES_COINGECKO_API_KEY is not set * fix compilation error on config_helper * Remove Mobula fallback on config_helper exchange_rates_source * Update apps/explorer/lib/explorer/exchange_rates/source/mobula.ex Co-authored-by: Fedor Ivanov * Update mobula.ex * add Mobula to exchange_rates_market_cap_source * Adding secondary_coin support * EXCHANGE_RATES_MOBULA_PLATFORM_ID into EXCHANGE_RATES_MOBULA_CHAIN_ID * should fix mix credo * adding mobula secondary id env * update env in runtime.exs * Push requests --------- Co-authored-by: Fedor Ivanov --- .../lib/explorer/exchange_rates/source.ex | 7 +- .../explorer/exchange_rates/source/mobula.ex | 174 ++++++++++++++++++ .../history/source/market_cap/mobula.ex | 54 ++++++ .../market/history/source/price/mobula.ex | 48 +++++ 
config/config_helper.exs | 14 +- config/runtime.exs | 7 + cspell.json | 1 + 7 files changed, 297 insertions(+), 8 deletions(-) create mode 100644 apps/explorer/lib/explorer/exchange_rates/source/mobula.ex create mode 100644 apps/explorer/lib/explorer/market/history/source/market_cap/mobula.ex create mode 100644 apps/explorer/lib/explorer/market/history/source/price/mobula.ex diff --git a/apps/explorer/lib/explorer/exchange_rates/source.ex b/apps/explorer/lib/explorer/exchange_rates/source.ex index 0c7a0929651f..52b846b3a75b 100644 --- a/apps/explorer/lib/explorer/exchange_rates/source.ex +++ b/apps/explorer/lib/explorer/exchange_rates/source.ex @@ -35,10 +35,9 @@ defmodule Explorer.ExchangeRates.Source do @spec fetch_market_data_for_token_addresses([Hash.Address.t()]) :: {:ok, %{Hash.Address.t() => %{fiat_value: float() | nil, circulating_market_cap: float() | nil}}} | {:error, any} - def fetch_market_data_for_token_addresses(address_hashes) do - source_url = CoinGecko.source_url(address_hashes) - headers = CoinGecko.headers() - fetch_exchange_rates_request(CoinGecko, source_url, headers) + def fetch_market_data_for_token_addresses(address_hashes, source \\ exchange_rates_source()) do + source_url = source.source_url(address_hashes) + fetch_exchange_rates_request(source, source_url, source.headers()) end @spec fetch_token_hashes_with_market_data :: {:ok, [String.t()]} | {:error, any} diff --git a/apps/explorer/lib/explorer/exchange_rates/source/mobula.ex b/apps/explorer/lib/explorer/exchange_rates/source/mobula.ex new file mode 100644 index 000000000000..9e5484c56fc5 --- /dev/null +++ b/apps/explorer/lib/explorer/exchange_rates/source/mobula.ex @@ -0,0 +1,174 @@ +defmodule Explorer.ExchangeRates.Source.Mobula do + @moduledoc """ + Adapter for fetching exchange rates from https://mobula.io + """ + + require Logger + alias Explorer.ExchangeRates.{Source, Token} + + import Source, only: [to_decimal: 1] + + @behaviour Source + + @impl Source + def format_data(%{"data" 
=> %{"market_cap" => _} = market_data}) do + current_price = market_data["price"] + image_url = market_data["logo"] + id = market_data["symbol"] + + btc_value = + if Application.get_env(:explorer, Explorer.ExchangeRates)[:fetch_btc_value], do: get_btc_value(id, market_data) + + [ + %Token{ + available_supply: to_decimal(market_data["circulating_supply"]), + total_supply: to_decimal(market_data["total_supply"]) || to_decimal(market_data["circulating_supply"]), + btc_value: btc_value, + id: id, + last_updated: nil, + market_cap_usd: to_decimal(market_data["market_cap"]), + tvl_usd: nil, + name: market_data["name"], + symbol: String.upcase(market_data["symbol"]), + usd_value: current_price, + volume_24h_usd: to_decimal(market_data["volume"]), + image_url: image_url + } + ] + end + + @impl Source + def format_data(%{"data" => data}) do + data + |> Enum.reduce(%{}, fn + {address_hash_string, market_data}, acc -> + case Explorer.Chain.Hash.Address.cast(address_hash_string) do + {:ok, address_hash} -> + acc + |> Map.put(address_hash, %{ + fiat_value: Map.get(market_data, "price"), + circulating_market_cap: Map.get(market_data, "market_cap"), + volume_24h: Map.get(market_data, "volume") + }) + + _ -> + acc + end + + _, acc -> + acc + end) + end + + @impl Source + def format_data(_), do: [] + + @impl Source + def source_url do + "#{base_url()}/market/data?asset=#{Explorer.coin()}" + end + + @impl Source + def source_url(token_addresses) when is_list(token_addresses) do + joined_addresses = token_addresses |> Enum.map_join(",", &to_string/1) + + "#{base_url()}/market/multi-data?blockchains=#{chain()}&assets=#{joined_addresses}" + end + + @impl Source + def source_url(input) do + symbol = input + "#{base_url()}/market/data&asset=#{symbol}" + end + + @spec secondary_history_source_url() :: String.t() + def secondary_history_source_url do + id = config(:secondary_coin_id) + + if id, do: "#{base_url()}/market/history?asset=#{id}", else: nil + end + + @spec history_source_url() 
:: String.t() + def history_source_url do + "#{base_url()}/market/history?asset=#{Explorer.coin()}" + end + + @spec history_url(non_neg_integer(), boolean()) :: String.t() + def history_url(previous_days, secondary_coin?) do + now = DateTime.utc_now() + date_days_ago = DateTime.add(now, -previous_days, :day) + timestamp_ms = DateTime.to_unix(date_days_ago) * 1000 + + source_url = if secondary_coin?, do: secondary_history_source_url(), else: history_source_url() + + "#{source_url}&from=#{timestamp_ms}" + end + + @spec market_cap_history_url(non_neg_integer()) :: String.t() + def market_cap_history_url(previous_days) do + now = DateTime.utc_now() + date_days_ago = DateTime.add(now, -previous_days, :day) + timestamp_ms = DateTime.to_unix(date_days_ago) * 1000 + + "#{history_source_url()}&from=#{timestamp_ms}&period=5" + end + + @impl Source + def headers do + if config(:api_key) do + [{"Authorization", "#{config(:api_key)}"}] + else + [] + end + end + + defp get_current_price(market_data) do + if market_data["price"] do + to_decimal(market_data["price"]) + else + 1 + end + end + + defp get_btc_value(id, market_data) do + case get_btc_price() do + {:ok, price} -> + btc_price = to_decimal(price) + current_price = get_current_price(market_data) + + if id != "btc" && current_price && btc_price do + Decimal.div(current_price, btc_price) + else + 1 + end + + _ -> + 1 + end + end + + defp chain do + config(:platform) || "ethereum" + end + + defp base_url do + config(:base_url) + end + + defp get_btc_price do + url = "#{base_url()}/market/data?asset=Bitcoin" + + case Source.http_request(url, headers()) do + {:ok, %{"price" => current_price}} -> + {:ok, current_price} + + resp -> + resp + end + end + + @spec config(atom()) :: term + defp config(key) do + Application.get_env(:explorer, __MODULE__, [])[key] + end +end diff --git a/apps/explorer/lib/explorer/market/history/source/market_cap/mobula.ex b/apps/explorer/lib/explorer/market/history/source/market_cap/mobula.ex new file 
mode 100644 index 000000000000..f5195e58de9a --- /dev/null +++ b/apps/explorer/lib/explorer/market/history/source/market_cap/mobula.ex @@ -0,0 +1,54 @@ +defmodule Explorer.Market.History.Source.MarketCap.Mobula do + @moduledoc """ + Adapter for fetching current market from Mobula. + + The current market is fetched for the configured coin. You can specify a + different coin by changing the targeted coin. + + # In config.exs + config :explorer, coin: "POA" + + """ + + require Logger + + alias Explorer.ExchangeRates.Source + alias Explorer.ExchangeRates.Source.Mobula, as: ExchangeRatesSourceMobula + alias Explorer.Market.History.Source.MarketCap, as: SourceMarketCap + alias Explorer.Market.History.Source.Price.CryptoCompare + + @behaviour SourceMarketCap + + @impl SourceMarketCap + def fetch_market_cap(previous_days) do + url = ExchangeRatesSourceMobula.market_cap_history_url(previous_days) + + case Source.http_request(url, ExchangeRatesSourceMobula.headers()) do + {:ok, data} -> + result = + data + |> format_data() + + {:ok, result} + + _ -> + :error + end + end + + @spec format_data(term()) :: SourceMarketCap.record() | nil + defp format_data(nil), do: nil + + defp format_data(data) do + market_caps = data["data"]["market_cap_history"] + + for [date, market_cap] <- market_caps do + date = Decimal.to_integer(Decimal.round(Decimal.from_float(date / 1000))) + + %{ + market_cap: Decimal.new(to_string(market_cap)), + date: CryptoCompare.date(date) + } + end + end +end diff --git a/apps/explorer/lib/explorer/market/history/source/price/mobula.ex b/apps/explorer/lib/explorer/market/history/source/price/mobula.ex new file mode 100644 index 000000000000..1f799777adec --- /dev/null +++ b/apps/explorer/lib/explorer/market/history/source/price/mobula.ex @@ -0,0 +1,48 @@ +defmodule Explorer.Market.History.Source.Price.Mobula do + @moduledoc """ + Adapter for fetching current market from Mobula. 
+ """ + + require Logger + alias Explorer.ExchangeRates.Source + alias Explorer.ExchangeRates.Source.Mobula, as: ExchangeRatesSourceMobula + alias Explorer.Market.History.Source.Price, as: SourcePrice + alias Explorer.Market.History.Source.Price.CryptoCompare + + @behaviour SourcePrice + + @impl SourcePrice + def fetch_price_history(previous_days, secondary_coin? \\ false) do + url = ExchangeRatesSourceMobula.history_url(previous_days, secondary_coin?) + + case Source.http_request(url, ExchangeRatesSourceMobula.headers()) do + {:ok, data} -> + result = + data + |> format_data(secondary_coin?) + + {:ok, result} + + _ -> + :error + end + end + + @spec format_data(term(), boolean()) :: SourcePrice.record() | nil + defp format_data(nil, _), do: nil + + defp format_data(data, secondary_coin?) do + prices = data["data"]["price_history"] + + for [date, price] <- prices do + date = Decimal.to_integer(Decimal.round(Decimal.from_float(date / 1000))) + + %{ + closing_price: Decimal.new(to_string(price)), + date: CryptoCompare.date(date), + opening_price: Decimal.new(to_string(price)), + secondary_coin: secondary_coin? 
+ } + end + end +end diff --git a/config/config_helper.exs b/config/config_helper.exs index bccd722b8b78..d75f9aaba7e2 100644 --- a/config/config_helper.exs +++ b/config/config_helper.exs @@ -170,20 +170,22 @@ defmodule ConfigHelper do end end - @spec exchange_rates_source() :: Source.CoinGecko | Source.CoinMarketCap + @spec exchange_rates_source() :: Source.CoinGecko | Source.CoinMarketCap | Source.Mobula def exchange_rates_source do case System.get_env("EXCHANGE_RATES_MARKET_CAP_SOURCE") do "coin_gecko" -> Source.CoinGecko "coin_market_cap" -> Source.CoinMarketCap + "mobula" -> Source.Mobula _ -> Source.CoinGecko end end - @spec exchange_rates_market_cap_source() :: MarketCap.CoinGecko | MarketCap.CoinMarketCap + @spec exchange_rates_market_cap_source() :: MarketCap.CoinGecko | MarketCap.CoinMarketCap | MarketCap.Mobula def exchange_rates_market_cap_source do case System.get_env("EXCHANGE_RATES_MARKET_CAP_SOURCE") do "coin_gecko" -> MarketCap.CoinGecko "coin_market_cap" -> MarketCap.CoinMarketCap + "mobula" -> MarketCap.Mobula _ -> MarketCap.CoinGecko end end @@ -196,26 +198,30 @@ defmodule ConfigHelper do end end - @spec exchange_rates_price_source() :: Price.CoinGecko | Price.CoinMarketCap | Price.CryptoCompare + @spec exchange_rates_price_source() :: Price.CoinGecko | Price.CoinMarketCap | Price.CryptoCompare | Price.Mobula def exchange_rates_price_source do case System.get_env("EXCHANGE_RATES_PRICE_SOURCE") do "coin_gecko" -> Price.CoinGecko "coin_market_cap" -> Price.CoinMarketCap "crypto_compare" -> Price.CryptoCompare + "mobula" -> Price.Mobula _ -> Price.CryptoCompare end end - @spec exchange_rates_secondary_coin_price_source() :: Price.CoinGecko | Price.CoinMarketCap | Price.CryptoCompare + @spec exchange_rates_secondary_coin_price_source() :: + Price.CoinGecko | Price.CoinMarketCap | Price.CryptoCompare | Price.Mobula def exchange_rates_secondary_coin_price_source do cmc_secondary_coin_id = 
System.get_env("EXCHANGE_RATES_COINMARKETCAP_SECONDARY_COIN_ID") cg_secondary_coin_id = System.get_env("EXCHANGE_RATES_COINGECKO_SECONDARY_COIN_ID") cc_secondary_coin_symbol = System.get_env("EXCHANGE_RATES_CRYPTOCOMPARE_SECONDARY_COIN_SYMBOL") + mobula_secondary_coin_id = System.get_env("EXCHANGE_RATES_MOBULA_SECONDARY_COIN_ID") cond do cg_secondary_coin_id && cg_secondary_coin_id !== "" -> Price.CoinGecko cmc_secondary_coin_id && cmc_secondary_coin_id !== "" -> Price.CoinMarketCap cc_secondary_coin_symbol && cc_secondary_coin_symbol !== "" -> Price.CryptoCompare + mobula_secondary_coin_id && mobula_secondary_coin_id !== "" -> Price.Mobula true -> Price.CryptoCompare end end diff --git a/config/runtime.exs b/config/runtime.exs index cd2072166e89..f00a02c4f91f 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -367,6 +367,13 @@ config :explorer, Explorer.ExchangeRates.Source.CoinGecko, coin_id: System.get_env("EXCHANGE_RATES_COINGECKO_COIN_ID"), secondary_coin_id: cg_secondary_coin_id +config :explorer, Explorer.ExchangeRates.Source.Mobula, + platform: System.get_env("EXCHANGE_RATES_MOBULA_CHAIN_ID"), + base_url: System.get_env("EXCHANGE_RATES_MOBULA_BASE_URL", "https://api.mobula.io/api/1"), + api_key: System.get_env("EXCHANGE_RATES_MOBULA_API_KEY"), + coin_id: System.get_env("EXCHANGE_RATES_MOBULA_COIN_ID"), + secondary_coin_id: System.get_env("EXCHANGE_RATES_MOBULA_SECONDARY_COIN_ID") + config :explorer, Explorer.ExchangeRates.Source.DefiLlama, coin_id: System.get_env("EXCHANGE_RATES_DEFILLAMA_COIN_ID") cc_secondary_coin_symbol = System.get_env("EXCHANGE_RATES_CRYPTOCOMPARE_SECONDARY_COIN_SYMBOL") diff --git a/cspell.json b/cspell.json index 0f0cf285ed89..aa1edcf326aa 100644 --- a/cspell.json +++ b/cspell.json @@ -54,6 +54,7 @@ "LUKSO", "Limegreen", "MARKETCAP", + "Mobula", "MDWW", "Mainnets", "Mendonça", From d0ec50eb951b131c0549a42760f3709eb0dc8ead Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Fri, 7 Jun 2024 16:16:17 +0200 Subject: [PATCH 
073/150] feat(ci): use remote arm64 builder (#9468) * feat(ci): multi-platform remote build * set arm runner secret, update runner hosts * select one builder and persist it * select one builder and persist it * find the least busy arm builder * chore: update ci configs post rebase * enable remote build in every workflow --------- Co-authored-by: aagaev --- .../setup-repo-and-short-sha/action.yml | 23 ------- .github/actions/setup-repo/action.yml | 65 ++++++++++++++++++- .github/scripts/select-builder.sh | 46 +++++++++++++ .github/workflows/pre-release-eth.yml | 10 +++ .github/workflows/pre-release-optimism.yml | 10 +++ .github/workflows/pre-release-shibarium.yml | 10 +++ .github/workflows/pre-release-zksync.yml | 7 ++ .github/workflows/pre-release.yml | 7 ++ .../publish-docker-image-every-push.yml | 32 +++++---- .../publish-docker-image-for-arbitrum.yml | 7 +- .../publish-docker-image-for-core.yml | 10 ++- .../publish-docker-image-for-eth-sepolia.yml | 18 ++++- .../publish-docker-image-for-eth.yml | 10 ++- .../publish-docker-image-for-filecoin.yml | 10 ++- .../publish-docker-image-for-fuse.yml | 10 ++- .../publish-docker-image-for-gnosis-chain.yml | 10 ++- .../publish-docker-image-for-l2-staging.yml | 10 ++- .../publish-docker-image-for-lukso.yml | 10 ++- .../publish-docker-image-for-optimism.yml | 7 +- .../publish-docker-image-for-polygon-edge.yml | 10 ++- .../publish-docker-image-for-redstone.yml | 7 +- .../publish-docker-image-for-rootstock.yml | 10 ++- .../publish-docker-image-for-shibarium.yml | 10 ++- .../publish-docker-image-for-stability.yml | 10 ++- .../publish-docker-image-for-suave.yml | 7 +- .../publish-docker-image-for-zetachain.yml | 10 ++- .../publish-docker-image-for-zkevm.yml | 10 ++- .../publish-docker-image-for-zksync.yml | 10 ++- ...publish-docker-image-staging-on-demand.yml | 19 +++--- .github/workflows/release-arbitrum.yml | 7 ++ .github/workflows/release-eth.yml | 7 ++ .github/workflows/release-filecoin.yml | 7 ++ 
.github/workflows/release-fuse.yml | 7 ++ .github/workflows/release-gnosis.yml | 7 ++ .github/workflows/release-optimism.yml | 7 ++ .github/workflows/release-polygon-edge.yml | 7 ++ .github/workflows/release-polygon-zkevm.yml | 7 ++ .github/workflows/release-redstone.yml | 7 ++ .github/workflows/release-rootstock.yml | 7 ++ .github/workflows/release-shibarium.yml | 7 ++ .github/workflows/release-stability.yml | 7 ++ .github/workflows/release-suave.yml | 7 ++ .github/workflows/release-zetachain.yml | 7 ++ .github/workflows/release-zksync.yml | 7 ++ .github/workflows/release.yml | 7 ++ 45 files changed, 461 insertions(+), 66 deletions(-) delete mode 100644 .github/actions/setup-repo-and-short-sha/action.yml create mode 100755 .github/scripts/select-builder.sh diff --git a/.github/actions/setup-repo-and-short-sha/action.yml b/.github/actions/setup-repo-and-short-sha/action.yml deleted file mode 100644 index d3cce9891a3a..000000000000 --- a/.github/actions/setup-repo-and-short-sha/action.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: 'Setup repo and calc short SHA commit' -description: 'Setup repo: checkout/login/extract metadata, Set up Docker Buildx and calculate short SHA commit' -inputs: - docker-username: - description: 'Docker username' - required: true - docker-password: - description: 'Docker password' - required: true -runs: - using: "composite" - - steps: - - uses: actions/checkout@v4 - - name: Setup repo - uses: ./.github/actions/setup-repo - with: - docker-username: ${{ inputs.docker-username }} - docker-password: ${{ inputs.docker-password }} - - - name: Add SHORT_SHA env property with commit short sha - shell: bash - run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV \ No newline at end of file diff --git a/.github/actions/setup-repo/action.yml b/.github/actions/setup-repo/action.yml index 2c3533159e15..0465ac9ce2c2 100644 --- a/.github/actions/setup-repo/action.yml +++ b/.github/actions/setup-repo/action.yml @@ -7,14 +7,69 @@ inputs: 
docker-password: description: 'Docker password' required: true + docker-remote-multi-platform: + description: 'Docker remote multi-platform builder' + required: true + default: 'false' + docker-arm-host: + description: 'Docker remote arm builder' + required: false + docker-arm-host-key: + description: 'Docker remote arm builder ssh private key' + required: false + docker-image: + description: 'Docker image' + required: true + default: blockscout/blockscout +outputs: + docker-builder: + description: 'Docker builder' + value: ${{ steps.builder_local.outputs.name || steps.builder_multi.outputs.name }} + docker-tags: + description: 'Docker metadata tags' + value: ${{ steps.meta.outputs.tags }} + docker-labels: + description: 'Docker metadata labels' + value: ${{ steps.meta.outputs.labels }} + docker-platforms: + description: 'Docker build platforms' + value: ${{ steps.builder_local.outputs.platforms || steps.builder_multi.outputs.platforms }} runs: using: "composite" steps: - - name: Check out the repo - uses: actions/checkout@v4 + - name: Set up SSH key + shell: bash + run: | + mkdir -p ~/.ssh + echo "${{ inputs.docker-arm-host-key }}" > ~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa + - name: Find builder + if: ${{ inputs.docker-remote-multi-platform }} + shell: bash + run: echo "BUILDER_IP=$(./.github/scripts/select-builder.sh ${{ inputs.docker-arm-host }} ubuntu ~/.ssh/id_rsa)" >> $GITHUB_ENV + - name: Set up SSH + if: ${{ inputs.docker-remote-multi-platform }} + uses: MrSquaare/ssh-setup-action@523473d91581ccbf89565e12b40faba93f2708bd # v1.1.0 + with: + host: ${{ env.BUILDER_IP }} + private-key: ${{ inputs.docker-arm-host-key }} - name: Set up Docker Buildx + if: ${{ !inputs.docker-remote-multi-platform }} + uses: docker/setup-buildx-action@v3 + id: builder_local + with: + platforms: linux/amd64 + + - name: Set up Multi-platform Docker Buildx + if: ${{ inputs.docker-remote-multi-platform }} uses: docker/setup-buildx-action@v3 + id: builder_multi + with: + platforms: 
linux/amd64 + append: | + - endpoint: ssh://ubuntu@${{ env.BUILDER_IP }} + platforms: linux/arm64/v8 - name: Log in to Docker Hub uses: docker/login-action@v3 @@ -26,4 +81,8 @@ runs: id: meta uses: docker/metadata-action@v5 with: - images: blockscout/blockscout \ No newline at end of file + images: ${{ inputs.docker-image }} + + - name: Add SHORT_SHA env property with commit short sha + shell: bash + run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV \ No newline at end of file diff --git a/.github/scripts/select-builder.sh b/.github/scripts/select-builder.sh new file mode 100755 index 000000000000..a86f5b8852e3 --- /dev/null +++ b/.github/scripts/select-builder.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Check if a domain is provided as an argument +if [ -z "$1" ]; then + echo "Usage: $0 " + exit 1 +fi + +DOMAIN=$1 +SSH_USER=$2 +SSH_KEY=$3 + +# Resolve A records +IP_LIST=$(dig +short A $DOMAIN) +if [ -z "$IP_LIST" ]; then + echo "No IPs found for domain $DOMAIN" + exit 1 +fi + +MIN_LA=1000000 +BEST_BUILDER="" + +for IP in $IP_LIST; do + # Check if the host is reachable via SSH + ssh -o StrictHostKeychecking=no -o ConnectTimeout=5 -o BatchMode=yes -i $SSH_KEY $SSH_USER@$IP "exit" 2>/dev/null + if [ $? -eq 0 ]; then + # Get the load average + LA=$(ssh -o StrictHostKeychecking=no -i $SSH_KEY $SSH_USER@$IP "uptime | awk -F'load average:' '{ print \$2 }' | cut -d, -f1" 2>/dev/null) + if [ $? -eq 0 ]; then + # Compare and find the minimum load average + LA=$(echo $LA | xargs) # Trim whitespace + if (( $(echo "$LA < $MIN_LA" | bc -l) )); then + MIN_LA=$LA + BEST_BUILDER=$IP + fi + fi + else + echo "Host $IP is unreachable, skipping." + fi +done + +if [ -n "$BEST_BUILDER" ]; then + echo "$BEST_BUILDER" +else + echo "No reachable hosts found." 
+fi diff --git a/.github/workflows/pre-release-eth.yml b/.github/workflows/pre-release-eth.yml index be8212be70e6..07c436f5727c 100644 --- a/.github/workflows/pre-release-eth.yml +++ b/.github/workflows/pre-release-eth.yml @@ -21,9 +21,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Ethereum (indexer + API) uses: docker/build-push-action@v5 @@ -32,8 +36,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_WEBAPP=false API_V1_READ_METHODS_DISABLED=false @@ -53,8 +59,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_API=true DISABLE_WEBAPP=true @@ -73,8 +81,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_INDEXER=true DISABLE_WEBAPP=true diff --git a/.github/workflows/pre-release-optimism.yml b/.github/workflows/pre-release-optimism.yml index 2005777fced3..44683dc335c9 100644 --- a/.github/workflows/pre-release-optimism.yml +++ b/.github/workflows/pre-release-optimism.yml @@ -21,9 +21,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ 
secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Optimism (indexer + API) uses: docker/build-push-action@v5 @@ -32,8 +36,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_WEBAPP=false API_V1_READ_METHODS_DISABLED=false @@ -53,8 +59,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_API=true DISABLE_WEBAPP=true @@ -73,8 +81,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_INDEXER=true DISABLE_WEBAPP=true diff --git a/.github/workflows/pre-release-shibarium.yml b/.github/workflows/pre-release-shibarium.yml index 36080b3090ce..8258205844b7 100644 --- a/.github/workflows/pre-release-shibarium.yml +++ b/.github/workflows/pre-release-shibarium.yml @@ -21,9 +21,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Shibarium (indexer + API) uses: docker/build-push-action@v5 @@ -32,8 +36,10 @@ jobs: file: 
./docker/Dockerfile push: true tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_WEBAPP=false API_V1_READ_METHODS_DISABLED=false @@ -53,8 +59,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_API=true DISABLE_WEBAPP=true @@ -73,8 +81,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 + linux/arm64/v8 build-args: | DISABLE_INDEXER=true DISABLE_WEBAPP=true diff --git a/.github/workflows/pre-release-zksync.yml b/.github/workflows/pre-release-zksync.yml index 84cf12af3651..dc5b3b582100 100644 --- a/.github/workflows/pre-release-zksync.yml +++ b/.github/workflows/pre-release-zksync.yml @@ -21,9 +21,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for ZkSync (indexer + API) uses: docker/build-push-action@v5 @@ -32,6 +36,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -54,6 +59,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + labels: 
${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -75,6 +81,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 1d971c883edc..2a83968a043e 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -21,9 +21,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build & Push Core Docker image (indexer + API) uses: docker/build-push-action@v5 @@ -34,6 +38,7 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:master, blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -62,6 +67,7 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -89,6 +95,7 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 
linux/arm64/v8 diff --git a/.github/workflows/publish-docker-image-every-push.yml b/.github/workflows/publish-docker-image-every-push.yml index 17213d3d0b22..dea446dd47ee 100644 --- a/.github/workflows/publish-docker-image-every-push.yml +++ b/.github/workflows/publish-docker-image-every-push.yml @@ -17,23 +17,17 @@ jobs: push_to_registry: name: Push Docker image to Docker Hub runs-on: ubuntu-latest - outputs: - release-version: ${{ steps.output-step.outputs.release-version }} - short-sha: ${{ steps.output-step.outputs.short-sha }} steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - - - name: Add outputs - run: | - echo "::set-output name=release-version::${{ env.NEXT_RELEASE_VERSION }}" - echo "::set-output name=short-sha::${{ env.SHORT_SHA }}" - id: output-step + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image (indexer + API) uses: docker/build-push-action@v5 @@ -44,6 +38,10 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:master, blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | DISABLE_WEBAPP=false API_V1_READ_METHODS_DISABLED=false @@ -67,6 +65,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | DISABLE_API=true DISABLE_WEBAPP=true @@ -89,6 +91,10 @@ jobs: file: 
./docker/Dockerfile push: true tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}-api + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | DISABLE_INDEXER=true DISABLE_WEBAPP=true @@ -112,6 +118,10 @@ jobs: push: true cache-from: type=registry,ref=blockscout/blockscout:buildcache tags: blockscout/blockscout:frontend-main + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-arbitrum.yml b/.github/workflows/publish-docker-image-for-arbitrum.yml index c5cc0d164e18..c870634a1751 100644 --- a/.github/workflows/publish-docker-image-for-arbitrum.yml +++ b/.github/workflows/publish-docker-image-for-arbitrum.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/publish-docker-image-for-core.yml b/.github/workflows/publish-docker-image-for-core.yml index 9f726bfbbd56..07db87141aa3 100644 --- a/.github/workflows/publish-docker-image-for-core.yml +++ b/.github/workflows/publish-docker-image-for-core.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo 
- uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-eth-sepolia.yml b/.github/workflows/publish-docker-image-for-eth-sepolia.yml index a929c1d70127..b389c94ada74 100644 --- a/.github/workflows/publish-docker-image-for-eth-sepolia.yml +++ b/.github/workflows/publish-docker-image-for-eth-sepolia.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image (indexer + API) uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | 
CHAIN_TYPE=ethereum CACHE_EXCHANGE_RATES_PERIOD= @@ -46,6 +54,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CHAIN_TYPE=ethereum CACHE_EXCHANGE_RATES_PERIOD= @@ -64,6 +76,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-api + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CHAIN_TYPE=ethereum CACHE_EXCHANGE_RATES_PERIOD= diff --git a/.github/workflows/publish-docker-image-for-eth.yml b/.github/workflows/publish-docker-image-for-eth.yml index 3e3bed50190d..b6819c515b5f 100644 --- a/.github/workflows/publish-docker-image-for-eth.yml +++ b/.github/workflows/publish-docker-image-for-eth.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-experimental + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CHAIN_TYPE=ethereum CACHE_EXCHANGE_RATES_PERIOD= diff --git a/.github/workflows/publish-docker-image-for-filecoin.yml b/.github/workflows/publish-docker-image-for-filecoin.yml index 
ca720971fbbc..efc5fcfbd324 100644 --- a/.github/workflows/publish-docker-image-for-filecoin.yml +++ b/.github/workflows/publish-docker-image-for-filecoin.yml @@ -14,10 +14,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -26,6 +30,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-fuse.yml b/.github/workflows/publish-docker-image-for-fuse.yml index 04fc5a81e5d3..9f19e1ef7b58 100644 --- a/.github/workflows/publish-docker-image-for-fuse.yml +++ b/.github/workflows/publish-docker-image-for-fuse.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ 
steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | BRIDGED_TOKENS_ENABLED=true CACHE_EXCHANGE_RATES_PERIOD= diff --git a/.github/workflows/publish-docker-image-for-gnosis-chain.yml b/.github/workflows/publish-docker-image-for-gnosis-chain.yml index 125f6dc5376a..88fff9f709b6 100644 --- a/.github/workflows/publish-docker-image-for-gnosis-chain.yml +++ b/.github/workflows/publish-docker-image-for-gnosis-chain.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | BRIDGED_TOKENS_ENABLED=true CACHE_EXCHANGE_RATES_PERIOD= diff --git a/.github/workflows/publish-docker-image-for-l2-staging.yml b/.github/workflows/publish-docker-image-for-l2-staging.yml index 4ced3d5a35d1..c3bdc7522ad6 100644 --- a/.github/workflows/publish-docker-image-for-l2-staging.yml +++ b/.github/workflows/publish-docker-image-for-l2-staging.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: 
${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-lukso.yml b/.github/workflows/publish-docker-image-for-lukso.yml index 35e01599c831..01f5238a733b 100644 --- a/.github/workflows/publish-docker-image-for-lukso.yml +++ b/.github/workflows/publish-docker-image-for-lukso.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-optimism.yml b/.github/workflows/publish-docker-image-for-optimism.yml index 2ab34fd4c7bf..1f3e81452252 100644 --- a/.github/workflows/publish-docker-image-for-optimism.yml +++ b/.github/workflows/publish-docker-image-for-optimism.yml @@ 
-15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/publish-docker-image-for-polygon-edge.yml b/.github/workflows/publish-docker-image-for-polygon-edge.yml index e5bcbf6b2a34..4bd600b778fa 100644 --- a/.github/workflows/publish-docker-image-for-polygon-edge.yml +++ b/.github/workflows/publish-docker-image-for-polygon-edge.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-redstone.yml 
b/.github/workflows/publish-docker-image-for-redstone.yml index 029c42bc698f..c64c06b0e3eb 100644 --- a/.github/workflows/publish-docker-image-for-redstone.yml +++ b/.github/workflows/publish-docker-image-for-redstone.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/publish-docker-image-for-rootstock.yml b/.github/workflows/publish-docker-image-for-rootstock.yml index 4a4c90e6f178..910c9ba2d1b7 100644 --- a/.github/workflows/publish-docker-image-for-rootstock.yml +++ b/.github/workflows/publish-docker-image-for-rootstock.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ 
env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-shibarium.yml b/.github/workflows/publish-docker-image-for-shibarium.yml index 8496b598eec3..3b5aaa4b9f13 100644 --- a/.github/workflows/publish-docker-image-for-shibarium.yml +++ b/.github/workflows/publish-docker-image-for-shibarium.yml @@ -18,10 +18,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -30,6 +34,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-stability.yml b/.github/workflows/publish-docker-image-for-stability.yml index b5f486595e0e..d4bfd64a27b2 100644 --- a/.github/workflows/publish-docker-image-for-stability.yml +++ b/.github/workflows/publish-docker-image-for-stability.yml @@ -18,10 +18,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ 
secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -30,6 +34,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-suave.yml b/.github/workflows/publish-docker-image-for-suave.yml index d7d28a9e0fa2..fccbaee55ccb 100644 --- a/.github/workflows/publish-docker-image-for-suave.yml +++ b/.github/workflows/publish-docker-image-for-suave.yml @@ -18,10 +18,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -30,6 +34,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/publish-docker-image-for-zetachain.yml b/.github/workflows/publish-docker-image-for-zetachain.yml index 0abd04fe2cca..d04c69973293 100644 --- a/.github/workflows/publish-docker-image-for-zetachain.yml +++ b/.github/workflows/publish-docker-image-for-zetachain.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: 
./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-zkevm.yml b/.github/workflows/publish-docker-image-for-zkevm.yml index 74ab92177a9f..30270a10c6a9 100644 --- a/.github/workflows/publish-docker-image-for-zkevm.yml +++ b/.github/workflows/publish-docker-image-for-zkevm.yml @@ -15,10 +15,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -27,6 +31,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-for-zksync.yml b/.github/workflows/publish-docker-image-for-zksync.yml index 932ab4b061f5..bcf784f35cb0 
100644 --- a/.github/workflows/publish-docker-image-for-zksync.yml +++ b/.github/workflows/publish-docker-image-for-zksync.yml @@ -14,10 +14,14 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -26,6 +30,10 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/publish-docker-image-staging-on-demand.yml b/.github/workflows/publish-docker-image-staging-on-demand.yml index bf3f48c890bc..f06df3348ba7 100644 --- a/.github/workflows/publish-docker-image-staging-on-demand.yml +++ b/.github/workflows/publish-docker-image-staging-on-demand.yml @@ -18,22 +18,17 @@ jobs: push_to_registry: name: Push Docker image to Docker Hub runs-on: ubuntu-latest - outputs: - release-version: ${{ steps.output-step.outputs.release-version }} - short-sha: ${{ steps.output-step.outputs.short-sha }} steps: - uses: actions/checkout@v4 - name: Setup repo - uses: ./.github/actions/setup-repo-and-short-sha + uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Add outputs - run: | - echo "::set-output name=release-version::${{ env.NEXT_RELEASE_VERSION }}" - echo "::set-output name=short-sha::${{ env.SHORT_SHA }}" - id: output-step + 
docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image uses: docker/build-push-action@v5 @@ -44,6 +39,10 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout-staging:latest, blockscout/blockscout-staging:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }} + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 build-args: | CACHE_EXCHANGE_RATES_PERIOD= API_V1_READ_METHODS_DISABLED=false diff --git a/.github/workflows/release-arbitrum.yml b/.github/workflows/release-arbitrum.yml index 2f1147d2a6a5..68334f9d55b0 100644 --- a/.github/workflows/release-arbitrum.yml +++ b/.github/workflows/release-arbitrum.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Arbitrum (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-arbitrum:latest, blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -51,6 +56,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -72,6 +78,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}-api + labels: ${{ 
steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-eth.yml b/.github/workflows/release-eth.yml index 90f35e2fc1ea..bc139d870b10 100644 --- a/.github/workflows/release-eth.yml +++ b/.github/workflows/release-eth.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Ethereum (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-ethereum:latest, blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -51,6 +56,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -72,6 +78,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-filecoin.yml b/.github/workflows/release-filecoin.yml index 60fbea53c7cb..2992a3c2a930 100644 --- a/.github/workflows/release-filecoin.yml +++ b/.github/workflows/release-filecoin.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + 
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Filecoin (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-filecoin:latest, blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -51,6 +56,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -72,6 +78,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-fuse.yml b/.github/workflows/release-fuse.yml index 3bd751519452..1a9e41004e3a 100644 --- a/.github/workflows/release-fuse.yml +++ b/.github/workflows/release-fuse.yml @@ -19,9 +19,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Fuse (indexer + API) uses: docker/build-push-action@v5 @@ -30,6 +34,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-fuse:latest, blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -73,6 +79,7 
@@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-gnosis.yml b/.github/workflows/release-gnosis.yml index e39c327e84a5..3755089991af 100644 --- a/.github/workflows/release-gnosis.yml +++ b/.github/workflows/release-gnosis.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Gnosis chain (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-xdai:latest, blockscout/blockscout-xdai:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-xdai:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -74,6 +80,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-xdai:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-optimism.yml b/.github/workflows/release-optimism.yml index fea08eada83b..0f9977675978 100644 --- a/.github/workflows/release-optimism.yml +++ b/.github/workflows/release-optimism.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ 
secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Optimism (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-optimism:latest, blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -51,6 +56,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -72,6 +78,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-polygon-edge.yml b/.github/workflows/release-polygon-edge.yml index 58f92636e208..b2a3ee4c3737 100644 --- a/.github/workflows/release-polygon-edge.yml +++ b/.github/workflows/release-polygon-edge.yml @@ -19,9 +19,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Polygon Edge (indexer + api) uses: docker/build-push-action@v5 @@ -30,6 +34,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-polygon-edge:latest, blockscout/blockscout-polygon-edge:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true 
tags: blockscout/blockscout-polygon-edge:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -73,6 +79,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-polygon-edge:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-polygon-zkevm.yml b/.github/workflows/release-polygon-zkevm.yml index c3d1be27d15a..101068fa38bb 100644 --- a/.github/workflows/release-polygon-zkevm.yml +++ b/.github/workflows/release-polygon-zkevm.yml @@ -19,9 +19,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Polygon zkEVM (indexer + API) uses: docker/build-push-action@v5 @@ -30,6 +34,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zkevm:latest, blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -73,6 +79,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-redstone.yml b/.github/workflows/release-redstone.yml index 8f0e22bc059e..d85d7c74dfd6 100644 --- a/.github/workflows/release-redstone.yml +++ 
b/.github/workflows/release-redstone.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Redstone uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-redstone:latest, blockscout/blockscout-redstone:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-redstone:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -74,6 +80,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-redstone:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-rootstock.yml b/.github/workflows/release-rootstock.yml index b017003ea4ac..a7ea06531804 100644 --- a/.github/workflows/release-rootstock.yml +++ b/.github/workflows/release-rootstock.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Rootstock (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-rsk:latest, 
blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -51,6 +56,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -72,6 +78,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-shibarium.yml b/.github/workflows/release-shibarium.yml index 2f147322bd29..2147eb436fc3 100644 --- a/.github/workflows/release-shibarium.yml +++ b/.github/workflows/release-shibarium.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Shibarium (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-shibarium:latest, blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -51,6 +56,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -72,6 +78,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-shibarium:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git 
a/.github/workflows/release-stability.yml b/.github/workflows/release-stability.yml index af32ab8c8375..2d6b9284b358 100644 --- a/.github/workflows/release-stability.yml +++ b/.github/workflows/release-stability.yml @@ -19,9 +19,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Stability (indexer + API) uses: docker/build-push-action@v5 @@ -30,6 +34,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-stability:latest, blockscout/blockscout-stability:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-stability:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -73,6 +79,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-stability:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-suave.yml b/.github/workflows/release-suave.yml index b521e158caec..8ac9dadc3e5b 100644 --- a/.github/workflows/release-suave.yml +++ b/.github/workflows/release-suave.yml @@ -19,9 +19,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for 
SUAVE (indexer + API) uses: docker/build-push-action@v5 @@ -30,6 +34,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-suave:latest, blockscout/blockscout-suave:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-suave:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -73,6 +79,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-suave:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-zetachain.yml b/.github/workflows/release-zetachain.yml index 2f021f9c743e..6d497fd0536f 100644 --- a/.github/workflows/release-zetachain.yml +++ b/.github/workflows/release-zetachain.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for Zetachain (indexer + API) uses: docker/build-push-action@v5 @@ -29,6 +33,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zetachain:latest, blockscout/blockscout-zetachain:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -51,6 +56,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zetachain:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -72,6 +78,7 @@ jobs: file: ./docker/Dockerfile push: true tags: 
blockscout/blockscout-zetachain:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release-zksync.yml b/.github/workflows/release-zksync.yml index 3216baf4ca92..e0d01276387e 100644 --- a/.github/workflows/release-zksync.yml +++ b/.github/workflows/release-zksync.yml @@ -19,9 +19,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build and push Docker image for ZkSync (indexer + API) uses: docker/build-push-action@v5 @@ -30,6 +34,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zksync:latest, blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -52,6 +57,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -73,6 +79,7 @@ jobs: file: ./docker/Dockerfile push: true tags: blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 84cc52b18f45..385b836cb907 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,9 +18,13 @@ jobs: - uses: actions/checkout@v4 - name: Setup repo uses: ./.github/actions/setup-repo + id: setup with: docker-username: ${{ secrets.DOCKER_USERNAME }} docker-password: ${{ secrets.DOCKER_PASSWORD }} + docker-remote-multi-platform: true + docker-arm-host: ${{ 
secrets.ARM_RUNNER_HOSTNAME }} + docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }} - name: Build & Push Core Docker image (indexer + API) uses: docker/build-push-action@v5 @@ -31,6 +35,7 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:latest, blockscout/blockscout:${{ env.RELEASE_VERSION }} + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -59,6 +64,7 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 @@ -86,6 +92,7 @@ jobs: cache-from: type=registry,ref=blockscout/blockscout:buildcache cache-to: type=registry,ref=blockscout/blockscout:buildcache,mode=max tags: blockscout/blockscout:${{ env.RELEASE_VERSION }}-api + labels: ${{ steps.setup.outputs.docker-labels }} platforms: | linux/amd64 linux/arm64/v8 From 5992623146a315e8779c870120a4ca3b30beeb28 Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Mon, 10 Jun 2024 13:08:02 +0400 Subject: [PATCH 074/150] fixed the field name (#10216) --- .../lib/block_scout_web/views/api/v2/arbitrum_view.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex index 5a4458402b37..a70c5fa5cc0c 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex @@ -125,7 +125,7 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do %{ "number" => batch.number, "transactions_count" => batch.transactions_count, - "block_count" => batch.end_block - batch.start_block + 1 + "blocks_count" => 
batch.end_block - batch.start_block + 1 } |> add_l1_tx_info(batch) end) From 2f20ff6ed4e4c61232666186970beb75ed57b8b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jun 2024 22:30:58 +0300 Subject: [PATCH 075/150] chore(deps): bump remote_ip from 1.1.0 to 1.2.0 (#10224) Bumps [remote_ip](https://github.com/ajvondrak/remote_ip) from 1.1.0 to 1.2.0. - [Release notes](https://github.com/ajvondrak/remote_ip/releases) - [Commits](https://github.com/ajvondrak/remote_ip/compare/v1.1.0...v1.2.0) --- updated-dependencies: - dependency-name: remote_ip dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index e6e73650f6dc..ae2e612bcabd 100644 --- a/mix.lock +++ b/mix.lock @@ -122,7 +122,7 @@ "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, "ratio": {:hex, :ratio, "2.4.2", "c8518f3536d49b1b00d88dd20d49f8b11abb7819638093314a6348139f14f9f9", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:numbers, "~> 5.2.0", [hex: :numbers, repo: "hexpm", optional: false]}], "hexpm", "441ef6f73172a3503de65ccf1769030997b0d533b1039422f1e5e0e0b4cbf89e"}, "redix": {:hex, :redix, "1.5.1", "a2386971e69bf23630fb3a215a831b5478d2ee7dc9ea7ac811ed89186ab5d7b7", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:nimble_options, "~> 0.5.0 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "85224eb2b683c516b80d472eb89b76067d5866913bf0be59d646f550de71f5c4"}, - 
"remote_ip": {:hex, :remote_ip, "1.1.0", "cb308841595d15df3f9073b7c39243a1dd6ca56e5020295cb012c76fbec50f2d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "616ffdf66aaad6a72fc546dabf42eed87e2a99e97b09cbd92b10cc180d02ed74"}, + "remote_ip": {:hex, :remote_ip, "1.2.0", "fb078e12a44414f4cef5a75963c33008fe169b806572ccd17257c208a7bc760f", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "2ff91de19c48149ce19ed230a81d377186e4412552a597d6a5137373e5877cb7"}, "rustler_precompiled": {:hex, :rustler_precompiled, "0.7.1", "ecadf02cc59a0eccbaed6c1937303a5827fbcf60010c541595e6d3747d3d0f9f", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:rustler, "~> 0.23", [hex: :rustler, repo: "hexpm", optional: true]}], "hexpm", "b9e4657b99a1483ea31502e1d58c464bedebe9028808eda45c3a429af4550c66"}, "sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"}, "spandex": {:hex, :spandex, "3.2.0", "f8cd40146ea988c87f3c14054150c9a47ba17e53cd4515c00e1f93c29c45404d", [:mix], [{:decorator, "~> 1.2", [hex: :decorator, repo: "hexpm", optional: true]}, {:optimal, "~> 0.3.3", [hex: :optimal, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d0a7d5aef4c5af9cf5467f2003e8a5d8d2bdae3823a6cc95d776b9a2251d4d03"}, From 8c03e73bb7159339b19dbe990b3714a8c61de3c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jun 2024 22:31:18 +0300 Subject: [PATCH 076/150] chore(deps): bump cldr_utils from 2.25.0 to 2.26.0 (#10223) Bumps 
[cldr_utils](https://github.com/elixir-cldr/cldr_utils) from 2.25.0 to 2.26.0. - [Release notes](https://github.com/elixir-cldr/cldr_utils/releases) - [Changelog](https://github.com/elixir-cldr/cldr_utils/blob/main/CHANGELOG.md) - [Commits](https://github.com/elixir-cldr/cldr_utils/compare/v2.25.0...v2.26.0) --- updated-dependencies: - dependency-name: cldr_utils dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index ae2e612bcabd..ccfc14c13726 100644 --- a/mix.lock +++ b/mix.lock @@ -15,7 +15,7 @@ "castore": {:hex, :castore, "1.0.7", "b651241514e5f6956028147fe6637f7ac13802537e895a724f90bf3e36ddd1dd", [:mix], [], "hexpm", "da7785a4b0d2a021cd1292a60875a784b6caef71e76bf4917bdee1f390455cf5"}, "cbor": {:hex, :cbor, "1.0.1", "39511158e8ea5a57c1fcb9639aaa7efde67129678fee49ebbda780f6f24959b0", [:mix], [], "hexpm", "5431acbe7a7908f17f6a9cd43311002836a34a8ab01876918d8cfb709cd8b6a2"}, "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"}, - "cldr_utils": {:hex, :cldr_utils, "2.25.0", "3cc2ab6e9e4f855ba78a3f3fc4963ccf7b68b731f4e91de3d9b310adddb96b62", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.5", [hex: :certifi, repo: "hexpm", optional: true]}, {:decimal, "~> 1.9 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "9041660356ffa1129e0d87d110e188f5da0e0bba94fb915e11275e04ace066e1"}, + "cldr_utils": {:hex, :cldr_utils, "2.26.0", "be9e573965c4822b5e172b4af3346e26ca02dd5432a50871479c9396f4c8c087", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.5", [hex: 
:certifi, repo: "hexpm", optional: true]}, {:decimal, "~> 1.9 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "a56a21321c88d955741764f2a53109706fba5a2dcd13ca590ef7d3651698ba4c"}, "cloak": {:hex, :cloak, "1.1.4", "aba387b22ea4d80d92d38ab1890cc528b06e0e7ef2a4581d71c3fdad59e997e7", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "92b20527b9aba3d939fab0dd32ce592ff86361547cfdc87d74edce6f980eb3d7"}, "cloak_ecto": {:hex, :cloak_ecto, "1.3.0", "0de127c857d7452ba3c3367f53fb814b0410ff9c680a8d20fbe8b9a3c57a1118", [:mix], [{:cloak, "~> 1.1.1", [hex: :cloak, repo: "hexpm", optional: false]}, {:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "314beb0c123b8a800418ca1d51065b27ba3b15f085977e65c0f7b2adab2de1cc"}, "coerce": {:hex, :coerce, "1.0.1", "211c27386315dc2894ac11bc1f413a0e38505d808153367bd5c6e75a4003d096", [:mix], [], "hexpm", "b44a691700f7a1a15b4b7e2ff1fa30bebd669929ac8aa43cffe9e2f8bf051cf1"}, From e0b43fdbe692060058501d2ea1ff339de576d030 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jun 2024 22:31:30 +0300 Subject: [PATCH 077/150] chore(deps-dev): bump wallaby from 0.30.6 to 0.30.7 (#10221) Bumps [wallaby](https://github.com/elixir-wallaby/wallaby) from 0.30.6 to 0.30.7. - [Release notes](https://github.com/elixir-wallaby/wallaby/releases) - [Changelog](https://github.com/elixir-wallaby/wallaby/blob/main/CHANGELOG.md) - [Commits](https://github.com/elixir-wallaby/wallaby/compare/v0.30.6...v0.30.7) --- updated-dependencies: - dependency-name: wallaby dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mix.lock b/mix.lock index ccfc14c13726..e9a3c4262261 100644 --- a/mix.lock +++ b/mix.lock @@ -84,7 +84,7 @@ "memento": {:hex, :memento, "0.3.2", "38cfc8ff9bcb1adff7cbd0f3b78a762636b86dff764729d1c82d0464c539bdd0", [:mix], [], "hexpm", "25cf691a98a0cb70262f4a7543c04bab24648cb2041d937eb64154a8d6f8012b"}, "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"}, - "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, + "mimerl": {:hex, :mimerl, "1.3.0", "d0cd9fc04b9061f82490f6581e0128379830e78535e017f7780f37fea7545726", [:rebar3], [], "hexpm", "a1e15a50d1887217de95f0b9b0793e32853f7c258a5cd227650889b38839fe9d"}, "mimetype_parser": {:hex, :mimetype_parser, "0.1.3", "628ac9fe56aa7edcedb534d68397dd66674ab82493c8ebe39acb9a19b666099d", [:mix], [], "hexpm", "7d8f80c567807ce78cd93c938e7f4b0a20b1aaaaab914bf286f68457d9f7a852"}, "mix_erlang_tasks": {:hex, :mix_erlang_tasks, "0.1.0", "36819fec60b80689eb1380938675af215565a89320a9e29c72c70d97512e4649", [:mix], [], "hexpm", "95d2839c422c482a70c08a8702da8242f86b773f8ab6e8602a4eb72da8da04ed"}, "mock": {:hex, :mock, "0.3.8", "7046a306b71db2488ef54395eeb74df0a7f335a7caca4a3d3875d1fc81c884dd", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "7fa82364c97617d79bb7d15571193fc0c4fe5afd0c932cef09426b3ee6fe2022"}, @@ -141,7 +141,7 @@ 
"ueberauth_auth0": {:hex, :ueberauth_auth0, "2.1.0", "0632d5844049fa2f26823f15e1120aa32f27df6f27ce515a4b04641736594bf4", [:mix], [{:oauth2, "~> 2.0", [hex: :oauth2, repo: "hexpm", optional: false]}, {:ueberauth, "~> 0.7", [hex: :ueberauth, repo: "hexpm", optional: false]}], "hexpm", "8d3b30fa27c95c9e82c30c4afb016251405706d2e9627e603c3c9787fd1314fc"}, "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, "varint": {:hex, :varint, "1.4.0", "b7405c8a99db7b95d4341fa9cb15e7c3af6c8dda43e21bbe1c4a9cdff50b6502", [:mix], [], "hexpm", "0fd461901b7120c03467530dff3c58fa3475328fd75ba72c7d3cbf13bce6b0d2"}, - "wallaby": {:hex, :wallaby, "0.30.6", "7dc4c1213f3b52c4152581d126632bc7e06892336d3a0f582853efeeabd45a71", [:mix], [{:ecto_sql, ">= 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}, {:httpoison, "~> 0.12 or ~> 1.0 or ~> 2.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:phoenix_ecto, ">= 3.0.0", [hex: :phoenix_ecto, repo: "hexpm", optional: true]}, {:web_driver_client, "~> 0.2.0", [hex: :web_driver_client, repo: "hexpm", optional: false]}], "hexpm", "50950c1d968549b54c20e16175c68c7fc0824138e2bb93feb11ef6add8eb23d4"}, + "wallaby": {:hex, :wallaby, "0.30.7", "ba91325af8a9fd72f0b7f93410033853c9b820e3a97d08e2c6c5f6dfdbc7bac9", [:mix], [{:ecto_sql, ">= 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}, {:httpoison, "~> 0.12 or ~> 1.0 or ~> 2.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:phoenix_ecto, ">= 3.0.0", [hex: :phoenix_ecto, repo: "hexpm", optional: true]}, {:web_driver_client, "~> 0.2.0", [hex: :web_driver_client, repo: "hexpm", optional: false]}], "hexpm", "a8ebde97dbf99bf31ee9bfe258e897fef8fd5f19ba41f7574ea4ab9bb5da607b"}, 
"web_driver_client": {:hex, :web_driver_client, "0.2.0", "63b76cd9eb3b0716ec5467a0f8bead73d3d9612e63f7560d21357f03ad86e31a", [:mix], [{:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:tesla, "~> 1.3", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "83cc6092bc3e74926d1c8455f0ce927d5d1d36707b74d9a65e38c084aab0350f"}, "websocket_client": {:git, "https://github.com/blockscout/websocket_client.git", "0b4ecc5b1fb8a0bd1c8352728da787c20add53aa", [branch: "master"]}, } From e292873f793092ed439a68d13732e4ebd9132a93 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Tue, 11 Jun 2024 11:53:42 +0300 Subject: [PATCH 078/150] fix: Fix unknown UID bug (#10226) * fix: Fix unknown UID bug * Fix timestamp comparison --- .../explorer/chain/smart_contract/verification_status.ex | 8 ++++---- .../lib/explorer/smart_contract/solidity/publisher.ex | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex b/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex index 48fcc772da77..4ababe2d5824 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/verification_status.ex @@ -114,7 +114,7 @@ defmodule Explorer.Chain.SmartContract.VerificationStatus do def validate_uid(<<_address::binary-size(40), timestamp_hex::binary>> = uid) do case Integer.parse(timestamp_hex, 16) do {timestamp, ""} -> - if DateTime.utc_now() |> DateTime.to_unix() > timestamp do + if DateTime.utc_now() |> DateTime.to_unix() >= timestamp do {:ok, uid} else :error @@ -130,7 +130,7 @@ defmodule Explorer.Chain.SmartContract.VerificationStatus do defp mb_find_uid_in_queue(:unknown_uid, uid) do SolidityPublisherWorker |> QuePersistence.all() - |> Enum.any?(fn + |> Enum.find_value(fn %Que.Job{arguments: 
{"flattened_api", _, _, ^uid}} -> :pending @@ -138,8 +138,8 @@ defmodule Explorer.Chain.SmartContract.VerificationStatus do :pending _ -> - :unknown_uid - end) + nil + end) || :unknown_uid end defp mb_find_uid_in_queue(other_status, _), do: other_status diff --git a/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex b/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex index 2cebbaa9b089..1dae69055e40 100644 --- a/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex +++ b/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex @@ -253,7 +253,7 @@ defmodule Explorer.SmartContract.Solidity.Publisher do verification_with_files? ) - Logger.error("Solidity smart-contract verification #{address_hash} failed because of the error #{error}") + Logger.error("Solidity smart-contract verification #{address_hash} failed because of the error #{inspect(error)}") %{changeset | action: :insert} end From d231ca62bd14a11a8966f9e064551c5d2f06d185 Mon Sep 17 00:00:00 2001 From: nikitosing <32202610+nikitosing@users.noreply.github.com> Date: Tue, 11 Jun 2024 11:54:07 +0300 Subject: [PATCH 079/150] fix: Replace empty arg names with argN (#9748) * fix: Replace empty arg names with argN * Fix dialyzer * Remove dbg * Fix dialyzer * Fix test * Fix test --------- Co-authored-by: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> --- apps/explorer/lib/explorer/chain/log.ex | 26 ++++++++- .../lib/explorer/chain/transaction.ex | 13 ++++- .../explorer/test/explorer/chain/log_test.exs | 56 +++++++++++++++++++ .../test/explorer/chain/transaction_test.exs | 34 +++++++++++ 4 files changed, 126 insertions(+), 3 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/log.ex b/apps/explorer/lib/explorer/chain/log.ex index 183c93230e17..c8acde43f6ac 100644 --- a/apps/explorer/lib/explorer/chain/log.ex +++ b/apps/explorer/lib/explorer/chain/log.ex @@ -232,8 +232,8 @@ defmodule Explorer.Chain.Log do log.fourth_topic && log.fourth_topic.bytes, 
log.data.bytes ), - selector <- %{selector | method_id: first_four_bytes} do - {:ok, selector, mapping} + selector <- %FunctionSelector{selector | method_id: first_four_bytes} do + {:ok, alter_inputs_names(selector), alter_mapping_names(mapping)} end rescue e -> @@ -261,6 +261,28 @@ defmodule Explorer.Chain.Log do IO.iodata_to_binary([name, "(", text, ")"]) end + defp alter_inputs_names(%FunctionSelector{input_names: names} = selector) do + names = + names + |> Enum.with_index() + |> Enum.map(fn {name, index} -> + if name == "", do: "arg#{index}", else: name + end) + + %FunctionSelector{selector | input_names: names} + end + + defp alter_mapping_names(mapping) when is_list(mapping) do + mapping + |> Enum.with_index() + |> Enum.map(fn {{name, type, indexed?, value}, index} -> + name = if name == "", do: "arg#{index}", else: name + {name, type, indexed?, value} + end) + end + + defp alter_mapping_names(mapping), do: mapping + defp decode_event_via_sig_provider( log, transaction, diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index e06ba545b3cb..b943b9febd47 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -988,7 +988,7 @@ defmodule Explorer.Chain.Transaction do abi |> ABI.parse_specification() |> ABI.find_and_decode(data) do - {:ok, result} + {:ok, alter_inputs_names(result)} end rescue e -> @@ -1003,6 +1003,17 @@ defmodule Explorer.Chain.Transaction do {:error, :could_not_decode} end + defp alter_inputs_names({%FunctionSelector{input_names: names} = selector, mapping}) do + names = + names + |> Enum.with_index() + |> Enum.map(fn {name, index} -> + if name == "", do: "arg#{index}", else: name + end) + + {%FunctionSelector{selector | input_names: names}, mapping} + end + defp selector_mapping(selector, values, hash) do types = Enum.map(selector.types, &FunctionSelector.encode_type/1) diff --git 
a/apps/explorer/test/explorer/chain/log_test.exs b/apps/explorer/test/explorer/chain/log_test.exs index c63949833327..ce80ebade139 100644 --- a/apps/explorer/test/explorer/chain/log_test.exs +++ b/apps/explorer/test/explorer/chain/log_test.exs @@ -129,6 +129,62 @@ defmodule Explorer.Chain.LogTest do ]}, _, _} = Log.decode(log, transaction, [], false) end + test "replace arg names with argN if it's empty string" do + to_address = insert(:address, contract_code: "0x") + + insert(:smart_contract, + abi: [ + %{ + "anonymous" => false, + "inputs" => [ + %{"indexed" => true, "name" => "", "type" => "string"}, + %{"indexed" => false, "name" => "", "type" => "uint256"}, + %{"indexed" => true, "name" => "", "type" => "bool"} + ], + "name" => "WantsPets", + "type" => "event" + } + ], + address_hash: to_address.hash, + contract_code_md5: "123" + ) + + topic1_bytes = ExKeccak.hash_256("WantsPets(string,uint256,bool)") + topic1 = "0x" <> Base.encode16(topic1_bytes, case: :lower) + topic2_bytes = ExKeccak.hash_256("bob") + topic2 = "0x" <> Base.encode16(topic2_bytes, case: :lower) + topic3 = "0x0000000000000000000000000000000000000000000000000000000000000001" + data = "0x0000000000000000000000000000000000000000000000000000000000000000" + + transaction = + :transaction_to_verified_contract + |> insert(to_address: to_address) + |> Repo.preload(to_address: :smart_contract) + + log = + insert(:log, + address: to_address, + transaction: transaction, + first_topic: topic(topic1), + second_topic: topic(topic2), + third_topic: topic(topic3), + fourth_topic: nil, + data: data + ) + + TestHelper.get_eip1967_implementation_zero_addresses() + + assert {{:ok, "eb9b3c4c", "WantsPets(string indexed arg0, uint256 arg1, bool indexed arg2)", + [ + {"arg0", "string", true, + {:dynamic, + <<56, 228, 122, 123, 113, 157, 206, 99, 102, 42, 234, 244, 52, 64, 50, 111, 85, 27, 138, 126, 225, + 152, 206, 227, 92, 181, 213, 23, 242, 210, 150, 162>>}}, + {"arg1", "uint256", false, 0}, + {"arg2", "bool", 
true, true} + ]}, _, _} = Log.decode(log, transaction, [], false) + end + test "finds decoding candidates" do params = params_for(:smart_contract, %{ diff --git a/apps/explorer/test/explorer/chain/transaction_test.exs b/apps/explorer/test/explorer/chain/transaction_test.exs index b17d656ab3ac..85726e79451a 100644 --- a/apps/explorer/test/explorer/chain/transaction_test.exs +++ b/apps/explorer/test/explorer/chain/transaction_test.exs @@ -298,6 +298,40 @@ defmodule Explorer.Chain.TransactionTest do assert {{:ok, "60fe47b1", "set(uint256 x)", [{"x", "uint256", 10}]}, _, _} = Transaction.decoded_input_data(transaction, []) end + + test "arguments name in function call replaced with argN if it's empty string" do + contract = + insert(:smart_contract, + contract_code_md5: "123", + abi: [ + %{ + "constant" => false, + "inputs" => [%{"name" => "", "type" => "uint256"}], + "name" => "set", + "outputs" => [], + "payable" => false, + "stateMutability" => "nonpayable", + "type" => "function" + } + ] + ) + |> Repo.preload(:address) + + input_data = + "set(uint)" + |> ABI.encode([10]) + |> Base.encode16(case: :lower) + + transaction = + :transaction + |> insert(to_address: contract.address, input: "0x" <> input_data) + |> Repo.preload(to_address: :smart_contract) + + TestHelper.get_eip1967_implementation_zero_addresses() + + assert {{:ok, "60fe47b1", "set(uint256 arg0)", [{"arg0", "uint256", 10}]}, _, _} = + Transaction.decoded_input_data(transaction, []) + end end describe "Poison.encode!/1" do From 3cc62227dfe2635d46e06c053cdf27bf3026cd95 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Tue, 11 Jun 2024 14:35:54 +0400 Subject: [PATCH 080/150] fix: ERC-1155 tokens metadata retrieve (#10231) --- .../lib/explorer/token/metadata_retriever.ex | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/apps/explorer/lib/explorer/token/metadata_retriever.ex 
b/apps/explorer/lib/explorer/token/metadata_retriever.ex index b8ab4afaa510..87dcb1d058de 100644 --- a/apps/explorer/lib/explorer/token/metadata_retriever.ex +++ b/apps/explorer/lib/explorer/token/metadata_retriever.ex @@ -209,20 +209,19 @@ defmodule Explorer.Token.MetadataRetriever do if Enum.empty?(erc_1155_tokens) do fetched_result else - result = - fetched_result - |> Enum.filter(fn token -> - !Map.has_key?(token, :name) && - Enum.any?(erc_1155_tokens, fn erc_1155_token -> - erc_1155_token.contract_address_hash == token.contract_address_hash - end) - end) - - result - |> Enum.map(fn token -> - try_to_fetch_erc_1155_name(%{}, token.contract_address_hash, "ERC-1155") + fetched_result + |> Enum.reduce([], fn token, acc -> + # # credo:disable-for-lines:2 + updated_token = + if Enum.any?(erc_1155_tokens, &(&1.contract_address_hash == token.contract_address_hash)) do + try_to_fetch_erc_1155_name(token, token.contract_address_hash, "ERC-1155") + else + token + end + + [updated_token | acc] end) - |> Enum.filter(fn result -> result != %{} end) + |> Enum.reverse() end {:ok, processed_result} From 854a81bddabe8ed4ea10d63efa9f137c89c168a2 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Tue, 11 Jun 2024 13:37:29 +0300 Subject: [PATCH 081/150] fix: cannot truncate chardata (#10227) --- apps/explorer/lib/explorer/chain.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 80bb57d31c2a..2c0688bc5972 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -3009,7 +3009,7 @@ defmodule Explorer.Chain do {:error, reason} -> Logger.error(fn -> - ["Error while fetching first trace for tx: #{hash_string} error reason: ", reason] + ["Error while fetching first trace for tx: #{hash_string} error reason: ", to_string(reason)] end) fetch_tx_revert_reason_using_call(transaction) From 
a8e7ada9af06bfd98ebbdb4626b28b7bab75dfa4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Jun 2024 13:53:06 +0300 Subject: [PATCH 082/150] chore(deps-dev): bump benchee from 1.3.0 to 1.3.1 (#10222) Bumps [benchee](https://github.com/bencheeorg/benchee) from 1.3.0 to 1.3.1. - [Release notes](https://github.com/bencheeorg/benchee/releases) - [Changelog](https://github.com/bencheeorg/benchee/blob/main/CHANGELOG.md) - [Commits](https://github.com/bencheeorg/benchee/compare/1.3.0...1.3.1) --- updated-dependencies: - dependency-name: benchee dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index e9a3c4262261..fc525c443eab 100644 --- a/mix.lock +++ b/mix.lock @@ -6,7 +6,7 @@ "accept": {:hex, :accept, "0.3.5", "b33b127abca7cc948bbe6caa4c263369abf1347cfa9d8e699c6d214660f10cd1", [:rebar3], [], "hexpm", "11b18c220bcc2eab63b5470c038ef10eb6783bcb1fcdb11aa4137defa5ac1bb8"}, "bamboo": {:hex, :bamboo, "2.3.0", "d2392a2cabe91edf488553d3c70638b532e8db7b76b84b0a39e3dfe492ffd6fc", [:mix], [{:hackney, ">= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.4 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "dd0037e68e108fd04d0e8773921512c940e35d981e097b5793543e3b2f9cd3f6"}, "bcrypt_elixir": {:hex, :bcrypt_elixir, "3.1.0", "0b110a9a6c619b19a7f73fa3004aa11d6e719a67e672d1633dc36b6b2290a0f7", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", 
"2ad2acb5a8bc049e8d5aa267802631912bb80d5f4110a178ae7999e69dca1bf7"}, - "benchee": {:hex, :benchee, "1.3.0", "f64e3b64ad3563fa9838146ddefb2d2f94cf5b473bdfd63f5ca4d0657bf96694", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "34f4294068c11b2bd2ebf2c59aac9c7da26ffa0068afdf3419f1b176e16c5f81"}, + "benchee": {:hex, :benchee, "1.3.1", "c786e6a76321121a44229dde3988fc772bca73ea75170a73fd5f4ddf1af95ccf", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "76224c58ea1d0391c8309a8ecbfe27d71062878f59bd41a390266bf4ac1cc56d"}, "benchee_csv": {:hex, :benchee_csv, "1.0.0", "0b3b9223290bfcb8003552705bec9bcf1a89b4a83b70bd686e45295c264f3d16", [:mix], [{:benchee, ">= 0.99.0 and < 2.0.0", [hex: :benchee, repo: "hexpm", optional: false]}, {:csv, "~> 2.0", [hex: :csv, repo: "hexpm", optional: false]}], "hexpm", "cdefb804c021dcf7a99199492026584be9b5a21d6644ac0d01c81c5d97c520d5"}, "briefly": {:git, "https://github.com/CargoSense/briefly.git", "4836ba322ffb504a102a15cc6e35d928ef97120e", []}, "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, From 68ca0ed8ff562518f9aa683db8e1942425fb5141 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Tue, 11 Jun 2024 16:37:15 +0300 Subject: [PATCH 083/150] fix: add proxy_implementations preloads (#10225) * fix: add proxy_implementations preloads * Add preloads to channels * Add preload to cache --- .../channels/address_channel.ex | 27 ++++++++++- .../block_scout_web/channels/block_channel.ex | 7 
++- .../controllers/api/v2/address_controller.ex | 45 +++++++++---------- .../controllers/api/v2/block_controller.ex | 27 +++++------ .../api/v2/main_page_controller.ex | 11 ++--- .../proxy/account_abstraction_controller.ex | 3 +- .../api/v2/transaction_controller.ex | 43 +++++++++--------- .../api/v2/validator_controller.ex | 2 +- .../api/v2/withdrawal_controller.ex | 8 +++- .../lib/block_scout_web/endpoint.ex | 2 +- .../lib/block_scout_web/paging_helper.ex | 6 +-- apps/explorer/lib/explorer/chain.ex | 8 ++-- 12 files changed, 108 insertions(+), 81 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex b/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex index 8f95108bb2be..a89c20de49be 100644 --- a/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex +++ b/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex @@ -146,6 +146,7 @@ defmodule BlockScoutWeb.AddressChannel do {:noreply, socket} end + # TODO: fix or remove, "internal_transaction.json" clause does not exist def handle_out( "internal_transaction", %{address: _address, internal_transaction: internal_transaction}, @@ -330,7 +331,19 @@ defmodule BlockScoutWeb.AddressChannel do event ) when is_list(transactions) do - transaction_json = TransactionViewAPI.render("transactions.json", %{transactions: transactions, conn: nil}) + transaction_json = + TransactionViewAPI.render("transactions.json", %{ + transactions: + transactions + |> Repo.preload([ + [ + from_address: [:names], + to_address: [:names, :smart_contract, :proxy_implementations], + created_contract_address: [:names, :smart_contract, :proxy_implementations] + ] + ]), + conn: nil + }) push(socket, event, %{transactions: transaction_json}) @@ -375,7 +388,17 @@ defmodule BlockScoutWeb.AddressChannel do ) when is_list(token_transfers) do token_transfer_json = - TransactionViewAPI.render("token_transfers.json", %{token_transfers: token_transfers, conn: nil}) + 
TransactionViewAPI.render("token_transfers.json", %{ + token_transfers: + token_transfers + |> Repo.preload([ + [ + from_address: [:names, :smart_contract, :proxy_implementations], + to_address: [:names, :smart_contract, :proxy_implementations] + ] + ]), + conn: nil + }) push(socket, event, %{token_transfers: token_transfer_json}) diff --git a/apps/block_scout_web/lib/block_scout_web/channels/block_channel.ex b/apps/block_scout_web/lib/block_scout_web/channels/block_channel.ex index 560a1d4d96c2..2340ab38121a 100644 --- a/apps/block_scout_web/lib/block_scout_web/channels/block_channel.ex +++ b/apps/block_scout_web/lib/block_scout_web/channels/block_channel.ex @@ -6,6 +6,7 @@ defmodule BlockScoutWeb.BlockChannel do alias BlockScoutWeb.API.V2.BlockView, as: BlockViewAPI alias BlockScoutWeb.{BlockView, ChainView} + alias Explorer.Repo alias Phoenix.View alias Timex.Duration @@ -24,7 +25,11 @@ defmodule BlockScoutWeb.BlockChannel do %{block: block, average_block_time: average_block_time}, %Phoenix.Socket{handler: BlockScoutWeb.UserSocketV2} = socket ) do - rendered_block = BlockViewAPI.render("block.json", %{block: block, socket: nil}) + rendered_block = + BlockViewAPI.render("block.json", %{ + block: block |> Repo.preload(miner: [:names, :smart_contract, :proxy_implementations]), + socket: nil + }) push(socket, "new_block", %{ average_block_time: to_string(Duration.to_milliseconds(average_block_time)), diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex index 8170bbb9994e..0a2d71441909 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex @@ -36,23 +36,18 @@ defmodule BlockScoutWeb.API.V2.AddressController do @transaction_necessity_by_association [ necessity_by_association: %{ - [created_contract_address: :names] => 
:optional, - [from_address: :names] => :optional, - [to_address: [:names, :proxy_implementations]] => :optional, - :block => :optional, - [created_contract_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional + [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + :block => :optional }, api?: true ] @token_transfer_necessity_by_association [ necessity_by_association: %{ - [to_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :names] => :optional, - [from_address: :names] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, :block => :optional, :transaction => :optional, :token => :optional @@ -63,7 +58,8 @@ defmodule BlockScoutWeb.API.V2.AddressController do @address_options [ necessity_by_association: %{ :names => :optional, - :token => :optional + :token => :optional, + :proxy_implementations => :optional }, api?: true ] @@ -172,10 +168,8 @@ defmodule BlockScoutWeb.API.V2.AddressController do options = [ necessity_by_association: %{ - [to_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :names] => :optional, - [from_address: :names] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, :block => :optional, :token => :optional, :transaction => :optional @@ -246,12 +240,9 @@ defmodule BlockScoutWeb.API.V2.AddressController do full_options = [ necessity_by_association: %{ - [created_contract_address: :names] => :optional, - [from_address: :names] => :optional, - 
[to_address: :names] => :optional, - [created_contract_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional + [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional } ] |> Keyword.merge(paging_options(params)) @@ -280,7 +271,14 @@ defmodule BlockScoutWeb.API.V2.AddressController do formatted_topic = if String.starts_with?(prepared_topic, "0x"), do: prepared_topic, else: "0x" <> prepared_topic - options = params |> paging_options() |> Keyword.merge(topic: formatted_topic) |> Keyword.merge(@api_true) + options = + params + |> paging_options() + |> Keyword.merge(topic: formatted_topic) + |> Keyword.merge( + necessity_by_association: %{[address: [:names, :smart_contract, :proxy_implementations]] => :optional} + ) + |> Keyword.merge(@api_true) results_plus_one = Chain.address_to_logs(address_hash, false, options) @@ -323,6 +321,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do full_options = [ necessity_by_association: %{ + [miner: :proxy_implementations] => :optional, miner: :required, nephews: :optional, transactions: :optional, diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex index c33c6fcd34db..0395d6080344 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex @@ -56,25 +56,19 @@ defmodule BlockScoutWeb.API.V2.BlockController do @transaction_necessity_by_association [ necessity_by_association: %{ - [created_contract_address: :names] => :optional, - [from_address: :names] => :optional, - [to_address: :names] => :optional, - :block => 
:optional, - [created_contract_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional + [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + :block => :optional } |> Map.merge(@chain_type_transaction_necessity_by_association) ] @internal_transaction_necessity_by_association [ necessity_by_association: %{ - [created_contract_address: :names] => :optional, - [from_address: :names] => :optional, - [to_address: :names] => :optional, - [created_contract_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional + [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional } ] @@ -83,7 +77,7 @@ defmodule BlockScoutWeb.API.V2.BlockController do @block_params [ necessity_by_association: %{ - [miner: :names] => :optional, + [miner: [:names, :smart_contract, :proxy_implementations]] => :optional, :uncles => :optional, :nephews => :optional, :rewards => :optional, @@ -257,7 +251,10 @@ defmodule BlockScoutWeb.API.V2.BlockController do def withdrawals(conn, %{"block_hash_or_number" => block_hash_or_number} = params) do with {:ok, block} <- block_param_to_block(block_hash_or_number) do full_options = - [necessity_by_association: %{address: :optional}, api?: true] + [ + necessity_by_association: %{[address: [:names, :smart_contract, :proxy_implementations]] => :optional}, + api?: true + ] |> Keyword.merge(paging_options(params)) withdrawals_plus_one = Chain.block_to_withdrawals(block.hash, full_options) diff --git 
a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex index 8a39c3e36804..f37a5b5410ab 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex @@ -13,12 +13,9 @@ defmodule BlockScoutWeb.API.V2.MainPageController do @transactions_options [ necessity_by_association: %{ :block => :required, - [created_contract_address: :names] => :optional, - [from_address: :names] => :optional, - [to_address: :names] => :optional, - [created_contract_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional + [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional }, paging_options: %PagingOptions{page_size: 6}, api?: true @@ -30,7 +27,7 @@ defmodule BlockScoutWeb.API.V2.MainPageController do blocks = [paging_options: %PagingOptions{page_size: 4}, api?: true] |> Chain.list_blocks() - |> Repo.replica().preload([[miner: :names], :transactions, :rewards]) + |> Repo.replica().preload([[miner: [:names, :smart_contract, :proxy_implementations]], :transactions, :rewards]) conn |> put_status(200) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex index 5565f003b5e8..1b0bacf4a578 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex @@ -157,7 +157,8 @@ defmodule 
BlockScoutWeb.API.V2.Proxy.AccountAbstractionController do |> Chain.hashes_to_addresses( necessity_by_association: %{ :names => :optional, - :smart_contract => :optional + :smart_contract => :optional, + :proxy_implementations => :optional }, api?: true ) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex index 7421b3e2235e..a8ebf1197e70 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex @@ -55,38 +55,35 @@ defmodule BlockScoutWeb.API.V2.TransactionController do # TODO might be redundant to preload blob fields in some of the endpoints @transaction_necessity_by_association %{ :block => :optional, - [created_contract_address: :names] => :optional, - [created_contract_address: :token] => :optional, - [created_contract_address: :smart_contract] => :optional, + [ + created_contract_address: [ + :names, + :token, + :smart_contract, + :proxy_implementations + ] + ] => :optional, [from_address: :names] => :optional, - [to_address: :names] => :optional, - [to_address: :smart_contract] => :optional + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional } |> Map.merge(@chain_type_transaction_necessity_by_association) @token_transfers_necessity_by_association %{ - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional, - [from_address: :names] => :optional, - [to_address: :names] => :optional + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional } @token_transfers_in_tx_necessity_by_association %{ - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional, - [from_address: :names] => :optional, - [to_address: 
:names] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional, token: :required } @internal_transaction_necessity_by_association [ necessity_by_association: %{ - [created_contract_address: :names] => :optional, - [from_address: :names] => :optional, - [to_address: :names] => :optional, - [created_contract_address: :smart_contract] => :optional, - [from_address: :smart_contract] => :optional, - [to_address: :smart_contract] => :optional + [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, + [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional } ] @@ -387,9 +384,7 @@ defmodule BlockScoutWeb.API.V2.TransactionController do full_options = [ necessity_by_association: %{ - [address: :names] => :optional, - [address: :smart_contract] => :optional, - address: :optional + [address: [:names, :smart_contract, :proxy_implementations]] => :optional } ] |> Keyword.merge(paging_options(params)) @@ -421,7 +416,9 @@ defmodule BlockScoutWeb.API.V2.TransactionController do with {:ok, transaction, _transaction_hash} <- validate_transaction(transaction_hash_string, params, necessity_by_association: - Map.merge(@transaction_necessity_by_association, %{[block: [miner: :names]] => :optional}), + Map.merge(@transaction_necessity_by_association, %{ + [block: [miner: [:names, :smart_contract, :proxy_implementations]]] => :optional + }), api?: true ) do state_changes_plus_next_page = diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/validator_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/validator_controller.ex index 4c7883502814..78736763dbc2 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/validator_controller.ex +++ 
b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/validator_controller.ex @@ -28,7 +28,7 @@ defmodule BlockScoutWeb.API.V2.ValidatorController do options = [ necessity_by_association: %{ - :address => :optional + [address: [:names, :smart_contract, :proxy_implementations]] => :optional } ] |> Keyword.merge(@api_true) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/withdrawal_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/withdrawal_controller.ex index f9f9e17e9be9..a58a84ed1599 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/withdrawal_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/withdrawal_controller.ex @@ -12,7 +12,13 @@ defmodule BlockScoutWeb.API.V2.WithdrawalController do def withdrawals_list(conn, params) do full_options = - [necessity_by_association: %{address: :optional, block: :optional}, api?: true] + [ + necessity_by_association: %{ + [address: [:names, :smart_contract, :proxy_implementations]] => :optional, + block: :optional + }, + api?: true + ] |> Keyword.merge(paging_options(params)) withdrawals_plus_one = Chain.list_withdrawals(full_options) diff --git a/apps/block_scout_web/lib/block_scout_web/endpoint.ex b/apps/block_scout_web/lib/block_scout_web/endpoint.ex index 7d742482847d..b5bf97ee5e57 100644 --- a/apps/block_scout_web/lib/block_scout_web/endpoint.ex +++ b/apps/block_scout_web/lib/block_scout_web/endpoint.ex @@ -69,7 +69,7 @@ defmodule BlockScoutWeb.Endpoint do plug(BlockScoutWeb.Prometheus.Exporter) # 'x-apollo-tracing' header for https://www.graphqlbin.com to work with our GraphQL endpoint - plug(CORSPlug, headers: ["x-apollo-tracing" | CORSPlug.defaults()[:headers]]) + plug(CORSPlug, origin: :self, headers: ["x-apollo-tracing" | CORSPlug.defaults()[:headers]]) plug(BlockScoutWeb.Router) end diff --git a/apps/block_scout_web/lib/block_scout_web/paging_helper.ex 
b/apps/block_scout_web/lib/block_scout_web/paging_helper.ex index bea0045e3a8e..bcf159686983 100644 --- a/apps/block_scout_web/lib/block_scout_web/paging_helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/paging_helper.ex @@ -158,7 +158,7 @@ defmodule BlockScoutWeb.PagingHelper do [ necessity_by_association: %{ :transactions => :optional, - [miner: :names] => :optional, + [miner: [:names, :smart_contract, :proxy_implementations]] => :optional, :nephews => :required, :rewards => :optional }, @@ -169,7 +169,7 @@ defmodule BlockScoutWeb.PagingHelper do [ necessity_by_association: %{ :transactions => :optional, - [miner: :names] => :optional, + [miner: [:names, :smart_contract, :proxy_implementations]] => :optional, :rewards => :optional }, block_type: "Reorg" @@ -184,7 +184,7 @@ defmodule BlockScoutWeb.PagingHelper do do: [ necessity_by_association: %{ :transactions => :optional, - [miner: :names] => :optional, + [miner: [:names, :smart_contract, :proxy_implementations]] => :optional, :rewards => :optional }, block_type: "Block" diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 2c0688bc5972..6f7ed344b585 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -363,9 +363,10 @@ defmodule Explorer.Chain do |> select_repo(options).all() end - @spec address_to_logs(Hash.Address.t(), Keyword.t()) :: [Log.t()] + @spec address_to_logs(Hash.Address.t(), [paging_options | necessity_by_association_option | api?]) :: [Log.t()] def address_to_logs(address_hash, csv_export?, options \\ []) when is_list(options) do paging_options = Keyword.get(options, :paging_options) || %PagingOptions{page_size: 50} + necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) case paging_options do %PagingOptions{key: {0, 0}} -> @@ -402,13 +403,14 @@ defmodule Explorer.Chain do base else base - |> preload(transaction: [:to_address, :from_address]) + |> preload(transaction: [:from_address, 
to_address: [:proxy_implementations]]) end preloaded_query |> page_logs(paging_options) |> filter_topic(Keyword.get(options, :topic)) |> where_block_number_in_period(from_block, to_block) + |> join_associations(necessity_by_association) |> select_repo(options).all() |> Enum.take(paging_options.page_size) end @@ -1674,7 +1676,7 @@ defmodule Explorer.Chain do elements blocks -> - blocks + blocks |> Repo.preload(Map.keys(necessity_by_association)) end end From 569cb8bbb6ab97154afc5f248d2aefbce6e3d0f1 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Tue, 11 Jun 2024 19:07:12 +0300 Subject: [PATCH 084/150] fix: add smart contracts preloads to from_address (#10236) --- .../block_scout_web/channels/address_channel.ex | 2 +- .../controllers/api/v2/address_controller.ex | 2 +- .../controllers/api/v2/main_page_controller.ex | 2 +- .../controllers/api/v2/transaction_controller.ex | 3 ++- .../lib/block_scout_web/endpoint.ex | 2 +- .../lib/block_scout_web/notifier.ex | 15 +++++---------- apps/explorer/lib/explorer/chain.ex | 2 +- 7 files changed, 12 insertions(+), 16 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex b/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex index a89c20de49be..4ea5943e0c4f 100644 --- a/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex +++ b/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex @@ -337,7 +337,7 @@ defmodule BlockScoutWeb.AddressChannel do transactions |> Repo.preload([ [ - from_address: [:names], + from_address: [:names, :smart_contract, :proxy_implementations], to_address: [:names, :smart_contract, :proxy_implementations], created_contract_address: [:names, :smart_contract, :proxy_implementations] ] diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex index 
0a2d71441909..dd6a721d6bf2 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex @@ -37,7 +37,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @transaction_necessity_by_association [ necessity_by_association: %{ [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, - [from_address: [:names, :smart_contract]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional, :block => :optional }, diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex index f37a5b5410ab..ecfaebd16f5c 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/main_page_controller.ex @@ -14,7 +14,7 @@ defmodule BlockScoutWeb.API.V2.MainPageController do necessity_by_association: %{ :block => :required, [created_contract_address: [:names, :smart_contract, :proxy_implementations]] => :optional, - [from_address: [:names, :smart_contract]] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => :optional, [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional }, paging_options: %PagingOptions{page_size: 6}, diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex index a8ebf1197e70..8a57e0b7169d 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex @@ -63,7 +63,8 @@ 
defmodule BlockScoutWeb.API.V2.TransactionController do :proxy_implementations ] ] => :optional, - [from_address: :names] => :optional, + [from_address: [:names, :smart_contract, :proxy_implementations]] => + :optional, [to_address: [:names, :smart_contract, :proxy_implementations]] => :optional } |> Map.merge(@chain_type_transaction_necessity_by_association) diff --git a/apps/block_scout_web/lib/block_scout_web/endpoint.ex b/apps/block_scout_web/lib/block_scout_web/endpoint.ex index b5bf97ee5e57..7d742482847d 100644 --- a/apps/block_scout_web/lib/block_scout_web/endpoint.ex +++ b/apps/block_scout_web/lib/block_scout_web/endpoint.ex @@ -69,7 +69,7 @@ defmodule BlockScoutWeb.Endpoint do plug(BlockScoutWeb.Prometheus.Exporter) # 'x-apollo-tracing' header for https://www.graphqlbin.com to work with our GraphQL endpoint - plug(CORSPlug, origin: :self, headers: ["x-apollo-tracing" | CORSPlug.defaults()[:headers]]) + plug(CORSPlug, headers: ["x-apollo-tracing" | CORSPlug.defaults()[:headers]]) plug(BlockScoutWeb.Router) end diff --git a/apps/block_scout_web/lib/block_scout_web/notifier.ex b/apps/block_scout_web/lib/block_scout_web/notifier.ex index 724c1e7e5bc1..9d5ecfda3a77 100644 --- a/apps/block_scout_web/lib/block_scout_web/notifier.ex +++ b/apps/block_scout_web/lib/block_scout_web/notifier.ex @@ -175,10 +175,8 @@ defmodule BlockScoutWeb.Notifier do DenormalizationHelper.extend_transaction_preload([ :token, :transaction, - from_address: :smart_contract, - to_address: :smart_contract, - from_address: :names, - to_address: :names + from_address: [:names, :smart_contract, :proxy_implementations], + to_address: [:names, :smart_contract, :proxy_implementations] ]) )) ) @@ -202,12 +200,9 @@ defmodule BlockScoutWeb.Notifier do def handle_event({:chain_event, :transactions, :realtime, transactions}) do base_preloads = [ :block, - created_contract_address: :names, - from_address: :names, - to_address: :names, - created_contract_address: :smart_contract, - from_address: 
:smart_contract, - to_address: :smart_contract + created_contract_address: [:names, :smart_contract, :proxy_implementations], + from_address: [:names, :smart_contract, :proxy_implementations], + to_address: [:names, :smart_contract, :proxy_implementations] ] preloads = if API_V2.enabled?(), do: [:token_transfers | base_preloads], else: base_preloads diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 6f7ed344b585..e53187821af1 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -403,7 +403,7 @@ defmodule Explorer.Chain do base else base - |> preload(transaction: [:from_address, to_address: [:proxy_implementations]]) + |> preload(transaction: [from_address: [:proxy_implementations], to_address: [:proxy_implementations]]) end preloaded_query From e02dde7ee954c6d6f9e75ce8a6fbe7573b706826 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Wed, 12 Jun 2024 17:22:01 +0300 Subject: [PATCH 085/150] feat: Advanced Filters (#9769) * feat: Advanced Filters * Fix query performance * Fix timestamp filtering; Fix query construction * Add csv export * Frontend integration Add search_params to response Add limit to tokens endpoint Add fee in api response Add exclusion/inclusion of from/to addresses Remove raw_input from api response * Remove comment * Add methods search; Optimize internal txs query * Fix `method_id_to_name_from_params` * Fix filtering by amount; add filter by native * Fix review comments * Handle all token types * Optimize query * Process review comments * Process review comments --------- Co-authored-by: Viktor Baranov --- .../lib/block_scout_web/api_router.ex | 6 + .../api/v2/advanced_filter_controller.ex | 372 +++++++ .../controllers/api/v2/fallback_controller.ex | 7 + .../controllers/api/v2/token_controller.ex | 12 +- .../views/api/v2/advanced_filter_view.ex | 172 ++++ .../v2/advanced_filter_controller_test.exs | 945 
++++++++++++++++++ .../api/v2/validator_controller_test.exs | 21 +- .../lib/explorer/chain/advanced_filter.ex | 706 +++++++++++++ .../lib/explorer/chain/contract_method.ex | 49 +- .../address_transaction_csv_exporter.ex | 28 +- apps/explorer/lib/explorer/chain/token.ex | 8 + .../lib/explorer/chain/token_transfer.ex | 6 + .../lib/explorer/chain/transaction.ex | 16 +- apps/explorer/lib/explorer/helper.ex | 15 + .../lib/explorer/market/market_history.ex | 16 + cspell.json | 228 ++--- 16 files changed, 2421 insertions(+), 186 deletions(-) create mode 100644 apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex create mode 100644 apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex create mode 100644 apps/block_scout_web/test/block_scout_web/controllers/api/v2/advanced_filter_controller_test.exs create mode 100644 apps/explorer/lib/explorer/chain/advanced_filter.ex diff --git a/apps/block_scout_web/lib/block_scout_web/api_router.ex b/apps/block_scout_web/lib/block_scout_web/api_router.ex index 24e323db8c5b..e58618a34c3c 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/api_router.ex @@ -349,6 +349,12 @@ defmodule BlockScoutWeb.ApiRouter do get("/batches/:batch_number", V2.ArbitrumController, :batch) end end + + scope "/advanced-filters" do + get("/", V2.AdvancedFilterController, :list) + get("/csv", V2.AdvancedFilterController, :list_csv) + get("/methods", V2.AdvancedFilterController, :list_methods) + end end scope "/v1/graphql" do diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex new file mode 100644 index 000000000000..a3c2f332a26f --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex @@ -0,0 +1,372 @@ +defmodule 
BlockScoutWeb.API.V2.AdvancedFilterController do + use BlockScoutWeb, :controller + + import BlockScoutWeb.Chain, only: [default_paging_options: 0, split_list_by_page: 1, next_page_params: 4] + + alias BlockScoutWeb.API.V2.{AdvancedFilterView, CSVExportController, TransactionView} + alias Explorer.{Chain, PagingOptions} + alias Explorer.Chain.{AdvancedFilter, ContractMethod, Data, Token, Transaction} + alias Explorer.Chain.CSVExport.Helper, as: CSVHelper + alias Plug.Conn + + action_fallback(BlockScoutWeb.API.V2.FallbackController) + + @api_true [api?: true] + + @methods [ + %{method_id: "0xa9059cbb", name: "transfer"}, + %{method_id: "0xa0712d68", name: "mint"}, + %{method_id: "0x095ea7b3", name: "approve"}, + %{method_id: "0x40993b26", name: "buy"}, + %{method_id: "0x3593564c", name: "execute"}, + %{method_id: "0x3ccfd60b", name: "withdraw"}, + %{method_id: "0xd0e30db0", name: "deposit"}, + %{method_id: "0x0a19b14a", name: "trade"}, + %{method_id: "0x4420e486", name: "register"}, + %{method_id: "0x5f575529", name: "swap"}, + %{method_id: "0xd9627aa4", name: "sellToUniswap"}, + %{method_id: "0xe9e05c42", name: "depositTransaction"}, + %{method_id: "0x23b872dd", name: "transferFrom"}, + %{method_id: "0xa22cb465", name: "setApprovalForAll"}, + %{method_id: "0x2e7ba6ef", name: "claim"}, + %{method_id: "0x0502b1c5", name: "unoswap"}, + %{method_id: "0xb2267a7b", name: "sendMessage"}, + %{method_id: "0x9871efa4", name: "unxswapByOrderId"}, + %{method_id: "0xbf6eac2f", name: "stake"}, + %{method_id: "0x3ce33bff", name: "bridge"}, + %{method_id: "0xeb672419", name: "requestL2Transaction"}, + %{method_id: "0xe449022e", name: "uniswapV3Swap"}, + %{method_id: "0x0162e2d0", name: "swapETHForExactTokens"} + ] + + @methods_id_to_name_map Map.new(@methods, fn %{method_id: method_id, name: name} -> {method_id, name} end) + @methods_name_to_id_map Map.new(@methods, fn %{method_id: method_id, name: name} -> {name, method_id} end) + + @methods_filter_limit 20 + @tokens_filter_limit 
20 + + @doc """ + Function responsible for `api/v2/advanced-filters/` endpoint. + """ + @spec list(Plug.Conn.t(), map()) :: Plug.Conn.t() + def list(conn, params) do + full_options = params |> extract_filters() |> Keyword.merge(paging_options(params)) |> Keyword.merge(@api_true) + + advanced_filters_plus_one = AdvancedFilter.list(full_options) + + {advanced_filters, next_page} = split_list_by_page(advanced_filters_plus_one) + + {decoded_transactions, _abi_acc, methods_acc} = + advanced_filters + |> Enum.map(fn af -> %Transaction{to_address: af.to_address, input: af.input, hash: af.hash} end) + |> TransactionView.decode_transactions(true) + + next_page_params = + next_page |> next_page_params(advanced_filters, Map.take(params, ["items_count"]), &paging_params/1) + + render(conn, :advanced_filters, + advanced_filters: advanced_filters, + decoded_transactions: decoded_transactions, + search_params: %{ + method_ids: method_id_to_name_from_params(full_options[:methods] || [], methods_acc), + tokens: contract_address_hash_to_token_from_params(full_options[:token_contract_address_hashes]) + }, + next_page_params: next_page_params + ) + end + + @doc """ + Function responsible for `api/v2/advanced-filters/csv` endpoint. 
+ """ + @spec list_csv(Plug.Conn.t(), map()) :: Plug.Conn.t() + def list_csv(conn, params) do + with {:recaptcha, true} <- + {:recaptcha, + Application.get_env(:block_scout_web, :recaptcha)[:is_disabled] || + CSVHelper.captcha_helper().recaptcha_passed?(params["recaptcha_response"])} do + full_options = + params + |> extract_filters() + |> Keyword.merge(paging_options(params)) + |> Keyword.update(:paging_options, %PagingOptions{page_size: CSVHelper.limit()}, fn paging_options -> + %PagingOptions{paging_options | page_size: CSVHelper.limit()} + end) + + full_options + |> AdvancedFilter.list() + |> AdvancedFilterView.to_csv_format() + |> CSVHelper.dump_to_stream() + |> Enum.reduce_while(CSVExportController.put_resp_params(conn), fn chunk, conn -> + case Conn.chunk(conn, chunk) do + {:ok, conn} -> + {:cont, conn} + + {:error, :closed} -> + {:halt, conn} + end + end) + end + end + + @doc """ + Function responsible for `api/v2/advanced-filters/methods` endpoint, + including `api/v2/advanced-filters/methods/?q=:search_string`. 
+ """ + @spec list_methods(Plug.Conn.t(), map()) :: {:method, nil | Explorer.Chain.ContractMethod.t()} | Plug.Conn.t() + def list_methods(conn, %{"q" => query}) do + case {@methods_id_to_name_map[query], @methods_name_to_id_map[query]} do + {name, _} when is_binary(name) -> + render(conn, :methods, methods: [%{method_id: query, name: name}]) + + {_, id} when is_binary(id) -> + render(conn, :methods, methods: [%{method_id: id, name: query}]) + + _ -> + mb_contract_method = + case Data.cast(query) do + {:ok, %Data{bytes: <<_::bytes-size(4)>> = binary_method_id}} -> + ContractMethod.find_contract_method_by_selector_id(binary_method_id, @api_true) + + _ -> + ContractMethod.find_contract_method_by_name(query, @api_true) + end + + with {:method, %ContractMethod{abi: %{"name" => name}, identifier: identifier}} <- {:method, mb_contract_method} do + render(conn, :methods, methods: [%{method_id: "0x" <> Base.encode16(identifier, case: :lower), name: name}]) + end + end + end + + def list_methods(conn, _params) do + render(conn, :methods, methods: @methods) + end + + defp method_id_to_name_from_params(prepared_method_ids, methods_acc) do + {decoded_method_ids, method_ids_to_find} = + Enum.reduce(prepared_method_ids, {%{}, []}, fn method_id, {decoded, to_decode} -> + {:ok, method_id_hash} = Data.cast(method_id) + + case {Map.get(@methods_id_to_name_map, method_id), + methods_acc + |> Map.get(method_id_hash.bytes, []) + |> Enum.find( + &match?(%ContractMethod{abi: %{"type" => "function", "name" => name}} when is_binary(name), &1) + )} do + {name, _} when is_binary(name) -> + {Map.put(decoded, method_id, name), to_decode} + + {_, %ContractMethod{abi: %{"type" => "function", "name" => name}}} when is_binary(name) -> + {Map.put(decoded, method_id, name), to_decode} + + {nil, nil} -> + {decoded, [method_id_hash.bytes | to_decode]} + end + end) + + method_ids_to_find + |> ContractMethod.find_contract_methods(@api_true) + |> Enum.reduce(%{}, fn contract_method, acc -> + case 
contract_method do + %ContractMethod{abi: %{"name" => name}, identifier: identifier} when is_binary(name) -> + Map.put(acc, "0x" <> Base.encode16(identifier, case: :lower), name) + + _ -> + acc + end + end) + |> Map.merge(decoded_method_ids) + end + + defp contract_address_hash_to_token_from_params(tokens) do + token_contract_address_hashes_to_include = tokens[:include] || [] + + token_contract_address_hashes_to_exclude = tokens[:exclude] || [] + + token_contract_address_hashes_to_include + |> Kernel.++(token_contract_address_hashes_to_exclude) + |> Enum.reject(&(&1 == "native")) + |> Enum.uniq() + |> Enum.take(@tokens_filter_limit) + |> Token.get_by_contract_address_hashes(@api_true) + |> Map.new(fn token -> {token.contract_address_hash, token} end) + end + + defp extract_filters(params) do + [ + tx_types: prepare_tx_types(params["tx_types"]), + methods: params["methods"] |> prepare_methods(), + age: prepare_age(params["age_from"], params["age_to"]), + from_address_hashes: + prepare_include_exclude_address_hashes( + params["from_address_hashes_to_include"], + params["from_address_hashes_to_exclude"], + &prepare_address_hash/1 + ), + to_address_hashes: + prepare_include_exclude_address_hashes( + params["to_address_hashes_to_include"], + params["to_address_hashes_to_exclude"], + &prepare_address_hash/1 + ), + address_relation: prepare_address_relation(params["address_relation"]), + amount: prepare_amount(params["amount_from"], params["amount_to"]), + token_contract_address_hashes: + params["token_contract_address_hashes_to_include"] + |> prepare_include_exclude_address_hashes( + params["token_contract_address_hashes_to_exclude"], + &prepare_token_address_hash/1 + ) + |> Enum.map(fn + {key, value} when is_list(value) -> {key, Enum.take(value, @tokens_filter_limit)} + key_value -> key_value + end) + ] + end + + @allowed_tx_types ~w(COIN_TRANSFER ERC-20 ERC-404 ERC-721 ERC-1155) + + defp prepare_tx_types(tx_types) when is_binary(tx_types) do + tx_types + |> 
String.upcase() + |> String.split(",") + |> Enum.filter(&(&1 in @allowed_tx_types)) + end + + defp prepare_tx_types(_), do: nil + + defp prepare_methods(methods) when is_binary(methods) do + methods + |> String.downcase() + |> String.split(",") + |> Enum.filter(fn + "0x" <> method_id when byte_size(method_id) == 8 -> + case Base.decode16(method_id, case: :mixed) do + {:ok, _} -> true + _ -> false + end + + _ -> + false + end) + |> Enum.uniq() + |> Enum.take(@methods_filter_limit) + end + + defp prepare_methods(_), do: nil + + defp prepare_age(from, to), do: [from: parse_date(from), to: parse_date(to)] + + defp parse_date(string_date) do + case string_date && DateTime.from_iso8601(string_date) do + {:ok, date, _utc_offset} -> date + _ -> nil + end + end + + defp prepare_address_hashes(address_hashes, map_filter_function) + when is_binary(address_hashes) do + address_hashes + |> String.split(",") + |> Enum.flat_map(&map_filter_function.(&1)) + end + + defp prepare_address_hashes(_, _), do: nil + + defp prepare_address_hash(maybe_address_hash) do + case Chain.string_to_address_hash(maybe_address_hash) do + {:ok, address_hash} -> [address_hash] + _ -> [] + end + end + + defp prepare_token_address_hash(token_address_hash) do + case String.downcase(token_address_hash) do + "native" -> ["native"] + _ -> prepare_address_hash(token_address_hash) + end + end + + defp prepare_address_relation(relation) do + case relation && String.downcase(relation) do + r when r in [nil, "or"] -> :or + "and" -> :and + _ -> nil + end + end + + defp prepare_amount(from, to), do: [from: parse_decimal(from), to: parse_decimal(to)] + + defp parse_decimal(string_decimal) do + case string_decimal && Decimal.parse(string_decimal) do + {decimal, ""} -> decimal + _ -> nil + end + end + + defp prepare_include_exclude_address_hashes(include, exclude, map_filter_function) do + [ + include: prepare_address_hashes(include, map_filter_function), + exclude: prepare_address_hashes(exclude, 
map_filter_function) + ] + end + + # Paging + + defp paging_options(%{ + "block_number" => block_number_string, + "transaction_index" => tx_index_string, + "internal_transaction_index" => internal_tx_index_string, + "token_transfer_index" => token_transfer_index_string, + "token_transfer_batch_index" => token_transfer_batch_index_string + }) do + with {block_number, ""} <- block_number_string && Integer.parse(block_number_string), + {tx_index, ""} <- tx_index_string && Integer.parse(tx_index_string), + {:ok, internal_tx_index} <- parse_nullable_integer_paging_parameter(internal_tx_index_string), + {:ok, token_transfer_index} <- parse_nullable_integer_paging_parameter(token_transfer_index_string), + {:ok, token_transfer_batch_index} <- parse_nullable_integer_paging_parameter(token_transfer_batch_index_string) do + [ + paging_options: %{ + default_paging_options() + | key: %{ + block_number: block_number, + transaction_index: tx_index, + internal_transaction_index: internal_tx_index, + token_transfer_index: token_transfer_index, + token_transfer_batch_index: token_transfer_batch_index + } + } + ] + else + _ -> [paging_options: default_paging_options()] + end + end + + defp paging_options(_), do: [paging_options: default_paging_options()] + + defp parse_nullable_integer_paging_parameter(""), do: {:ok, nil} + + defp parse_nullable_integer_paging_parameter(string) when is_binary(string) do + case Integer.parse(string) do + {integer, ""} -> {:ok, integer} + _ -> {:error, :invalid_paging_parameter} + end + end + + defp parse_nullable_integer_paging_parameter(_), do: {:error, :invalid_paging_parameter} + + defp paging_params(%AdvancedFilter{ + block_number: block_number, + transaction_index: tx_index, + internal_transaction_index: internal_tx_index, + token_transfer_index: token_transfer_index, + token_transfer_batch_index: token_transfer_batch_index + }) do + %{ + block_number: block_number, + transaction_index: tx_index, + internal_transaction_index: internal_tx_index, + 
token_transfer_index: token_transfer_index, + token_transfer_batch_index: token_transfer_batch_index + } + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex index 373704ccb03a..eb6a7447e10a 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex @@ -278,6 +278,13 @@ defmodule BlockScoutWeb.API.V2.FallbackController do |> render(:message, %{message: @unverified_smart_contract}) end + def call(conn, {:method, _}) do + conn + |> put_status(:not_found) + |> put_view(ApiView) + |> render(:message, %{message: @not_found}) + end + def call(conn, {:is_empty_response, true}) do conn |> put_status(500) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex index 53180c925798..fbe4bbf998cb 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex @@ -1,9 +1,10 @@ defmodule BlockScoutWeb.API.V2.TokenController do + alias Explorer.PagingOptions use BlockScoutWeb, :controller alias BlockScoutWeb.AccessHelper alias BlockScoutWeb.API.V2.{AddressView, TransactionView} - alias Explorer.{Chain, Repo} + alias Explorer.{Chain, Helper, Repo} alias Explorer.Chain.{Address, BridgedToken, Token, Token.Instance} alias Indexer.Fetcher.OnDemand.TokenTotalSupply, as: TokenTotalSupplyOnDemand @@ -14,7 +15,8 @@ defmodule BlockScoutWeb.API.V2.TokenController do next_page_params: 3, token_transfers_next_page_params: 3, unique_tokens_paging_options: 1, - unique_tokens_next_page: 3 + unique_tokens_next_page: 3, + default_paging_options: 0 ] import BlockScoutWeb.PagingHelper, @@ -300,6 +302,12 @@ 
defmodule BlockScoutWeb.API.V2.TokenController do options = params |> paging_options() + |> Keyword.update(:paging_options, default_paging_options(), fn %PagingOptions{ + page_size: page_size + } = paging_options -> + mb_parsed_limit = Helper.parse_integer(params["limit"]) + %PagingOptions{paging_options | page_size: min(page_size, mb_parsed_limit && abs(mb_parsed_limit))} + end) |> Keyword.merge(token_transfers_types_options(params)) |> Keyword.merge(tokens_sorting(params)) |> Keyword.merge(@api_true) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex new file mode 100644 index 000000000000..f54781551508 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex @@ -0,0 +1,172 @@ +defmodule BlockScoutWeb.API.V2.AdvancedFilterView do + use BlockScoutWeb, :view + + alias BlockScoutWeb.API.V2.{Helper, TokenView, TransactionView} + alias Explorer.Chain.{Address, Data, Transaction} + alias Explorer.Market + alias Explorer.Market.MarketHistory + + def render("advanced_filters.json", %{ + advanced_filters: advanced_filters, + decoded_transactions: decoded_transactions, + search_params: %{ + method_ids: method_ids, + tokens: tokens + }, + next_page_params: next_page_params + }) do + %{ + items: + advanced_filters + |> Enum.zip(decoded_transactions) + |> Enum.map(fn {af, decoded_input} -> prepare_advanced_filter(af, decoded_input) end), + search_params: prepare_search_params(method_ids, tokens), + next_page_params: next_page_params + } + end + + def render("methods.json", %{methods: methods}) do + methods + end + + def to_csv_format(advanced_filters) do + exchange_rate = Market.get_coin_exchange_rate() + + date_to_prices = + Enum.reduce(advanced_filters, %{}, fn af, acc -> + date = DateTime.to_date(af.timestamp) + + if Map.has_key?(acc, date) do + acc + else + market_history = MarketHistory.price_at_date(date) + + 
Map.put( + acc, + date, + {market_history && market_history.opening_price, market_history && market_history.closing_price} + ) + end + end) + + row_names = [ + "TxHash", + "Type", + "MethodId", + "UtcTimestamp", + "FromAddress", + "ToAddress", + "Value", + "TokenContractAddressHash", + "TokenDecimals", + "TokenSymbol", + "BlockNumber", + "Fee", + "CurrentPrice", + "TxDateOpeningPrice", + "TxDateClosingPrice" + ] + + af_lists = + advanced_filters + |> Stream.map(fn advanced_filter -> + method_id = + case advanced_filter.input do + %{bytes: <>} -> method_id + _ -> nil + end + + {opening_price, closing_price} = date_to_prices[DateTime.to_date(advanced_filter.timestamp)] + + [ + to_string(advanced_filter.hash), + advanced_filter.type, + method_id, + advanced_filter.timestamp, + Address.checksum(advanced_filter.from_address.hash), + Address.checksum(advanced_filter.to_address.hash), + advanced_filter.value, + if(advanced_filter.type != "coin_transfer", + do: advanced_filter.token_transfer.token.contract_address_hash, + else: nil + ), + if(advanced_filter.type != "coin_transfer", do: advanced_filter.token_transfer.token.decimals, else: nil), + if(advanced_filter.type != "coin_transfer", do: advanced_filter.token_transfer.token.symbol, else: nil), + advanced_filter.block_number, + advanced_filter.fee, + exchange_rate.usd_value, + opening_price, + closing_price + ] + end) + + Stream.concat([row_names], af_lists) + end + + defp prepare_advanced_filter(advanced_filter, decoded_input) do + %{ + hash: advanced_filter.hash, + type: advanced_filter.type, + method: + if(advanced_filter.type != "coin_transfer", + do: + TransactionView.method_name( + %Transaction{ + to_address: %Address{ + hash: advanced_filter.token_transfer.token.contract_address_hash, + contract_code: "0x" |> Data.cast() |> elem(1) + }, + input: advanced_filter.input + }, + decoded_input + ), + else: + TransactionView.method_name( + %Transaction{to_address: advanced_filter.to_address, input: 
advanced_filter.input}, + decoded_input + ) + ), + from: + Helper.address_with_info( + nil, + advanced_filter.from_address, + advanced_filter.from_address.hash, + false + ), + to: + Helper.address_with_info( + nil, + advanced_filter.to_address, + advanced_filter.to_address.hash, + false + ), + value: advanced_filter.value, + total: + if(advanced_filter.type != "coin_transfer", + do: TransactionView.prepare_token_transfer_total(advanced_filter.token_transfer), + else: nil + ), + token: + if(advanced_filter.type != "coin_transfer", + do: TokenView.render("token.json", %{token: advanced_filter.token_transfer.token}), + else: nil + ), + timestamp: advanced_filter.timestamp, + block_number: advanced_filter.block_number, + transaction_index: advanced_filter.transaction_index, + internal_transaction_index: advanced_filter.internal_transaction_index, + token_transfer_index: advanced_filter.token_transfer_index, + token_transfer_batch_index: advanced_filter.token_transfer_batch_index, + fee: advanced_filter.fee + } + end + + defp prepare_search_params(method_ids, tokens) do + tokens_map = + Map.new(tokens, fn {contract_address_hash, token} -> + {contract_address_hash, TokenView.render("token.json", %{token: token})} + end) + + %{methods: method_ids, tokens: tokens_map} + end +end diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/advanced_filter_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/advanced_filter_controller_test.exs new file mode 100644 index 000000000000..4e14bc95970f --- /dev/null +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/advanced_filter_controller_test.exs @@ -0,0 +1,945 @@ +defmodule BlockScoutWeb.API.V2.AdvancedFilterControllerTest do + use BlockScoutWeb.ConnCase + + import Mox + + alias Explorer.Chain.{AdvancedFilter, Data} + alias Explorer.{Factory, TestHelper} + + describe "/advanced_filters" do + test "empty list", %{conn: conn} do + request = get(conn, 
"/api/v2/advanced-filters") + assert response = json_response(request, 200) + assert response["items"] == [] + assert response["next_page_params"] == nil + end + + test "get and paginate advanced filter (transactions split between pages)", %{conn: conn} do + first_tx = :transaction |> insert() |> with_block() + insert_list(3, :token_transfer, transaction: first_tx) + + for i <- 0..2 do + insert(:internal_transaction, + transaction: first_tx, + block_hash: first_tx.block_hash, + index: i, + block_index: i + ) + end + + insert_list(51, :transaction) |> with_block() + + request = get(conn, "/api/v2/advanced-filters") + assert response = json_response(request, 200) + request_2nd_page = get(conn, "/api/v2/advanced-filters", response["next_page_params"]) + + assert response_2nd_page = json_response(request_2nd_page, 200) + check_paginated_response(AdvancedFilter.list(), response["items"], response_2nd_page["items"]) + end + + test "get and paginate advanced filter (token transfers split between pages)", %{conn: conn} do + first_tx = :transaction |> insert() |> with_block() + insert_list(3, :token_transfer, transaction: first_tx) + + for i <- 0..2 do + insert(:internal_transaction, + transaction: first_tx, + block_hash: first_tx.block_hash, + index: i, + block_index: i + ) + end + + second_tx = :transaction |> insert() |> with_block() + insert_list(50, :token_transfer, transaction: second_tx, block_number: second_tx.block_number) + + request = get(conn, "/api/v2/advanced-filters") + assert response = json_response(request, 200) + request_2nd_page = get(conn, "/api/v2/advanced-filters", response["next_page_params"]) + assert response_2nd_page = json_response(request_2nd_page, 200) + + check_paginated_response(AdvancedFilter.list(), response["items"], response_2nd_page["items"]) + end + + test "get and paginate advanced filter (batch token transfers split between pages)", %{conn: conn} do + first_tx = :transaction |> insert() |> with_block() + insert_list(3, 
:token_transfer, transaction: first_tx) + + for i <- 0..2 do + insert(:internal_transaction, + transaction: first_tx, + block_hash: first_tx.block_hash, + index: i, + block_index: i + ) + end + + second_tx = :transaction |> insert() |> with_block() + + insert_list(5, :token_transfer, + transaction: second_tx, + block_number: second_tx.block_number, + token_type: "ERC-1155", + token_ids: 0..10 |> Enum.to_list(), + amounts: 10..20 |> Enum.to_list() + ) + + request = get(conn, "/api/v2/advanced-filters") + assert response = json_response(request, 200) + request_2nd_page = get(conn, "/api/v2/advanced-filters", response["next_page_params"]) + assert response_2nd_page = json_response(request_2nd_page, 200) + + check_paginated_response(AdvancedFilter.list(), response["items"], response_2nd_page["items"]) + end + + test "get and paginate advanced filter (internal transactions split between pages)", %{conn: conn} do + first_tx = :transaction |> insert() |> with_block() + insert_list(3, :token_transfer, transaction: first_tx) + + for i <- 0..2 do + insert(:internal_transaction, + transaction: first_tx, + block_hash: first_tx.block_hash, + index: i, + block_index: i + ) + end + + second_tx = :transaction |> insert() |> with_block() + + for i <- 0..49 do + insert(:internal_transaction, + transaction: second_tx, + block_hash: second_tx.block_hash, + index: i, + block_index: i + ) + end + + request = get(conn, "/api/v2/advanced-filters") + assert response = json_response(request, 200) + request_2nd_page = get(conn, "/api/v2/advanced-filters", response["next_page_params"]) + + assert response_2nd_page = json_response(request_2nd_page, 200) + check_paginated_response(AdvancedFilter.list(), response["items"], response_2nd_page["items"]) + end + + test "filter by tx_type", %{conn: conn} do + 30 |> insert_list(:transaction) |> with_block() + + tx = insert(:transaction) |> with_block() + + for token_type <- ~w(ERC-20 ERC-404 ERC-721 ERC-1155), + _ <- 0..4 do + insert(:token_transfer, 
transaction: tx, token_type: token_type) + end + + tx = :transaction |> insert() |> with_block() + + for i <- 0..29 do + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + end + + for tx_type_filter_string <- + ~w(COIN_TRANSFER COIN_TRANSFER,ERC-404 ERC-721,ERC-1155 ERC-20,COIN_TRANSFER,ERC-1155) do + tx_type_filter = tx_type_filter_string |> String.split(",") + request = get(conn, "/api/v2/advanced-filters", %{"tx_types" => tx_type_filter_string}) + assert response = json_response(request, 200) + + assert Enum.all?(response["items"], fn item -> String.upcase(item["type"]) in tx_type_filter end) + + if response["next_page_params"] do + request_2nd_page = + get( + conn, + "/api/v2/advanced-filters", + Map.merge(%{"tx_types" => tx_type_filter_string}, response["next_page_params"]) + ) + + assert response_2nd_page = json_response(request_2nd_page, 200) + + assert Enum.all?(response_2nd_page["items"], fn item -> String.upcase(item["type"]) in tx_type_filter end) + + check_paginated_response( + AdvancedFilter.list(tx_types: tx_type_filter), + response["items"], + response_2nd_page["items"] + ) + end + end + end + + test "filter by methods", %{conn: conn} do + TestHelper.get_eip1967_implementation_zero_addresses() + + tx = :transaction |> insert() |> with_block() + + smart_contract = build(:smart_contract) + + contract_address = + insert(:address, + hash: address_hash(), + verified: true, + contract_code: Factory.contract_code_info().bytecode, + smart_contract: smart_contract + ) + + method_id1_string = "0xa9059cbb" + method_id2_string = "0xa0712d68" + method_id3_string = "0x095ea7b3" + method_id4_string = "0x40993b26" + + {:ok, method1} = Data.cast(method_id1_string <> "ab0ba0") + {:ok, method2} = Data.cast(method_id2_string <> "ab0ba0") + {:ok, method3} = Data.cast(method_id3_string <> "ab0ba0") + {:ok, method4} = Data.cast(method_id4_string <> "ab0ba0") + + for i <- 0..4 do + insert(:internal_transaction, + 
transaction: tx, + to_address_hash: contract_address.hash, + to_address: contract_address, + block_hash: tx.block_hash, + index: i, + block_index: i, + input: method1 + ) + end + + for i <- 5..9 do + insert(:internal_transaction, + transaction: tx, + to_address_hash: contract_address.hash, + to_address: contract_address, + block_hash: tx.block_hash, + index: i, + block_index: i, + input: method2 + ) + end + + 5 + |> insert_list(:transaction, to_address_hash: contract_address.hash, to_address: contract_address, input: method2) + |> with_block() + + 5 + |> insert_list(:transaction, to_address_hash: contract_address.hash, to_address: contract_address, input: method3) + |> with_block() + + method3_transaction = + :transaction + |> insert(to_address_hash: contract_address.hash, to_address: contract_address, input: method3) + |> with_block() + + method4_transaction = + :transaction + |> insert(to_address_hash: contract_address.hash, to_address: contract_address, input: method4) + |> with_block() + + 5 |> insert_list(:token_transfer, transaction: method3_transaction) + 5 |> insert_list(:token_transfer, transaction: method4_transaction) + + request = get(conn, "/api/v2/advanced-filters", %{"methods" => "0xa0712d68,0x095ea7b3"}) + assert response = json_response(request, 200) + + assert Enum.all?(response["items"], fn item -> + String.slice(item["method"], 0..9) in [method_id2_string, method_id3_string] + end) + + assert Enum.count(response["items"]) == 21 + end + + test "filter by age", %{conn: conn} do + first_timestamp = ~U[2023-12-12 00:00:00.000000Z] + + for i <- 0..4 do + tx = :transaction |> insert() |> with_block(block_timestamp: Timex.shift(first_timestamp, days: i)) + + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "age_from" => 
"2023-12-14T00:00:00Z", + "age_to" => "2023-12-16T00:00:00Z" + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 9 + end + + test "filter by from address include", %{conn: conn} do + address = insert(:address) + + for i <- 0..4 do + tx = :transaction |> insert() |> with_block() + + if i < 2 do + :transaction |> insert(from_address_hash: address.hash, from_address: address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + from_address_hash: address.hash, + from_address: address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + from_address_hash: address.hash, + from_address: address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + else + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + end + + request = get(conn, "/api/v2/advanced-filters", %{"from_address_hashes_to_include" => to_string(address.hash)}) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 6 + end + + test "filter by from address exclude", %{conn: conn} do + address = insert(:address) + + for i <- 0..4 do + tx = :transaction |> insert() |> with_block() + + if i < 4 do + :transaction |> insert(from_address_hash: address.hash, from_address: address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + from_address_hash: address.hash, + from_address: address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + from_address_hash: address.hash, + from_address: address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + else + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: 
tx.block_number, log_index: i) + end + end + + request = get(conn, "/api/v2/advanced-filters", %{"from_address_hashes_to_exclude" => to_string(address.hash)}) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 7 + end + + test "filter by from address include and exclude", %{conn: conn} do + address_to_include = insert(:address) + address_to_exclude = insert(:address) + + for i <- 0..2 do + tx = + :transaction + |> insert(from_address_hash: address_to_exclude.hash, from_address: address_to_exclude) + |> with_block() + + if i < 4 do + :transaction + |> insert(from_address_hash: address_to_include.hash, from_address: address_to_include) + |> with_block() + + insert(:internal_transaction, + transaction: tx, + from_address_hash: address_to_include.hash, + from_address: address_to_include, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + from_address_hash: address_to_include.hash, + from_address: address_to_include, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + else + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "from_address_hashes_to_include" => to_string(address_to_include.hash), + "from_address_hashes_to_exclude" => to_string(address_to_exclude.hash) + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 9 + end + + test "filter by to address include", %{conn: conn} do + address = insert(:address) + + for i <- 0..4 do + tx = :transaction |> insert() |> with_block() + + if i < 2 do + :transaction |> insert(to_address_hash: address.hash, to_address: address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + to_address_hash: address.hash, + to_address: address, + 
block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + to_address_hash: address.hash, + to_address: address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + else + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + end + + request = get(conn, "/api/v2/advanced-filters", %{"to_address_hashes_to_include" => to_string(address.hash)}) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 6 + end + + test "filter by to address exclude", %{conn: conn} do + address = insert(:address) + + for i <- 0..4 do + tx = :transaction |> insert() |> with_block() + + if i < 4 do + :transaction |> insert(to_address_hash: address.hash, to_address: address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + to_address_hash: address.hash, + to_address: address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + to_address_hash: address.hash, + to_address: address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + else + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + end + + request = get(conn, "/api/v2/advanced-filters", %{"to_address_hashes_to_exclude" => to_string(address.hash)}) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 7 + end + + test "filter by to address include and exclude", %{conn: conn} do + address_to_include = insert(:address) + address_to_exclude = insert(:address) + + for i <- 0..2 do + tx = + :transaction + |> insert(to_address_hash: address_to_exclude.hash, to_address: address_to_exclude) + |> with_block() + + if i < 4 do + 
:transaction + |> insert(to_address_hash: address_to_include.hash, to_address: address_to_include) + |> with_block() + + insert(:internal_transaction, + transaction: tx, + to_address_hash: address_to_include.hash, + to_address: address_to_include, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + to_address_hash: address_to_include.hash, + to_address: address_to_include, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + else + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "to_address_hashes_to_include" => to_string(address_to_include.hash), + "to_address_hashes_to_exclude" => to_string(address_to_exclude.hash) + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 9 + end + + test "filter by from and to address", %{conn: conn} do + from_address = insert(:address) + to_address = insert(:address) + + for i <- 0..8 do + tx = :transaction |> insert() |> with_block() + + cond do + i < 2 -> + :transaction |> insert(from_address_hash: from_address.hash, from_address: from_address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + from_address_hash: from_address.hash, + from_address: from_address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + from_address_hash: from_address.hash, + from_address: from_address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + + i < 4 -> + :transaction |> insert(to_address_hash: to_address.hash, to_address: to_address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + to_address_hash: to_address.hash, + to_address: to_address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + 
insert(:token_transfer, + to_address_hash: to_address.hash, + to_address: to_address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + + i < 6 -> + :transaction + |> insert( + to_address_hash: to_address.hash, + to_address: to_address, + from_address_hash: from_address.hash, + from_address: from_address + ) + |> with_block() + + insert(:internal_transaction, + transaction: tx, + to_address_hash: to_address.hash, + to_address: to_address, + from_address_hash: from_address.hash, + from_address: from_address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + to_address_hash: to_address.hash, + to_address: to_address, + from_address_hash: from_address.hash, + from_address: from_address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + + true -> + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "from_address_hashes_to_include" => to_string(from_address.hash), + "to_address_hashes_to_include" => to_string(to_address.hash), + "address_relation" => "AnD" + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 6 + end + + test "filter by from or to address", %{conn: conn} do + from_address = insert(:address) + to_address = insert(:address) + + for i <- 0..8 do + tx = :transaction |> insert() |> with_block() + + cond do + i < 2 -> + :transaction |> insert(from_address_hash: from_address.hash, from_address: from_address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + from_address_hash: from_address.hash, + from_address: from_address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + from_address_hash: from_address.hash, + from_address: from_address, + 
transaction: tx, + block_number: tx.block_number, + log_index: i + ) + + i < 4 -> + :transaction |> insert(to_address_hash: to_address.hash, to_address: to_address) |> with_block() + + insert(:internal_transaction, + transaction: tx, + to_address_hash: to_address.hash, + to_address: to_address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + to_address_hash: to_address.hash, + to_address: to_address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + + i < 6 -> + :transaction + |> insert( + to_address_hash: to_address.hash, + to_address: to_address, + from_address_hash: from_address.hash, + from_address: from_address + ) + |> with_block() + + insert(:internal_transaction, + transaction: tx, + to_address_hash: to_address.hash, + to_address: to_address, + from_address_hash: from_address.hash, + from_address: from_address, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, + to_address_hash: to_address.hash, + to_address: to_address, + from_address_hash: from_address.hash, + from_address: from_address, + transaction: tx, + block_number: tx.block_number, + log_index: i + ) + + true -> + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: i, + block_index: i + ) + + insert(:token_transfer, transaction: tx, block_number: tx.block_number, log_index: i) + end + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "from_address_hashes_to_include" => to_string(from_address.hash), + "to_address_hashes_to_include" => to_string(to_address.hash) + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 18 + end + + test "filter by amount", %{conn: conn} do + for i <- 0..4 do + tx = :transaction |> insert(value: i * 10 ** 18) |> with_block() + + insert(:internal_transaction, + transaction: tx, + block_hash: tx.block_hash, + index: 0, + block_index: 0, + value: i * 10 ** 18 + ) + + token = 
insert(:token, decimals: 10) + + insert(:token_transfer, + amount: i * 10 ** 10, + token_contract_address: token.contract_address, + transaction: tx, + block_number: tx.block_number, + log_index: 0 + ) + end + + request = get(conn, "/api/v2/advanced-filters", %{"amount_from" => "0.5", "amount_to" => "2.99"}) + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 6 + end + + test "filter by token contract address include", %{conn: conn} do + token_a = insert(:token) + token_b = insert(:token) + token_c = insert(:token) + + tx = :transaction |> insert() |> with_block() + + for token <- [token_a, token_b, token_c, token_a, token_b, token_c, token_a, token_b, token_c] do + insert(:token_transfer, + token_contract_address: token.contract_address, + transaction: tx, + block_number: tx.block_number, + log_index: 0 + ) + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "token_contract_address_hashes_to_include" => + "#{token_b.contract_address_hash},#{token_c.contract_address_hash}" + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 6 + end + + test "filter by token contract address exclude", %{conn: conn} do + token_a = insert(:token) + token_b = insert(:token) + token_c = insert(:token) + + tx = :transaction |> insert() |> with_block() + + for token <- [token_a, token_b, token_c, token_a, token_b, token_c, token_a, token_b, token_c] do + insert(:token_transfer, + token_contract_address: token.contract_address, + transaction: tx, + block_number: tx.block_number, + log_index: 0 + ) + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "token_contract_address_hashes_to_exclude" => + "#{token_b.contract_address_hash},#{token_c.contract_address_hash}" + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 4 + end + + test "filter by token contract address include with native", %{conn: conn} do + token_a = insert(:token) + 
token_b = insert(:token) + token_c = insert(:token) + + tx = :transaction |> insert() |> with_block() + + for token <- [token_a, token_b, token_c, token_a, token_b, token_c, token_a, token_b, token_c] do + insert(:token_transfer, + token_contract_address: token.contract_address, + transaction: tx, + block_number: tx.block_number, + log_index: 0 + ) + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "token_contract_address_hashes_to_include" => + "#{token_b.contract_address_hash},#{token_c.contract_address_hash},native" + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 7 + end + + test "filter by token contract address exclude with native", %{conn: conn} do + token_a = insert(:token) + token_b = insert(:token) + token_c = insert(:token) + + tx = :transaction |> insert() |> with_block() + + for token <- [token_a, token_b, token_c, token_a, token_b, token_c, token_a, token_b, token_c] do + insert(:token_transfer, + token_contract_address: token.contract_address, + transaction: tx, + block_number: tx.block_number, + log_index: 0 + ) + end + + request = + get(conn, "/api/v2/advanced-filters", %{ + "token_contract_address_hashes_to_exclude" => + "#{token_b.contract_address_hash},#{token_c.contract_address_hash},native" + }) + + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 3 + end + end + + describe "/advanced_filters/methods?q=" do + test "returns 404 if method does not exist", %{conn: conn} do + request = get(conn, "/api/v2/advanced-filters/methods", %{"q" => "foo"}) + assert response = json_response(request, 404) + assert response["message"] == "Not found" + end + + test "finds method by name", %{conn: conn} do + insert(:contract_method) + request = get(conn, "/api/v2/advanced-filters/methods", %{"q" => "set"}) + assert response = json_response(request, 200) + assert response == [%{"method_id" => "0x60fe47b1", "name" => "set"}] + end + + test "finds method by 
id", %{conn: conn} do + insert(:contract_method) + request = get(conn, "/api/v2/advanced-filters/methods", %{"q" => "0x60fe47b1"}) + assert response = json_response(request, 200) + assert response == [%{"method_id" => "0x60fe47b1", "name" => "set"}] + end + end + + defp check_paginated_response(all_advanced_filters, first_page, second_page) do + assert all_advanced_filters + |> Enum.map( + &{&1.block_number, &1.transaction_index, &1.internal_transaction_index, &1.token_transfer_index, + &1.token_transfer_batch_index} + ) == + Enum.map( + first_page ++ second_page, + &{&1["block_number"], &1["transaction_index"], &1["internal_transaction_index"], + &1["token_transfer_index"], &1["token_transfer_batch_index"]} + ) + end +end diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/validator_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/validator_controller_test.exs index ac3f66320b1e..8b38069ffbe1 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/validator_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/validator_controller_test.exs @@ -5,6 +5,7 @@ defmodule BlockScoutWeb.API.V2.ValidatorControllerTest do alias Explorer.Chain.Address alias Explorer.Chain.Cache.StabilityValidatorsCounters alias Explorer.Chain.Stability.Validator, as: ValidatorStability + alias Explorer.Helper defp check_paginated_response(first_page_resp, second_page_resp, list) do assert Enum.count(first_page_resp["items"]) == 50 @@ -19,12 +20,12 @@ defmodule BlockScoutWeb.API.V2.ValidatorControllerTest do defp compare_default_sorting_for_asc({validator_1, blocks_count_1}, {validator_2, blocks_count_2}) do case { - compare(blocks_count_1, blocks_count_2), - compare( + Helper.compare(blocks_count_1, blocks_count_2), + Helper.compare( Keyword.fetch!(ValidatorStability.state_enum(), validator_1.state), Keyword.fetch!(ValidatorStability.state_enum(), validator_2.state) ), - 
compare(validator_1.address_hash.bytes, validator_2.address_hash.bytes) + Helper.compare(validator_1.address_hash.bytes, validator_2.address_hash.bytes) } do {:lt, _, _} -> false {:eq, :lt, _} -> false @@ -35,12 +36,12 @@ defmodule BlockScoutWeb.API.V2.ValidatorControllerTest do defp compare_default_sorting_for_desc({validator_1, blocks_count_1}, {validator_2, blocks_count_2}) do case { - compare(blocks_count_1, blocks_count_2), - compare( + Helper.compare(blocks_count_1, blocks_count_2), + Helper.compare( Keyword.fetch!(ValidatorStability.state_enum(), validator_1.state), Keyword.fetch!(ValidatorStability.state_enum(), validator_2.state) ), - compare(validator_1.address_hash.bytes, validator_2.address_hash.bytes) + Helper.compare(validator_1.address_hash.bytes, validator_2.address_hash.bytes) } do {:gt, _, _} -> false {:eq, :lt, _} -> false @@ -59,14 +60,6 @@ defmodule BlockScoutWeb.API.V2.ValidatorControllerTest do assert compare_item(validator, json) end - defp compare(a, b) do - cond do - a < b -> :lt - a > b -> :gt - true -> :eq - end - end - describe "/validators/stability" do test "get paginated list of the validators", %{conn: conn} do validators = diff --git a/apps/explorer/lib/explorer/chain/advanced_filter.ex b/apps/explorer/lib/explorer/chain/advanced_filter.ex new file mode 100644 index 000000000000..6ad943fc2616 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/advanced_filter.ex @@ -0,0 +1,706 @@ +defmodule Explorer.Chain.AdvancedFilter do + @moduledoc """ + Models an advanced filter. 
+ """ + + use Explorer.Schema + + import Ecto.Query + + alias Explorer.{Chain, Helper, PagingOptions} + alias Explorer.Chain.{Address, Data, Hash, InternalTransaction, TokenTransfer, Transaction} + + @primary_key false + typed_embedded_schema null: false do + field(:hash, Hash.Full) + field(:type, :string) + field(:input, Data) + field(:timestamp, :utc_datetime_usec) + + belongs_to( + :from_address, + Address, + foreign_key: :from_address_hash, + references: :hash, + type: Hash.Address + ) + + belongs_to( + :to_address, + Address, + foreign_key: :to_address_hash, + references: :hash, + type: Hash.Address + ) + + field(:value, :decimal, null: true) + + has_one(:token_transfer, TokenTransfer, foreign_key: :transaction_hash, references: :hash, null: true) + + field(:fee, :decimal) + + field(:block_number, :integer) + field(:transaction_index, :integer) + field(:internal_transaction_index, :integer, null: true) + field(:token_transfer_index, :integer, null: true) + field(:token_transfer_batch_index, :integer, null: true) + end + + @typep tx_types :: {:tx_types, [String.t()] | nil} + @typep methods :: {:methods, [String.t()] | nil} + @typep age :: {:age, [{:from, DateTime.t() | nil} | {:to, DateTime.t() | nil}] | nil} + @typep from_address_hashes :: {:from_address_hashes, [Hash.Address.t()] | nil} + @typep to_address_hashes :: {:to_address_hashes, [Hash.Address.t()] | nil} + @typep address_relation :: {:address_relation, :or | :and | nil} + @typep amount :: {:amount, [{:from, Decimal.t()} | {:to, Decimal.t()}] | nil} + @typep token_contract_address_hashes :: + {:token_contract_address_hashes, [{:include, [Hash.Address.t()]} | {:include, [Hash.Address.t()]}] | nil} + @type options :: [ + tx_types() + | methods() + | age() + | from_address_hashes() + | to_address_hashes() + | address_relation() + | amount() + | token_contract_address_hashes() + | Chain.paging_options() + | Chain.api?() + ] + + @spec list(options()) :: [__MODULE__.t()] + def list(options \\ []) do + 
paging_options = Keyword.get(options, :paging_options) + + tasks = + options + |> queries(paging_options) + |> Enum.map(fn query -> Task.async(fn -> Chain.select_repo(options).all(query) end) end) + + tasks + |> Task.yield_many(:timer.seconds(60)) + |> Enum.flat_map(fn {_task, res} -> + case res do + {:ok, result} -> + result + + {:exit, reason} -> + raise "Query fetching advanced filters terminated: #{inspect(reason)}" + + nil -> + raise "Query fetching advanced filters timed out." + end + end) + |> Enum.map(&to_advanced_filter/1) + |> Enum.sort(&sort_function/2) + |> take_page_size(paging_options) + end + + defp queries(options, paging_options) do + cond do + only_transactions?(options) -> + [transactions_query(paging_options, options), internal_transactions_query(paging_options, options)] + + only_token_transfers?(options) -> + [token_transfers_query(paging_options, options)] + + true -> + [ + transactions_query(paging_options, options), + internal_transactions_query(paging_options, options), + token_transfers_query(paging_options, options) + ] + end + end + + defp only_transactions?(options) do + transaction_types = options[:tx_types] + tokens_to_include = options[:token_contract_address_hashes][:include] + + transaction_types == ["COIN_TRANSFER"] or tokens_to_include == ["native"] + end + + defp only_token_transfers?(options) do + transaction_types = options[:tx_types] + tokens_to_include = options[:token_contract_address_hashes][:include] + tokens_to_exclude = options[:token_contract_address_hashes][:exclude] + + (is_list(transaction_types) and length(transaction_types) > 0 and "COIN_TRANSFER" not in transaction_types) or + (is_list(tokens_to_include) and length(tokens_to_include) > 0 and "native" not in tokens_to_include) or + (is_list(tokens_to_exclude) and "native" in tokens_to_exclude) + end + + defp to_advanced_filter(%Transaction{} = transaction) do + %__MODULE__{ + hash: transaction.hash, + type: "coin_transfer", + input: transaction.input, + 
timestamp: transaction.block_timestamp, + from_address: transaction.from_address, + to_address: transaction.to_address, + value: transaction.value.value, + fee: transaction |> Transaction.fee(:wei) |> elem(1), + block_number: transaction.block_number, + transaction_index: transaction.index + } + end + + defp to_advanced_filter(%InternalTransaction{} = internal_transaction) do + %__MODULE__{ + hash: internal_transaction.transaction.hash, + type: "coin_transfer", + input: internal_transaction.input, + timestamp: internal_transaction.transaction.block_timestamp, + from_address: internal_transaction.from_address, + to_address: internal_transaction.to_address, + value: internal_transaction.value.value, + fee: + internal_transaction.transaction.gas_price && internal_transaction.gas_used && + Decimal.mult(internal_transaction.transaction.gas_price.value, internal_transaction.gas_used), + block_number: internal_transaction.transaction.block_number, + transaction_index: internal_transaction.transaction.index, + internal_transaction_index: internal_transaction.index + } + end + + defp to_advanced_filter(%TokenTransfer{} = token_transfer) do + %__MODULE__{ + hash: token_transfer.transaction.hash, + type: token_transfer.token_type, + input: token_transfer.transaction.input, + timestamp: token_transfer.transaction.block_timestamp, + from_address: token_transfer.from_address, + to_address: token_transfer.to_address, + fee: token_transfer.transaction |> Transaction.fee(:wei) |> elem(1), + token_transfer: %TokenTransfer{ + token_transfer + | amounts: [token_transfer.amount], + token_ids: token_transfer.token_id && [token_transfer.token_id] + }, + block_number: token_transfer.block_number, + transaction_index: token_transfer.transaction.index, + token_transfer_index: token_transfer.log_index, + token_transfer_batch_index: token_transfer.reverse_index_in_batch + } + end + + defp sort_function(a, b) do + case { + Helper.compare(a.block_number, b.block_number), + 
Helper.compare(a.transaction_index, b.transaction_index), + Helper.compare(a.token_transfer_index, b.token_transfer_index), + Helper.compare(a.token_transfer_batch_index, b.token_transfer_batch_index), + Helper.compare(a.internal_transaction_index, b.internal_transaction_index) + } do + {:lt, _, _, _, _} -> + false + + {:eq, :lt, _, _, _} -> + false + + {:eq, :eq, _, _, _} -> + case {a.token_transfer_index, a.token_transfer_batch_index, a.internal_transaction_index, + b.token_transfer_index, b.token_transfer_batch_index, b.internal_transaction_index} do + {nil, _, nil, _, _, _} -> + true + + {a_tt_index, a_tt_batch_index, nil, b_tt_index, b_tt_batch_index, _} when not is_nil(b_tt_index) -> + {a_tt_index, a_tt_batch_index} > {b_tt_index, b_tt_batch_index} + + {nil, _, a_it_index, _, _, b_it_index} -> + a_it_index > b_it_index + + {_, _, _, _, _, _} -> + false + end + + _ -> + true + end + end + + defp take_page_size(list, %PagingOptions{page_size: page_size}) when is_integer(page_size) do + Enum.take(list, page_size) + end + + defp take_page_size(list, _), do: list + + defp transactions_query(paging_options, options) do + query = + from(transaction in Transaction, + as: :transaction, + preload: [ + :block, + from_address: [:names, :smart_contract, :proxy_implementations], + to_address: [:names, :smart_contract, :proxy_implementations] + ], + order_by: [ + desc: transaction.block_number, + desc: transaction.index + ] + ) + + query + |> page_transactions(paging_options) + |> limit_query(paging_options) + |> apply_transactions_filters(options) + end + + defp page_transactions(query, %PagingOptions{ + key: %{ + block_number: block_number, + transaction_index: tx_index + } + }) do + dynamic_condition = + dynamic(^page_block_number_dynamic(:transaction, block_number) or ^page_tx_index_dynamic(block_number, tx_index)) + + query |> where(^dynamic_condition) + end + + defp page_transactions(query, _), do: query + + defp internal_transactions_query(paging_options, options) do 
+ query = + from(internal_transaction in InternalTransaction, + as: :internal_transaction, + join: transaction in assoc(internal_transaction, :transaction), + as: :transaction, + preload: [ + from_address: [:names, :smart_contract, :proxy_implementations], + to_address: [:names, :smart_contract, :proxy_implementations], + transaction: transaction + ], + order_by: [ + desc: transaction.block_number, + desc: transaction.index, + desc: internal_transaction.index + ] + ) + + query + |> page_internal_transactions(paging_options) + |> limit_query(paging_options) + |> apply_transactions_filters(options) + end + + defp page_internal_transactions(query, %PagingOptions{ + key: %{ + block_number: block_number, + transaction_index: tx_index, + internal_transaction_index: nil + } + }) do + case {block_number, tx_index} do + {0, 0} -> + query |> where(as(:transaction).block_number == ^block_number and as(:transaction).index == ^tx_index) + + {0, tx_index} -> + query + |> where(as(:transaction).block_number == ^block_number and as(:transaction).index <= ^tx_index) + + {block_number, 0} -> + query |> where(as(:transaction).block_number < ^block_number) + + _ -> + query + |> where( + as(:transaction).block_number < ^block_number or + (as(:transaction).block_number == ^block_number and as(:transaction).index <= ^tx_index) + ) + end + end + + defp page_internal_transactions(query, %PagingOptions{ + key: %{ + block_number: block_number, + transaction_index: tx_index, + internal_transaction_index: it_index + } + }) do + dynamic_condition = + dynamic( + ^page_block_number_dynamic(:transaction, block_number) or ^page_tx_index_dynamic(block_number, tx_index) or + ^page_it_index_dynamic(block_number, tx_index, it_index) + ) + + query + |> where(^dynamic_condition) + end + + defp page_internal_transactions(query, _), do: query + + defp token_transfers_query(paging_options, options) do + token_transfer_query = + from(token_transfer in TokenTransfer, + as: :token_transfer, + join: transaction 
in assoc(token_transfer, :transaction), + as: :transaction, + join: token in assoc(token_transfer, :token), + as: :token, + select: %TokenTransfer{ + token_transfer + | token_id: fragment("UNNEST(?)", token_transfer.token_ids), + amount: + fragment("UNNEST(COALESCE(?, ARRAY[COALESCE(?, 1)]))", token_transfer.amounts, token_transfer.amount), + reverse_index_in_batch: + fragment("GENERATE_SERIES(COALESCE(ARRAY_LENGTH(?, 1), 1), 1, -1)", token_transfer.amounts), + token_decimals: token.decimals + }, + order_by: [ + desc: token_transfer.block_number, + desc: token_transfer.log_index + ] + ) + + token_transfer_query + |> apply_token_transfers_filters(options) + |> page_token_transfers(paging_options) + |> filter_token_transfers_by_amount(options[:amount][:from], options[:amount][:to]) + |> make_token_transfer_query_unnested() + |> limit_query(paging_options) + end + + defp page_token_transfers(query, %PagingOptions{ + key: %{ + block_number: block_number, + transaction_index: tx_index, + token_transfer_index: nil, + internal_transaction_index: nil + } + }) do + case {block_number, tx_index} do + {0, 0} -> + query |> where(as(:transaction).block_number == ^block_number and as(:transaction).index == ^tx_index) + + {0, tx_index} -> + query + |> where([token_transfer], token_transfer.block_number == ^block_number and as(:transaction).index < ^tx_index) + + {block_number, 0} -> + query |> where([token_transfer], token_transfer.block_number < ^block_number) + + {block_number, tx_index} -> + query + |> where( + [token_transfer], + token_transfer.block_number < ^block_number or + (token_transfer.block_number == ^block_number and as(:transaction).index <= ^tx_index) + ) + end + end + + defp page_token_transfers(query, %PagingOptions{ + key: %{ + block_number: block_number, + transaction_index: tx_index, + token_transfer_index: nil + } + }) do + dynamic_condition = + dynamic( + ^page_block_number_dynamic(:token_transfer, block_number) or ^page_tx_index_dynamic(block_number, 
tx_index) + ) + + query |> where(^dynamic_condition) + end + + defp page_token_transfers(query, %PagingOptions{ + key: %{ + block_number: block_number, + token_transfer_index: tt_index, + token_transfer_batch_index: tt_batch_index + } + }) do + dynamic_condition = + dynamic( + ^page_block_number_dynamic(:token_transfer, block_number) or + ^page_tt_index_dynamic(:token_transfer, block_number, tt_index, tt_batch_index) + ) + + paged_query = query |> where(^dynamic_condition) + + paged_query + |> make_token_transfer_query_unnested() + |> where( + ^page_tt_batch_index_dynamic( + block_number, + tt_index, + tt_batch_index + ) + ) + end + + defp page_token_transfers(query, _), do: query + + defp page_block_number_dynamic(binding, block_number) when block_number > 0 do + dynamic(as(^binding).block_number < ^block_number) + end + + defp page_block_number_dynamic(_, _) do + dynamic(false) + end + + defp page_tx_index_dynamic(block_number, tx_index) when tx_index > 0 do + dynamic([transaction: tx], tx.block_number == ^block_number and tx.index < ^tx_index) + end + + defp page_tx_index_dynamic(_, _) do + dynamic(false) + end + + defp page_it_index_dynamic(block_number, tx_index, it_index) when it_index > 0 do + dynamic( + [transaction: tx, internal_transaction: it], + tx.block_number == ^block_number and tx.index == ^tx_index and + it.index < ^it_index + ) + end + + defp page_it_index_dynamic(_, _, _) do + dynamic(false) + end + + defp page_tt_index_dynamic(binding, block_number, tt_index, tt_batch_index) + when tt_index > 0 and tt_batch_index > 1 do + dynamic(as(^binding).block_number == ^block_number and as(^binding).log_index <= ^tt_index) + end + + defp page_tt_index_dynamic(binding, block_number, tt_index, _tt_batch_index) when tt_index > 0 do + dynamic(as(^binding).block_number == ^block_number and as(^binding).log_index < ^tt_index) + end + + defp page_tt_index_dynamic(_, _, _, _) do + dynamic(false) + end + + defp page_tt_batch_index_dynamic(block_number, tt_index, 
tt_batch_index) when tt_batch_index > 1 do + dynamic( + [unnested_token_transfer: tt], + ^page_block_number_dynamic(:unnested_token_transfer, block_number) or + ^page_tt_index_dynamic( + :unnested_token_transfer, + block_number, + tt_index, + 0 + ) or + (tt.block_number == ^block_number and tt.log_index == ^tt_index and tt.reverse_index_in_batch < ^tt_batch_index) + ) + end + + defp page_tt_batch_index_dynamic(_, _, _) do + dynamic(true) + end + + defp limit_query(query, %PagingOptions{page_size: limit}) when is_integer(limit), do: limit(query, ^limit) + + defp limit_query(query, _), do: query + + defp apply_token_transfers_filters(query, options) do + query + |> filter_by_tx_type(options[:tx_types]) + |> filter_token_transfers_by_methods(options[:methods]) + |> filter_by_token(options[:token_contract_address_hashes][:include], :include) + |> filter_by_token(options[:token_contract_address_hashes][:exclude], :exclude) + |> apply_common_filters(options) + end + + defp apply_transactions_filters(query, options) do + query + |> filter_transactions_by_amount(options[:amount][:from], options[:amount][:to]) + |> filter_transactions_by_methods(options[:methods]) + |> apply_common_filters(options) + end + + defp apply_common_filters(query, options) do + query + |> only_collated_transactions() + |> filter_by_timestamp(options[:age][:from], options[:age][:to]) + |> filter_by_addresses(options[:from_address_hashes], options[:to_address_hashes], options[:address_relation]) + end + + defp only_collated_transactions(query) do + query |> where(not is_nil(as(:transaction).block_number) and not is_nil(as(:transaction).index)) + end + + defp filter_by_tx_type(query, [_ | _] = tx_types) do + query |> where([token_transfer], token_transfer.token_type in ^tx_types) + end + + defp filter_by_tx_type(query, _), do: query + + defp filter_transactions_by_methods(query, [_ | _] = methods) do + prepared_methods = prepare_methods(methods) + + query |> where([t], fragment("substring(? 
FOR 4)", t.input) in ^prepared_methods) + end + + defp filter_transactions_by_methods(query, _), do: query + + defp filter_token_transfers_by_methods(query, [_ | _] = methods) do + prepared_methods = prepare_methods(methods) + + query |> where(fragment("substring(? FOR 4)", as(:transaction).input) in ^prepared_methods) + end + + defp filter_token_transfers_by_methods(query, _), do: query + + defp prepare_methods(methods) do + methods + |> Enum.flat_map(fn + method -> + case Data.cast(method) do + {:ok, method} -> [method.bytes] + _ -> [] + end + end) + end + + defp filter_by_timestamp(query, %DateTime{} = from, %DateTime{} = to) do + query |> where(as(:transaction).block_timestamp >= ^from and as(:transaction).block_timestamp <= ^to) + end + + defp filter_by_timestamp(query, %DateTime{} = from, _to) do + query |> where(as(:transaction).block_timestamp >= ^from) + end + + defp filter_by_timestamp(query, _from, %DateTime{} = to) do + query |> where(as(:transaction).block_timestamp <= ^to) + end + + defp filter_by_timestamp(query, _, _), do: query + + defp filter_by_addresses(query, from_addresses, to_addresses, relation) do + to_address_dynamic = do_filter_by_addresses(:to_address_hash, to_addresses) + + from_address_dynamic = do_filter_by_addresses(:from_address_hash, from_addresses) + + final_condition = + case {to_address_dynamic, from_address_dynamic} do + {not_nil_to_address, not_nil_from_address} when nil not in [not_nil_to_address, not_nil_from_address] -> + combine_filter_by_addresses(not_nil_to_address, not_nil_from_address, relation) + + _ -> + to_address_dynamic || from_address_dynamic + end + + case final_condition do + not_nil when not is_nil(not_nil) -> query |> where(^not_nil) + _ -> query + end + end + + defp do_filter_by_addresses(field, addresses) do + to_include_dynamic = do_filter_by_addresses_inclusion(field, addresses && Keyword.get(addresses, :include)) + to_exclude_dynamic = do_filter_by_addresses_exclusion(field, addresses && 
Keyword.get(addresses, :exclude)) + + case {to_include_dynamic, to_exclude_dynamic} do + {not_nil_include, not_nil_exclude} when nil not in [not_nil_include, not_nil_exclude] -> + dynamic([t], ^not_nil_include and ^not_nil_exclude) + + _ -> + to_include_dynamic || to_exclude_dynamic + end + end + + defp do_filter_by_addresses_inclusion(field, [_ | _] = addresses) do + dynamic([t], field(t, ^field) in ^addresses) + end + + defp do_filter_by_addresses_inclusion(_, _), do: nil + + defp do_filter_by_addresses_exclusion(field, [_ | _] = addresses) do + dynamic([t], field(t, ^field) not in ^addresses) + end + + defp do_filter_by_addresses_exclusion(_, _), do: nil + + defp combine_filter_by_addresses(from_addresses_dynamic, to_addresses_dynamic, :or) do + dynamic([t], ^from_addresses_dynamic or ^to_addresses_dynamic) + end + + defp combine_filter_by_addresses(from_addresses_dynamic, to_addresses_dynamic, _) do + dynamic([t], ^from_addresses_dynamic and ^to_addresses_dynamic) + end + + @eth_decimals 1000_000_000_000_000_000 + + defp filter_transactions_by_amount(query, from, to) when not is_nil(from) and not is_nil(to) and from < to do + query |> where([t], t.value / @eth_decimals >= ^from and t.value / @eth_decimals <= ^to) + end + + defp filter_transactions_by_amount(query, _from, to) when not is_nil(to) do + query |> where([t], t.value / @eth_decimals <= ^to) + end + + defp filter_transactions_by_amount(query, from, _to) when not is_nil(from) do + query |> where([t], t.value / @eth_decimals >= ^from) + end + + defp filter_transactions_by_amount(query, _, _), do: query + + defp filter_token_transfers_by_amount(query, from, to) when not is_nil(from) and not is_nil(to) and from < to do + unnested_query = make_token_transfer_query_unnested(query) + + unnested_query + |> where( + [unnested_token_transfer: tt], + tt.amount / fragment("10 ^ COALESCE(?, 0)", tt.token_decimals) >= ^from and + tt.amount / fragment("10 ^ COALESCE(?, 0)", tt.token_decimals) <= ^to + ) + end + + 
defp filter_token_transfers_by_amount(query, _from, to) when not is_nil(to) do + unnested_query = make_token_transfer_query_unnested(query) + + unnested_query + |> where( + [unnested_token_transfer: tt], + tt.amount / fragment("10 ^ COALESCE(?, 0)", tt.token_decimals) <= ^to + ) + end + + defp filter_token_transfers_by_amount(query, from, _to) when not is_nil(from) do + unnested_query = make_token_transfer_query_unnested(query) + + unnested_query + |> where( + [unnested_token_transfer: tt], + tt.amount / fragment("10 ^ COALESCE(?, 0)", tt.token_decimals) >= ^from + ) + end + + defp filter_token_transfers_by_amount(query, _, _), do: query + + defp make_token_transfer_query_unnested(query) do + if has_named_binding?(query, :unnested_token_transfer) do + query + else + from(token_transfer in subquery(query), + as: :unnested_token_transfer, + preload: [ + :transaction, + :token, + from_address: [:names, :smart_contract, :proxy_implementations], + to_address: [:names, :smart_contract, :proxy_implementations] + ], + select_merge: %{ + token_ids: [token_transfer.token_id], + amounts: [token_transfer.amount] + } + ) + end + end + + defp filter_by_token(query, [_ | _] = token_contract_address_hashes, :include) do + filtered = token_contract_address_hashes |> Enum.reject(&(&1 == "native")) + query |> where([token_transfer], token_transfer.token_contract_address_hash in ^filtered) + end + + defp filter_by_token(query, [_ | _] = token_contract_address_hashes, :exclude) do + filtered = token_contract_address_hashes |> Enum.reject(&(&1 == "native")) + query |> where([token_transfer], token_transfer.token_contract_address_hash not in ^filtered) + end + + defp filter_by_token(query, _, _), do: query +end diff --git a/apps/explorer/lib/explorer/chain/contract_method.ex b/apps/explorer/lib/explorer/chain/contract_method.ex index e23c7811f1aa..c4716cde0043 100644 --- a/apps/explorer/lib/explorer/chain/contract_method.ex +++ b/apps/explorer/lib/explorer/chain/contract_method.ex @@ 
-9,7 +9,7 @@ defmodule Explorer.Chain.ContractMethod do use Explorer.Schema alias Explorer.Chain.{Hash, MethodIdentifier, SmartContract} - alias Explorer.Repo + alias Explorer.{Chain, Repo} typed_schema "contract_methods" do field(:identifier, MethodIdentifier) @@ -65,7 +65,7 @@ defmodule Explorer.Chain.ContractMethod do end @doc """ - Finds limited number of contract methods by selector id + Query that finds limited number of contract methods by selector id """ @spec find_contract_method_query(binary(), integer()) :: Ecto.Query.t() def find_contract_method_query(method_id, limit) do @@ -76,6 +76,51 @@ defmodule Explorer.Chain.ContractMethod do ) end + @doc """ + Finds contract method by selector id + """ + @spec find_contract_method_by_selector_id(binary(), [Chain.api?()]) :: __MODULE__.t() | nil + def find_contract_method_by_selector_id(method_id, options) do + query = + from( + contract_method in __MODULE__, + where: contract_method.abi["type"] == "function", + where: contract_method.identifier == ^method_id, + limit: 1 + ) + + Chain.select_repo(options).one(query) + end + + @spec find_contract_method_by_name(String.t(), [Chain.api?()]) :: __MODULE__.t() | nil + def find_contract_method_by_name(name, options) do + query = + from( + contract_method in __MODULE__, + where: contract_method.abi["type"] == "function", + where: contract_method.abi["name"] == ^name, + limit: 1 + ) + + Chain.select_repo(options).one(query) + end + + @doc """ + Finds contract methods by selector id + """ + @spec find_contract_methods(binary(), [Chain.api?()]) :: [__MODULE__.t()] + def find_contract_methods(method_ids, options) do + query = + from( + contract_method in __MODULE__, + distinct: contract_method.identifier, + where: contract_method.abi["type"] == "function", + where: contract_method.identifier in ^method_ids + ) + + Chain.select_repo(options).all(query) + end + defp abi_element_to_contract_method(element) do case ABI.parse_specification([element], include_events?: true) do 
[selector] -> diff --git a/apps/explorer/lib/explorer/chain/csv_export/address_transaction_csv_exporter.ex b/apps/explorer/lib/explorer/chain/csv_export/address_transaction_csv_exporter.ex index f7263c89c0e7..bfe1e92cfd0e 100644 --- a/apps/explorer/lib/explorer/chain/csv_export/address_transaction_csv_exporter.ex +++ b/apps/explorer/lib/explorer/chain/csv_export/address_transaction_csv_exporter.ex @@ -3,12 +3,7 @@ defmodule Explorer.Chain.CSVExport.AddressTransactionCsvExporter do Exports transactions to a csv file. """ - import Ecto.Query, - only: [ - from: 2 - ] - - alias Explorer.{Market, PagingOptions, Repo} + alias Explorer.{Market, PagingOptions} alias Explorer.Market.MarketHistory alias Explorer.Chain.{Address, DenormalizationHelper, Hash, Transaction, Wei} alias Explorer.Chain.CSVExport.Helper @@ -67,7 +62,13 @@ defmodule Explorer.Chain.CSVExport.AddressTransactionCsvExporter do if Map.has_key?(acc, date) do acc else - Map.put(acc, date, price_at_date(date)) + market_history = MarketHistory.price_at_date(date) + + Map.put( + acc, + date, + {market_history && market_history.opening_price, market_history && market_history.closing_price} + ) end end) @@ -111,17 +112,4 @@ defmodule Explorer.Chain.CSVExport.AddressTransactionCsvExporter do {:maximum, value} -> "Max of #{value}" end end - - defp price_at_date(date) do - query = - from( - mh in MarketHistory, - where: mh.date == ^date - ) - - case Repo.one(query) do - nil -> {nil, nil} - price -> {price.opening_price, price.closing_price} - end - end end diff --git a/apps/explorer/lib/explorer/chain/token.ex b/apps/explorer/lib/explorer/chain/token.ex index d5d8838d06df..c89e07788142 100644 --- a/apps/explorer/lib/explorer/chain/token.ex +++ b/apps/explorer/lib/explorer/chain/token.ex @@ -238,6 +238,14 @@ defmodule Explorer.Chain.Token do Chain.select_repo(options).get_by(__MODULE__, contract_address_hash: hash) end + @doc """ + Gets tokens with given contract address hashes. 
+ """ + @spec get_by_contract_address_hashes([Hash.Address.t()], [Chain.api?()]) :: [Token.t()] + def get_by_contract_address_hashes(hashes, options) do + Chain.select_repo(options).all(from(t in __MODULE__, where: t.contract_address_hash in ^hashes)) + end + @doc """ For usage in Indexer.Fetcher.TokenInstance.LegacySanitizeERC721 """ diff --git a/apps/explorer/lib/explorer/chain/token_transfer.ex b/apps/explorer/lib/explorer/chain/token_transfer.ex index 12edc67d89de..c86be9a8cc28 100644 --- a/apps/explorer/lib/explorer/chain/token_transfer.ex +++ b/apps/explorer/lib/explorer/chain/token_transfer.ex @@ -61,6 +61,9 @@ defmodule Explorer.Chain.TokenTransfer do * `:log_index` - Index of the corresponding `t:Explorer.Chain.Log.t/0` in the block. * `:amounts` - Tokens transferred amounts in case of batched transfer in ERC-1155 * `:token_ids` - IDs of the tokens (applicable to ERC-1155 tokens) + * `:token_id` - virtual field, ID of token, used to unnest ERC-1155 batch transfers + * `:index_in_batch` - Index of the token transfer in the ERC-1155 batch transfer + * `:reverse_index_in_batch` - Reverse index of the token transfer in the ERC-1155 batch transfer, last element index is 1 * `:block_consensus` - Consensus of the block that the transfer took place """ @primary_key false @@ -70,7 +73,10 @@ defmodule Explorer.Chain.TokenTransfer do field(:log_index, :integer, primary_key: true, null: false) field(:amounts, {:array, :decimal}) field(:token_ids, {:array, :decimal}) + field(:token_id, :decimal, virtual: true) field(:index_in_batch, :integer, virtual: true) + field(:reverse_index_in_batch, :integer, virtual: true) + field(:token_decimals, :decimal, virtual: true) field(:token_type, :string) field(:block_consensus, :boolean) diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index b943b9febd47..9c0e2c029906 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ 
b/apps/explorer/lib/explorer/chain/transaction.ex @@ -263,7 +263,7 @@ defmodule Explorer.Chain.Transaction do alias ABI.FunctionSelector alias Ecto.Association.NotLoaded alias Ecto.Changeset - alias Explorer.{Chain, PagingOptions, Repo, SortingHelper} + alias Explorer.{Chain, Helper, PagingOptions, Repo, SortingHelper} alias Explorer.Chain.{ Block, @@ -1529,10 +1529,10 @@ defmodule Explorer.Chain.Transaction do defp compare_default_sorting(a, b) do case { - compare(a.block_number, b.block_number), - compare(a.index, b.index), + Helper.compare(a.block_number, b.block_number), + Helper.compare(a.index, b.index), DateTime.compare(a.inserted_at, b.inserted_at), - compare(Hash.to_integer(a.hash), Hash.to_integer(b.hash)) + Helper.compare(Hash.to_integer(a.hash), Hash.to_integer(b.hash)) } do {:lt, _, _, _} -> false {:eq, :lt, _, _} -> false @@ -1542,14 +1542,6 @@ defmodule Explorer.Chain.Transaction do end end - defp compare(a, b) do - cond do - a < b -> :lt - a > b -> :gt - true -> :eq - end - end - @doc """ Creates a query to fetch transactions taking into account paging_options (possibly nil), from_block (may be nil), to_block (may be nil) and boolean `with_pending?` that indicates if pending transactions should be included diff --git a/apps/explorer/lib/explorer/helper.ex b/apps/explorer/lib/explorer/helper.ex index 92d8652f40e3..bc09fc0bc422 100644 --- a/apps/explorer/lib/explorer/helper.ex +++ b/apps/explorer/lib/explorer/helper.ex @@ -166,4 +166,19 @@ defmodule Explorer.Helper do end def valid_url?(_), do: false + + @doc """ + Compare two values and returns either :lt, :eq or :gt. + + Please be careful: this function compares arguments using `<` and `>`, + hence it should not be used to compare structures (for instance %DateTime{} or %Decimal{}). 
+ """ + @spec compare(term(), term()) :: :lt | :eq | :gt + def compare(a, b) do + cond do + a < b -> :lt + a > b -> :gt + true -> :eq + end + end end diff --git a/apps/explorer/lib/explorer/market/market_history.ex b/apps/explorer/lib/explorer/market/market_history.ex index d8ddc3ad5a33..bfcef2da37c3 100644 --- a/apps/explorer/lib/explorer/market/market_history.ex +++ b/apps/explorer/lib/explorer/market/market_history.ex @@ -5,6 +5,8 @@ defmodule Explorer.Market.MarketHistory do use Explorer.Schema + alias Explorer.Chain + @typedoc """ The recorded values of the configured coin to USD for a single day. @@ -22,4 +24,18 @@ defmodule Explorer.Market.MarketHistory do field(:tvl, :decimal) field(:secondary_coin, :boolean) end + + @doc """ + Returns the market history (for the secondary coin if specified) for the given date. + """ + @spec price_at_date(Date.t(), boolean(), [Chain.api?()]) :: t() | nil + def price_at_date(date, secondary_coin? \\ false, options \\ []) do + query = + from( + mh in __MODULE__, + where: mh.date == ^date and mh.secondary_coin == ^secondary_coin? 
+ ) + + Chain.select_repo(options).one(query) + end end diff --git a/cspell.json b/cspell.json index aa1edcf326aa..c19fa922c259 100644 --- a/cspell.json +++ b/cspell.json @@ -9,108 +9,15 @@ "apps/block_scout_web/assets/js/lib/ace/src-min/*.js" ], "words": [ - "AION", - "AIRTABLE", - "ARGMAX", - "Aiubo", - "Asfpp", - "Asfpp", - "Autodetection", - "Autonity", - "Averify", - "bitmask", - "Blockchair", - "CALLCODE", - "CBOR", - "Celestia", - "Cldr", - "Consolas", - "Cyclomatic", - "DATETIME", - "DELEGATECALL", - "Decompiler", - "DefiLlama", - "DefiLlama", - "Denormalization", - "Denormalized", - "ECTO", - "EDCSA", - "Ebhwp", - "Encryptor", - "Erigon", - "Ethash", - "Faileddi", - "Filesize", - "Floki", - "Fuov", - "Hazkne", - "Hodl", - "Iframe", - "Iframes", - "Incrementer", - "Instrumenter", - "Karnaugh", - "Keepalive", - "LUKSO", - "Limegreen", - "MARKETCAP", - "Mobula", - "MDWW", - "Mainnets", - "Mendonça", - "Menlo", - "Merkle", - "Mixfile", - "NOTOK", - "Nerg", - "Nerg", - "Nethermind", - "Neue", - "Njhr", - "Nodealus", - "NovesFi", - "Numbe", - "Nunito", - "PGDATABASE", - "PGHOST", - "PGPASSWORD", - "PGPORT", - "PGUSER", - "POSDAO", - "Posix", - "Postrge", - "Qebz", - "Qmbgk", - "REINDEX", - "RPC's", - "RPCs", - "SENDGRID", - "SJONRPC", - "SOLIDITYSCAN", - "SOLIDITYSCAN", - "STATICCALL", - "Secon", - "Segoe", - "Sokol", - "Synthereum", - "Sérgio", - "Tcnwg", - "Testinit", - "Testit", - "Testname", - "Txns", - "UUPS", - "Unitarion", - "Unitorius", - "Unitorus", - "Utqn", - "Wanchain", "aave", "absname", "acbs", "accs", "actb", "addedfile", + "AION", + "AIRTABLE", + "Aiubo", "alloc", "amzootyukbugmx", "apikey", @@ -120,10 +27,14 @@ "ARGMAX", "arounds", "asda", + "Asfpp", "atoken", "autodetectfalse", + "Autodetection", "autodetecttrue", + "Autonity", "autoplay", + "Averify", "backoff", "badhash", "badnumber", @@ -137,11 +48,13 @@ "bignumber", "bigserial", "binwrite", + "bitmask", "bizbuz", + "Blockchair", "blockheight", "blockless", - "blocknum", "blockno", + 
"blocknum", "blockreward", "blockscout", "blockscoutuser", @@ -149,6 +62,7 @@ "bridgedtokenlist", "browserconfig", "bsdr", + "Btvk", "buildcache", "buildin", "buildx", @@ -159,10 +73,13 @@ "bzzr", "cacerts", "callcode", + "CALLCODE", "calltracer", "callvalue", "capturelog", "cattributes", + "CBOR", + "Celestia", "cellspacing", "certifi", "cfasync", @@ -175,6 +92,7 @@ "checkverifystatus", "childspec", "citext", + "Cldr", "clearfix", "clickover", "codeformat", @@ -193,6 +111,7 @@ "compilerversion", "concache", "cond", + "Consolas", "contractaddress", "contractaddresses", "contractname", @@ -205,23 +124,30 @@ "ctbs", "ctid", "cumalative", + "Cyclomatic", "cypherpunk", "czilladx", "datapoint", "datepicker", + "DATETIME", "deae", "decamelize", "decompiled", "decompiler", + "Decompiler", "dedup", + "DefiLlama", "defmock", "defsupervisor", "dejob", "dejobio", "delegatecall", + "DELEGATECALL", "delegators", "demonitor", "denormalization", + "Denormalization", + "Denormalized", "descr", "describedby", "differenceby", @@ -229,17 +155,22 @@ "dropzone", "dxgd", "dyntsrohg", + "Ebhwp", "econnrefused", + "ECTO", + "EDCSA", "edhygl", "efkuga", + "Encryptor", "endregion", "enetunreach", "enoent", "epns", + "Erigon", "errora", "errorb", "erts", - "erts", + "Ethash", "etherchain", "ethprice", "ethsupply", @@ -255,18 +186,20 @@ "extname", "extremums", "exvcr", + "Faileddi", "falala", + "feelin", "FEVM", "filecoin", "Filecoin", "Filesize", - "Filecoin", - "fkey", "fkey", + "Floki", "fontawesome", "fortawesome", "fsym", "fullwidth", + "Fuov", "fvdskvjglav", "fwrite", "fwupv", @@ -294,6 +227,7 @@ "gtag", "happygokitty", "haspopup", + "Hazkne", "histoday", "hljs", "Hodl", @@ -302,11 +236,15 @@ "hyperledger", "ifdef", "ifeq", + "Iframe", "iframes", + "Iframes", "ilike", "illustr", "inapp", + "Incrementer", "insertable", + "Instrumenter", "intersectionby", "ints", "invalidend", @@ -322,19 +260,22 @@ "johnnny", "jsons", "juon", + "Karnaugh", "keccak", + "Keepalive", "keyout", "kittencream", 
+ "KnxbUejwY", "labeledby", "labelledby", "lastmod", - "lastmod", "lastname", "lastword", "lformat", "libraryaddress", "libraryname", "libsecp", + "Limegreen", "linecap", "linejoin", "listaccounts", @@ -342,27 +283,35 @@ "lkve", "llhauc", "loggable", + "LUKSO", "luxon", "mabi", + "Mainnets", "malihu", "mallowance", + "MARKETCAP", "maxlength", "mbot", "mcap", "mconst", "mdef", + "MDWW", "meer", - "meer", + "Mendonça", + "Menlo", "mergeable", + "Merkle", "metatags", "microsecs", "millis", "mintings", "mistmatches", "miterlimit", + "Mixfile", "mmem", "mname", "mnot", + "Mobula", "moxed", "moxing", "mpayable", @@ -382,12 +331,17 @@ "mykey", "nanomorph", "nbsp", + "Nerg", + "Nethermind", + "Neue", "newkey", "nftproduct", "ngettext", "nillifies", + "Njhr", "nlmyzui", "nocheck", + "Nodealus", "nohighlight", "nolink", "nonconsensus", @@ -397,9 +351,12 @@ "noreply", "NOTOK", "noves", + "NovesFi", "nowarn", "nowrap", "ntoa", + "Numbe", + "Nunito", "nxdomain", "offchain", "omni", @@ -415,7 +372,13 @@ "peekers", "pendingtxlist", "perc", + "permissionless", "persistable", + "PGDATABASE", + "PGHOST", + "PGPASSWORD", + "PGPORT", + "PGUSER", "phash", "pikaday", "pkey", @@ -428,6 +391,9 @@ "pocc", "polyline", "poolboy", + "POSDAO", + "Posix", + "Postrge", "prederive", "prederived", "progressbar", @@ -435,15 +401,15 @@ "psql", "purrstige", "qdai", + "Qebz", "qitmeer", - "qitmeer", + "Qmbgk", "qrcode", "queriable", "questiona", "questionb", "qwertyufhgkhiop", "qwertyuioiuytrewertyuioiuytrertyuio", - "qwertyuioiuytrewertyuioiuytrertyuio", "racecar", "raisedbrow", "rangeright", @@ -478,8 +444,9 @@ "RPCs", "safelow", "savechives", + "Secon", "secp", - "secp", + "Segoe", "seindexed", "selfdestruct", "selfdestructed", @@ -491,10 +458,13 @@ "shibarium", "shortdoc", "shortify", + "SJONRPC", "smallint", "smth", "snapshotted", "snapshotting", + "Sokol", + "SOLIDITYSCAN", "soljson", "someout", "sourcecode", @@ -505,6 +475,7 @@ "stakers", "stateroot", "staticcall", + "STATICCALL", "strftime", 
"strhash", "stringly", @@ -525,13 +496,18 @@ "sushiswap", "swal", "sweetalert", + "Synthereum", "tabindex", "tablist", "tabpanel", "tarekraafat", "tbody", "tbrf", + "Tcnwg", "tems", + "Testinit", + "Testit", + "Testname", "testpassword", "testtest", "testuser", @@ -556,6 +532,7 @@ "tsym", "txid", "txlistinternal", + "Txns", "txpool", "txreceipt", "ueberauth", @@ -565,21 +542,28 @@ "unfetched", "unfinalized", "unindexed", + "Unitarion", + "Unitorius", + "Unitorus", "unknownc", "unknowne", "unmarshal", "unmatching", "unnest", + "unnested", + "unoswap", "unpadded", "unprefixed", "unstaged", + "unxswap", "upsert", "upserted", "upserting", "upserts", "urijs", "urlset", - "urlset", + "Utqn", + "UUPS", "valign", "valuemax", "valuemin", @@ -592,6 +576,7 @@ "volumeto", "vyper", "walletconnect", + "Wanchain", "warninga", "warningb", "watchlist", @@ -622,36 +607,7 @@ "zkatana", "zkbob", "zkevm", - "erts", - "Asfpp", - "Nerg", - "secp", - "qwertyuioiuytrewertyuioiuytrertyuio", - "urlset", - "lastmod", - "qitmeer", - "meer", - "DefiLlama", - "SOLIDITYSCAN", - "fkey", - "getcontractcreation", - "contractaddresses", - "tokennfttx", - "libraryname", - "libraryaddress", - "evmversion", - "verifyproxycontract", - "checkproxyverification", - "NOTOK", - "sushiswap", - "zetachain", - "zksync", - "filecoin", - "Filecoin", - "permissionless", - "feelin", - "KnxbUejwY", - "Btvk" + "zksync" ], "enableFiletypes": [ "dotenv", From aaed915990495dd098899ca2434fb552adefff10 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Wed, 12 Jun 2024 19:19:40 +0300 Subject: [PATCH 086/150] Fix nil abi issue in get_naive_implementation_abi and get_master_copy_pattern methods (#10239) --- apps/explorer/lib/explorer/chain/smart_contract/proxy.ex | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex index de4df3dfddee..4edd81c6be23 100644 --- 
a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex @@ -385,6 +385,8 @@ defmodule Explorer.Chain.SmartContract.Proxy do end end + defp get_naive_implementation_abi(nil, _getter_name), do: nil + defp get_naive_implementation_abi(abi, getter_name) do abi |> Enum.find(fn method -> @@ -392,6 +394,8 @@ defmodule Explorer.Chain.SmartContract.Proxy do end) end + defp get_master_copy_pattern(nil), do: nil + defp get_master_copy_pattern(abi) do abi |> Enum.find(fn method -> From c31f9376801443afc62b9a0bea9ac5d01552f64b Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Wed, 12 Jun 2024 20:47:02 +0300 Subject: [PATCH 087/150] fix: batch transactions view recovered and support of proofs through ZkSync Hyperchain (#10234) * unified tx_hash field with Arbitrum batch transaction * Supported yet another contract function to submit batches proofs * fixed formatting issue --- .../runner/zksync/batch_transactions.ex | 4 +- .../lib/explorer/chain/transaction.ex | 2 +- .../chain/zksync/batch_transaction.ex | 38 ++++-- ...814_rename_field_in_batch_transactions.exs | 7 + .../fetcher/zksync/discovery/batches_data.ex | 2 +- .../fetcher/zksync/status_tracking/proven.ex | 123 ++++++++++++------ 6 files changed, 122 insertions(+), 54 deletions(-) create mode 100644 apps/explorer/priv/zk_sync/migrations/20240611091814_rename_field_in_batch_transactions.exs diff --git a/apps/explorer/lib/explorer/chain/import/runner/zksync/batch_transactions.ex b/apps/explorer/lib/explorer/chain/import/runner/zksync/batch_transactions.ex index 720519a10093..39804aa0f97f 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/zksync/batch_transactions.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/zksync/batch_transactions.ex @@ -60,7 +60,7 @@ defmodule Explorer.Chain.Import.Runner.ZkSync.BatchTransactions do | {:error, [Changeset.t()]} def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = _options) 
when is_list(changes_list) do # Enforce ZkSync.BatchTransaction ShareLocks order (see docs: sharelock.md) - ordered_changes_list = Enum.sort_by(changes_list, & &1.hash) + ordered_changes_list = Enum.sort_by(changes_list, & &1.tx_hash) {:ok, inserted} = Import.insert_changes_list( @@ -70,7 +70,7 @@ defmodule Explorer.Chain.Import.Runner.ZkSync.BatchTransactions do returning: true, timeout: timeout, timestamps: timestamps, - conflict_target: :hash, + conflict_target: :tx_hash, on_conflict: :nothing ) diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index 9c0e2c029906..448299893c6e 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -110,7 +110,7 @@ defmodule Explorer.Chain.Transaction.Schema do elem( quote do has_one(:zksync_batch_transaction, ZkSyncBatchTransaction, - foreign_key: :hash, + foreign_key: :tx_hash, references: :hash ) diff --git a/apps/explorer/lib/explorer/chain/zksync/batch_transaction.ex b/apps/explorer/lib/explorer/chain/zksync/batch_transaction.ex index ef3cfb0af8e5..e6fb1fc64af8 100644 --- a/apps/explorer/lib/explorer/chain/zksync/batch_transaction.ex +++ b/apps/explorer/lib/explorer/chain/zksync/batch_transaction.ex @@ -1,24 +1,38 @@ defmodule Explorer.Chain.ZkSync.BatchTransaction do - @moduledoc "Models a list of transactions related to a batch for ZkSync." + @moduledoc """ + Models a list of transactions related to a batch for ZkSync. 
+ + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.ZkSync.BatchTransactions + + Migrations: + - Explorer.Repo.ZkSync.Migrations.CreateZkSyncTables + - Explorer.Repo.ZkSync.Migrations.RenameFieldInBatchTransactions + """ use Explorer.Schema alias Explorer.Chain.{Hash, Transaction} alias Explorer.Chain.ZkSync.TransactionBatch - @required_attrs ~w(batch_number hash)a - - @type t :: %__MODULE__{ - batch_number: non_neg_integer(), - batch: %Ecto.Association.NotLoaded{} | TransactionBatch.t() | nil, - hash: Hash.t(), - l2_transaction: %Ecto.Association.NotLoaded{} | Transaction.t() | nil - } + @required_attrs ~w(batch_number tx_hash)a + @typedoc """ + * `tx_hash` - The hash of the rollup transaction. + * `l2_transaction` - An instance of `Explorer.Chain.Transaction` referenced by `tx_hash`. + * `batch_number` - The number of the ZkSync batch. + * `batch` - An instance of `Explorer.Chain.ZkSync.TransactionBatch` referenced by `batch_number`. 
+ """ @primary_key false - schema "zksync_batch_l2_transactions" do + typed_schema "zksync_batch_l2_transactions" do belongs_to(:batch, TransactionBatch, foreign_key: :batch_number, references: :number, type: :integer) - belongs_to(:l2_transaction, Transaction, foreign_key: :hash, primary_key: true, references: :hash, type: Hash.Full) + + belongs_to(:l2_transaction, Transaction, + foreign_key: :tx_hash, + primary_key: true, + references: :hash, + type: Hash.Full + ) timestamps() end @@ -32,6 +46,6 @@ defmodule Explorer.Chain.ZkSync.BatchTransaction do |> cast(attrs, @required_attrs) |> validate_required(@required_attrs) |> foreign_key_constraint(:batch_number) - |> unique_constraint(:hash) + |> unique_constraint(:tx_hash) end end diff --git a/apps/explorer/priv/zk_sync/migrations/20240611091814_rename_field_in_batch_transactions.exs b/apps/explorer/priv/zk_sync/migrations/20240611091814_rename_field_in_batch_transactions.exs new file mode 100644 index 000000000000..185a07079db0 --- /dev/null +++ b/apps/explorer/priv/zk_sync/migrations/20240611091814_rename_field_in_batch_transactions.exs @@ -0,0 +1,7 @@ +defmodule Explorer.Repo.ZkSync.Migrations.RenameFieldInBatchTransactions do + use Ecto.Migration + + def change do + rename(table(:zksync_batch_l2_transactions), :hash, to: :tx_hash) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/zksync/discovery/batches_data.ex b/apps/indexer/lib/indexer/fetcher/zksync/discovery/batches_data.ex index 75b514ba74d0..3c68a0ff4caf 100644 --- a/apps/indexer/lib/indexer/fetcher/zksync/discovery/batches_data.ex +++ b/apps/indexer/lib/indexer/fetcher/zksync/discovery/batches_data.ex @@ -400,7 +400,7 @@ defmodule Indexer.Fetcher.ZkSync.Discovery.BatchesData do [ %{ batch_number: block.batch_number, - hash: l2_tx_hash + tx_hash: l2_tx_hash } | l2_txs ] diff --git a/apps/indexer/lib/indexer/fetcher/zksync/status_tracking/proven.ex b/apps/indexer/lib/indexer/fetcher/zksync/status_tracking/proven.ex index 52165ef8f0eb..ad2bb986c8d2 
100644 --- a/apps/indexer/lib/indexer/fetcher/zksync/status_tracking/proven.ex +++ b/apps/indexer/lib/indexer/fetcher/zksync/status_tracking/proven.ex @@ -12,7 +12,7 @@ defmodule Indexer.Fetcher.ZkSync.StatusTracking.Proven do associate_and_import_or_prepare_for_recovery: 4 ] - import Indexer.Fetcher.ZkSync.Utils.Logging, only: [log_info: 1] + import Indexer.Fetcher.ZkSync.Utils.Logging, only: [log_error: 1, log_info: 1] @doc """ Checks if the oldest unproven batch in the database has the associated L1 proving transaction @@ -68,8 +68,6 @@ defmodule Indexer.Fetcher.ZkSync.StatusTracking.Proven do end defp get_proven_batches_from_calldata(calldata) do - "0x7f61885c" <> encoded_params = calldata - # /// @param batchNumber Rollup batch number # /// @param batchHash Hash of L2 batch # /// @param indexRepeatedStorageChanges The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more @@ -93,41 +91,90 @@ defmodule Indexer.Fetcher.ZkSync.StatusTracking.Proven do # uint256[] recursiveAggregationInput; # uint256[] serializedProof; # } - # proveBatches(StoredBatchInfo calldata _prevBatch, StoredBatchInfo[] calldata _committedBatches, ProofInput calldata _proof) - - # IO.inspect(FunctionSelector.decode("proveBatches((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),(uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32)[],(uint256[],uint256[]))")) - [_prev_batch, proven_batches, _proof] = - TypeDecoder.decode( - Base.decode16!(encoded_params, case: :lower), - %FunctionSelector{ - function: "proveBatches", - types: [ - tuple: [ - uint: 64, - bytes: 32, - uint: 64, - uint: 256, - bytes: 32, - bytes: 32, - uint: 256, - bytes: 32 - ], - array: - {:tuple, - [ - uint: 64, - bytes: 32, - uint: 64, - uint: 256, - bytes: 32, - bytes: 32, - uint: 256, - bytes: 32 - ]}, - tuple: [array: {:uint, 256}, array: {:uint, 256}] - ] - } - ) + proven_batches = + case calldata do + "0x7f61885c" <> encoded_params -> 
+ # proveBatches(StoredBatchInfo calldata _prevBatch, StoredBatchInfo[] calldata _committedBatches, ProofInput calldata _proof) + # IO.inspect(FunctionSelector.decode("proveBatches((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),(uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32)[],(uint256[],uint256[]))")) + [_prev_batch, proven_batches, _proof] = + TypeDecoder.decode( + Base.decode16!(encoded_params, case: :lower), + %FunctionSelector{ + function: "proveBatches", + types: [ + tuple: [ + uint: 64, + bytes: 32, + uint: 64, + uint: 256, + bytes: 32, + bytes: 32, + uint: 256, + bytes: 32 + ], + array: + {:tuple, + [ + uint: 64, + bytes: 32, + uint: 64, + uint: 256, + bytes: 32, + bytes: 32, + uint: 256, + bytes: 32 + ]}, + tuple: [array: {:uint, 256}, array: {:uint, 256}] + ] + } + ) + + proven_batches + + "0xc37533bb" <> encoded_params -> + # proveBatchesSharedBridge(uint256 _chainId, StoredBatchInfo calldata _prevBatch, StoredBatchInfo[] calldata _committedBatches, ProofInput calldata _proof) + # IO.inspect(FunctionSelector.decode("proveBatchesSharedBridge(uint256,(uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),(uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32)[],(uint256[],uint256[]))")) + [_chainid, _prev_batch, proven_batches, _proof] = + TypeDecoder.decode( + Base.decode16!(encoded_params, case: :lower), + %FunctionSelector{ + function: "proveBatchesSharedBridge", + types: [ + {:uint, 256}, + tuple: [ + uint: 64, + bytes: 32, + uint: 64, + uint: 256, + bytes: 32, + bytes: 32, + uint: 256, + bytes: 32 + ], + array: + {:tuple, + [ + uint: 64, + bytes: 32, + uint: 64, + uint: 256, + bytes: 32, + bytes: 32, + uint: 256, + bytes: 32 + ]}, + tuple: [array: {:uint, 256}, array: {:uint, 256}] + ] + } + ) + + proven_batches + + _ -> + log_error("Unknown calldata format: #{calldata}") + + [] + end log_info("Discovered #{length(proven_batches)} proven batches in the prove tx") From 
4297704b8e96a2b7ec9ea93912fcb634ee834abb Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Thu, 13 Jun 2024 14:20:40 +0300 Subject: [PATCH 088/150] feat: API endpoint to re-fetch token instance metadata (#10097) * feat: Re-fetch token instance metadata * Partially process review comments * Process reviewer comments. Part 2 * Process reviewer comments. Part 3 * Process reviewer comments. Part 4 * Fix events * Add test * Remove :token preload * fix formatting * Fix tests * Remove unused aliases * Add reCAPTCHA for token instance re-fetch API endpoint * Check event on websocket at /api/v2/tokens/{address_hash}/instances/{token_id}/refetch-metadata endpoint --- apps/block_scout_web/.sobelow-conf | 5 +- apps/block_scout_web/config/config.exs | 4 +- apps/block_scout_web/lib/block_scout_web.ex | 6 +- .../channels/token_instance_channel.ex | 26 +++ .../block_scout_web/channels/user_socket.ex | 1 + .../channels/user_socket_v2.ex | 1 + .../controllers/admin/setup_controller.ex | 2 +- .../controllers/api/rpc/rpc_translator.ex | 2 +- .../api/v2/csv_export_controller.ex | 4 +- .../controllers/api/v2/fallback_controller.ex | 41 ++-- .../controllers/api/v2/token_controller.ex | 94 ++++++--- .../tokens/instance/holder_controller.ex | 8 +- .../tokens/instance/metadata_controller.ex | 4 +- .../tokens/instance/transfer_controller.ex | 8 +- .../lib/block_scout_web/notifier.ex | 11 ++ .../plug/admin/check_owner_registered.ex | 2 +- .../plug/admin/require_admin_role.ex | 2 +- .../block_scout_web/realtime_event_handler.ex | 1 + .../lib/block_scout_web/router.ex | 7 +- .../{ => routers}/admin_router.ex | 2 +- .../{ => routers}/api_key_v2_router.ex | 2 +- .../{ => routers}/api_router.ex | 25 +-- .../smart_contracts_api_v2_router.ex | 46 +++-- .../routers/tokens_api_v2_router.ex | 71 +++++++ .../{ => routers}/utils_api_v2_router.ex | 2 +- .../{ => routers}/web_router.ex | 2 +- .../templates/admin/dashboard/index.html.eex | 2 +- .../templates/layout/_topnav.html.eex | 4 +- 
.../tokens/overview/_details.html.eex | 4 +- .../templates/transaction/overview.html.eex | 4 +- .../block_scout_web/views/access_helper.ex | 2 +- .../views/admin/session_view.ex | 2 +- .../block_scout_web/views/admin/setup_view.ex | 2 +- .../views/verified_contracts_view.ex | 2 +- .../address_contract_controller_test.exs | 2 +- ...s_internal_transaction_controller_test.exs | 2 +- .../address_token_controller_test.exs | 2 +- ...address_token_transfer_controller_test.exs | 2 +- .../address_transaction_controller_test.exs | 2 +- .../address_withdrawal_controller_test.exs | 2 +- .../api/v2/token_controller_test.exs | 130 ++++++++++++- .../block_transaction_controller_test.exs | 2 +- .../block_withdrawal_controller_test.exs | 2 +- .../controllers/chain_controller_test.exs | 3 +- .../pending_transaction_controller_test.exs | 2 +- .../recent_transactions_controller_test.exs | 2 +- .../transaction_controller_test.exs | 2 +- ...n_internal_transaction_controller_test.exs | 2 +- .../transaction_log_controller_test.exs | 2 +- .../transaction_state_controller_test.exs | 2 +- ...saction_token_transfer_controller_test.exs | 2 +- .../verified_contracts_controller_test.exs | 2 +- .../withdrawal_controller_test.exs | 2 +- .../features/pages/transaction_logs_page.ex | 2 +- .../block_scout_web/test/support/conn_case.ex | 6 +- apps/block_scout_web/test/test_helper.exs | 1 + .../lib/ethereum_jsonrpc/contract.ex | 2 +- .../lib/explorer/account/notifier/email.ex | 2 +- .../lib/explorer/chain/events/publisher.ex | 2 +- .../lib/explorer/chain/events/subscriber.ex | 2 +- .../lib/explorer/chain/smart_contract.ex | 10 +- .../lib/explorer/chain/token/instance.ex | 21 +- .../exchange_rates/source/coin_gecko.ex | 4 +- .../address_contract_code_fetch_attempt.ex | 15 +- ...token_instance_metadata_refetch_attempt.ex | 87 +++++++++ apps/explorer/lib/test_helper.ex | 41 ++++ apps/explorer/mix.exs | 2 +- ...stance_metadata_refetch_attempts_table.exs | 15 ++ .../smart_contract/vyper/publisher_test.exs | 
4 +- apps/indexer/lib/indexer/application.ex | 2 + .../fetcher/on_demand/contract_code.ex | 2 +- .../token_instance_metadata_refetch.ex | 123 ++++++++++++ .../indexer/fetcher/token_instance/helper.ex | 32 +++- .../token_instance_metadata_refetch_test.exs | 181 ++++++++++++++++++ config/runtime.exs | 3 + docker-compose/envs/common-blockscout.env | 1 + 76 files changed, 950 insertions(+), 178 deletions(-) create mode 100644 apps/block_scout_web/lib/block_scout_web/channels/token_instance_channel.ex rename apps/block_scout_web/lib/block_scout_web/{ => routers}/admin_router.ex (96%) rename apps/block_scout_web/lib/block_scout_web/{ => routers}/api_key_v2_router.ex (92%) rename apps/block_scout_web/lib/block_scout_web/{ => routers}/api_router.ex (93%) rename apps/block_scout_web/lib/block_scout_web/{ => routers}/smart_contracts_api_v2_router.ex (59%) create mode 100644 apps/block_scout_web/lib/block_scout_web/routers/tokens_api_v2_router.ex rename apps/block_scout_web/lib/block_scout_web/{ => routers}/utils_api_v2_router.ex (94%) rename apps/block_scout_web/lib/block_scout_web/{ => routers}/web_router.ex (99%) create mode 100644 apps/explorer/lib/explorer/utility/token_instance_metadata_refetch_attempt.ex create mode 100644 apps/explorer/priv/repo/migrations/20240520075414_create_token_instance_metadata_refetch_attempts_table.exs create mode 100644 apps/indexer/lib/indexer/fetcher/on_demand/token_instance_metadata_refetch.ex create mode 100644 apps/indexer/test/indexer/fetcher/on_demand/token_instance_metadata_refetch_test.exs diff --git a/apps/block_scout_web/.sobelow-conf b/apps/block_scout_web/.sobelow-conf index 1604d1f66daf..70a8e7b0104f 100644 --- a/apps/block_scout_web/.sobelow-conf +++ b/apps/block_scout_web/.sobelow-conf @@ -7,7 +7,8 @@ format: "compact", ignore: ["Config.Headers", "Config.CSWH", "XSS.SendResp", "XSS.Raw"], ignore_files: [ - "apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex", - 
"apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex" + "apps/block_scout_web/lib/block_scout_web/routers/smart_contracts_api_v2_router.ex", + "apps/block_scout_web/lib/block_scout_web/routers/tokens_api_v2_router.ex", + "apps/block_scout_web/lib/block_scout_web/routers/utils_api_v2_router.ex" ] ] diff --git a/apps/block_scout_web/config/config.exs b/apps/block_scout_web/config/config.exs index 40ff4eb8f488..a1dd4491c37c 100644 --- a/apps/block_scout_web/config/config.exs +++ b/apps/block_scout_web/config/config.exs @@ -87,11 +87,11 @@ config :prometheus, BlockScoutWeb.Prometheus.PhoenixInstrumenter, config :spandex_phoenix, tracer: BlockScoutWeb.Tracer -config :block_scout_web, BlockScoutWeb.ApiRouter, +config :block_scout_web, BlockScoutWeb.Routers.ApiRouter, writing_enabled: !ConfigHelper.parse_bool_env_var("API_V1_WRITE_METHODS_DISABLED"), reading_enabled: !ConfigHelper.parse_bool_env_var("API_V1_READ_METHODS_DISABLED") -config :block_scout_web, BlockScoutWeb.WebRouter, enabled: !ConfigHelper.parse_bool_env_var("DISABLE_WEBAPP") +config :block_scout_web, BlockScoutWeb.Routers.WebRouter, enabled: !ConfigHelper.parse_bool_env_var("DISABLE_WEBAPP") config :block_scout_web, BlockScoutWeb.CSPHeader, mixpanel_url: System.get_env("MIXPANEL_URL", "https://api-js.mixpanel.com"), diff --git a/apps/block_scout_web/lib/block_scout_web.ex b/apps/block_scout_web/lib/block_scout_web.ex index 4df825cff2e0..7cdc6b3633c4 100644 --- a/apps/block_scout_web/lib/block_scout_web.ex +++ b/apps/block_scout_web/lib/block_scout_web.ex @@ -24,13 +24,13 @@ defmodule BlockScoutWeb do import BlockScoutWeb.Controller import BlockScoutWeb.Router.Helpers - import BlockScoutWeb.WebRouter.Helpers, except: [static_path: 2] + import BlockScoutWeb.Routers.WebRouter.Helpers, except: [static_path: 2] import BlockScoutWeb.Gettext import BlockScoutWeb.ErrorHelper import BlockScoutWeb.Routers.AccountRouter.Helpers, except: [static_path: 2] import Plug.Conn - alias 
BlockScoutWeb.AdminRouter.Helpers, as: AdminRoutes + alias BlockScoutWeb.Routers.AdminRouter.Helpers, as: AdminRoutes end end @@ -61,7 +61,7 @@ defmodule BlockScoutWeb do import Explorer.Chain.CurrencyHelper, only: [divide_decimals: 2] - import BlockScoutWeb.WebRouter.Helpers, except: [static_path: 2] + import BlockScoutWeb.Routers.WebRouter.Helpers, except: [static_path: 2] end end diff --git a/apps/block_scout_web/lib/block_scout_web/channels/token_instance_channel.ex b/apps/block_scout_web/lib/block_scout_web/channels/token_instance_channel.ex new file mode 100644 index 000000000000..5a556eb87434 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/channels/token_instance_channel.ex @@ -0,0 +1,26 @@ +defmodule BlockScoutWeb.TokenInstanceChannel do + @moduledoc """ + Establishes pub/sub channel for live updates of token instances events. + """ + use BlockScoutWeb, :channel + + intercept(["fetched_token_instance_metadata"]) + + def join("fetched_token_instance_metadata", _params, socket) do + {:ok, %{}, socket} + end + + def join("token_instances:" <> _token_contract_address_hash, _params, socket) do + {:ok, %{}, socket} + end + + def handle_out( + "fetched_token_instance_metadata", + res, + %Phoenix.Socket{handler: BlockScoutWeb.UserSocketV2} = socket + ) do + push(socket, "fetched_token_instance_metadata", res) + + {:noreply, socket} + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/channels/user_socket.ex b/apps/block_scout_web/lib/block_scout_web/channels/user_socket.ex index 7f1b4993a184..5d51597e359e 100644 --- a/apps/block_scout_web/lib/block_scout_web/channels/user_socket.ex +++ b/apps/block_scout_web/lib/block_scout_web/channels/user_socket.ex @@ -9,6 +9,7 @@ defmodule BlockScoutWeb.UserSocket do channel("rewards:*", BlockScoutWeb.RewardChannel) channel("transactions:*", BlockScoutWeb.TransactionChannel) channel("tokens:*", BlockScoutWeb.TokenChannel) + channel("token_instances:*", BlockScoutWeb.TokenInstanceChannel) def 
connect(%{"locale" => locale}, socket) do {:ok, assign(socket, :locale, locale)} diff --git a/apps/block_scout_web/lib/block_scout_web/channels/user_socket_v2.ex b/apps/block_scout_web/lib/block_scout_web/channels/user_socket_v2.ex index 740b716dc322..8ac5295d60af 100644 --- a/apps/block_scout_web/lib/block_scout_web/channels/user_socket_v2.ex +++ b/apps/block_scout_web/lib/block_scout_web/channels/user_socket_v2.ex @@ -11,6 +11,7 @@ defmodule BlockScoutWeb.UserSocketV2 do channel("rewards:*", BlockScoutWeb.RewardChannel) channel("transactions:*", BlockScoutWeb.TransactionChannel) channel("tokens:*", BlockScoutWeb.TokenChannel) + channel("token_instances:*", BlockScoutWeb.TokenInstanceChannel) channel("zkevm_batches:*", BlockScoutWeb.PolygonZkevmConfirmedBatchChannel) def connect(_params, socket) do diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/admin/setup_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/admin/setup_controller.ex index 9005fe358723..1d0f82f4d696 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/admin/setup_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/admin/setup_controller.ex @@ -1,7 +1,7 @@ defmodule BlockScoutWeb.Admin.SetupController do use BlockScoutWeb, :controller - import BlockScoutWeb.AdminRouter.Helpers + import BlockScoutWeb.Routers.AdminRouter.Helpers alias BlockScoutWeb.Endpoint alias Explorer.Accounts.User.Registration diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/rpc_translator.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/rpc_translator.ex index 0263abd96da9..17fd203f6cb9 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/rpc_translator.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/rpc_translator.ex @@ -110,7 +110,7 @@ defmodule BlockScoutWeb.API.RPC.RPCTranslator do end defp action_accessed?(action, write_actions) do - conf = 
Application.get_env(:block_scout_web, BlockScoutWeb.ApiRouter) + conf = Application.get_env(:block_scout_web, BlockScoutWeb.Routers.ApiRouter) if action in write_actions do conf[:writing_enabled] || {:error, :no_action} diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex index 1c2c3844033e..d8ef80f8f834 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex @@ -20,11 +20,11 @@ defmodule BlockScoutWeb.API.V2.CSVExportController do def export_token_holders(conn, %{"address_hash_param" => address_hash_string} = params) do with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address_hash_string)}, {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, params), - {:not_found, {:ok, token}} <- {:not_found, Chain.token_from_address_hash(address_hash, @api_true)}, {:recaptcha, true} <- {:recaptcha, Application.get_env(:block_scout_web, :recaptcha)[:is_disabled] || - CSVHelper.captcha_helper().recaptcha_passed?(params["recaptcha_response"])} do + CSVHelper.captcha_helper().recaptcha_passed?(params["recaptcha_response"])}, + {:not_found, {:ok, token}} <- {:not_found, Chain.token_from_address_hash(address_hash, @api_true)} do token_holders = Chain.fetch_token_holders_from_token_hash_for_csv(address_hash, @options) token_holders diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex index eb6a7447e10a..19448ad5fa79 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex @@ -7,7 +7,6 @@ defmodule 
BlockScoutWeb.API.V2.FallbackController do alias BlockScoutWeb.API.V2.ApiView alias Ecto.Changeset - @verification_failed "API v2 smart-contract verification failed" @invalid_parameters "Invalid parameter(s)" @invalid_address_hash "Invalid address hash" @invalid_hash "Invalid hash" @@ -36,7 +35,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:format, _params}) do Logger.error(fn -> - ["#{@verification_failed}: #{@invalid_parameters}"] + ["#{@invalid_parameters}"] end) conn @@ -47,7 +46,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:format_address, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@invalid_address_hash}"] + ["#{@invalid_address_hash}"] end) conn @@ -58,7 +57,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:format_url, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@invalid_url}"] + ["#{@invalid_url}"] end) conn @@ -69,7 +68,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:not_found, _, :empty_items_with_next_page_params}) do Logger.error(fn -> - ["#{@verification_failed}: :empty_items_with_next_page_params"] + [":empty_items_with_next_page_params"] end) conn @@ -78,7 +77,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:not_found, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@not_found}"] + ["#{@not_found}"] end) conn @@ -89,7 +88,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:contract_interaction_disabled, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@contract_interaction_disabled}"] + ["#{@contract_interaction_disabled}"] end) conn @@ -100,7 +99,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:error, {:invalid, :hash}}) do Logger.error(fn -> - ["#{@verification_failed}: #{@invalid_hash}"] + ["#{@invalid_hash}"] end) conn @@ -111,7 +110,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:error, 
{:invalid, :number}}) do Logger.error(fn -> - ["#{@verification_failed}: #{@invalid_number}"] + ["#{@invalid_number}"] end) conn @@ -122,7 +121,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:error, :not_found}) do Logger.error(fn -> - ["#{@verification_failed}: :not_found"] + [":not_found"] end) conn @@ -138,7 +137,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:restricted_access, true}) do Logger.error(fn -> - ["#{@verification_failed}: #{@restricted_access}"] + ["#{@restricted_access}"] end) conn @@ -149,7 +148,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:already_verified, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@already_verified}"] + ["#{@already_verified}"] end) conn @@ -159,7 +158,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:no_json_file, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@json_not_found}"] + ["#{@json_not_found}"] end) conn @@ -169,7 +168,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:file_error, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@error_while_reading_json}"] + ["#{@error_while_reading_json}"] end) conn @@ -179,7 +178,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:libs_format, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@error_in_libraries}"] + ["#{@error_in_libraries}"] end) conn @@ -189,7 +188,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:lost_consensus, {:ok, block}}) do Logger.error(fn -> - ["#{@verification_failed}: #{@block_lost_consensus}"] + ["#{@block_lost_consensus}"] end) conn @@ -199,7 +198,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:lost_consensus, {:error, :not_found}}) do Logger.error(fn -> - ["#{@verification_failed}: #{@block_lost_consensus}"] + ["#{@block_lost_consensus}"] end) conn @@ -208,7 +207,7 @@ defmodule 
BlockScoutWeb.API.V2.FallbackController do def call(conn, {:recaptcha, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@invalid_captcha_resp}"] + ["#{@invalid_captcha_resp}"] end) conn @@ -219,7 +218,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:auth, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@unauthorized}"] + ["#{@unauthorized}"] end) conn @@ -230,7 +229,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:sensitive_endpoints_api_key, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@not_configured_api_key}"] + ["#{@not_configured_api_key}"] end) conn @@ -241,7 +240,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do def call(conn, {:api_key, _}) do Logger.error(fn -> - ["#{@verification_failed}: #{@wrong_api_key}"] + ["#{@wrong_api_key}"] end) conn diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex index fbe4bbf998cb..898f2d862499 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex @@ -6,6 +6,8 @@ defmodule BlockScoutWeb.API.V2.TokenController do alias BlockScoutWeb.API.V2.{AddressView, TransactionView} alias Explorer.{Chain, Helper, Repo} alias Explorer.Chain.{Address, BridgedToken, Token, Token.Instance} + alias Explorer.Chain.CSVExport.Helper, as: CSVHelper + alias Indexer.Fetcher.OnDemand.TokenInstanceMetadataRefetch, as: TokenInstanceMetadataRefetchOnDemand alias Indexer.Fetcher.OnDemand.TokenTotalSupply, as: TokenTotalSupplyOnDemand import BlockScoutWeb.Chain, @@ -185,29 +187,13 @@ defmodule BlockScoutWeb.API.V2.TokenController do end end - def instance(conn, %{"address_hash_param" => address_hash_string, "token_id" => token_id_str} = params) do + def instance(conn, %{"address_hash_param" => address_hash_string, "token_id" 
=> token_id_string} = params) do with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address_hash_string)}, {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, params), {:not_found, {:ok, token}} <- {:not_found, Chain.token_from_address_hash(address_hash, @api_true)}, {:not_found, false} <- {:not_found, Chain.erc_20_token?(token)}, - {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_str)} do - token_instance = - case Chain.nft_instance_from_token_id_and_token_address(token_id, address_hash, @api_true) do - {:ok, token_instance} -> - token_instance - |> Chain.select_repo(@api_true).preload(:owner) - |> Chain.put_owner_to_token_instance(token, @api_true) - - {:error, :not_found} -> - %Instance{ - token_id: Decimal.new(token_id), - metadata: nil, - owner: nil, - token_contract_address_hash: address_hash - } - |> Instance.put_is_unique(token, @api_true) - |> Chain.put_owner_to_token_instance(token, @api_true) - end + {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_string)} do + token_instance = token_instance_from_token_id_and_token_address(token_id, address_hash, token) conn |> put_status(200) @@ -218,12 +204,15 @@ defmodule BlockScoutWeb.API.V2.TokenController do end end - def transfers_by_instance(conn, %{"address_hash_param" => address_hash_string, "token_id" => token_id_str} = params) do + def transfers_by_instance( + conn, + %{"address_hash_param" => address_hash_string, "token_id" => token_id_string} = params + ) do with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address_hash_string)}, {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, params), {:not_found, {:ok, token}} <- {:not_found, Chain.token_from_address_hash(address_hash, @api_true)}, {:not_found, false} <- {:not_found, Chain.erc_20_token?(token)}, - {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_str)} do + {:format, {token_id, ""}} <- {:format, 
Integer.parse(token_id_string)} do paging_options = paging_options(params) results = @@ -248,12 +237,12 @@ defmodule BlockScoutWeb.API.V2.TokenController do end end - def holders_by_instance(conn, %{"address_hash_param" => address_hash_string, "token_id" => token_id_str} = params) do + def holders_by_instance(conn, %{"address_hash_param" => address_hash_string, "token_id" => token_id_string} = params) do with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address_hash_string)}, {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, params), {:not_found, {:ok, token}} <- {:not_found, Chain.token_from_address_hash(address_hash, @api_true)}, {:not_found, false} <- {:not_found, Chain.erc_20_token?(token)}, - {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_str)} do + {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_string)} do paging_options = paging_options(params) results = @@ -281,13 +270,13 @@ defmodule BlockScoutWeb.API.V2.TokenController do def transfers_count_by_instance( conn, - %{"address_hash_param" => address_hash_string, "token_id" => token_id_str} = params + %{"address_hash_param" => address_hash_string, "token_id" => token_id_string} = params ) do with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address_hash_string)}, {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, params), {:not_found, {:ok, token}} <- {:not_found, Chain.token_from_address_hash(address_hash, @api_true)}, {:not_found, false} <- {:not_found, Chain.erc_20_token?(token)}, - {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_str)} do + {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_string)} do conn |> put_status(200) |> json(%{ @@ -340,6 +329,61 @@ defmodule BlockScoutWeb.API.V2.TokenController do |> render(:bridged_tokens, %{tokens: tokens, next_page_params: next_page_params}) end + def refetch_metadata( + conn, + params + ) do + address_hash_string = 
params["address_hash_param"] + token_id_string = params["token_id"] + recaptcha_response = params["recaptcha_response"] + + with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address_hash_string)}, + {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, params), + {:recaptcha, true} <- {:recaptcha, CSVHelper.captcha_helper().recaptcha_passed?(recaptcha_response)}, + {:not_found, {:ok, token}} <- {:not_found, Chain.token_from_address_hash(address_hash, @api_true)}, + {:not_found, false} <- {:not_found, Chain.erc_20_token?(token)}, + {:format, {token_id, ""}} <- {:format, Integer.parse(token_id_string)}, + {:ok, token_instance} <- Chain.nft_instance_from_token_id_and_token_address(token_id, address_hash, @api_true) do + token_instance_with_token = + token_instance + |> put_token_to_instance(token) + + TokenInstanceMetadataRefetchOnDemand.trigger_refetch(token_instance_with_token) + + conn + |> put_status(200) + |> json(%{message: "OK"}) + end + end + defp put_owner(token_instances, holder_address), do: Enum.map(token_instances, fn token_instance -> %Instance{token_instance | owner: holder_address} end) + + defp token_instance_from_token_id_and_token_address(token_id, address_hash, token) do + case Chain.nft_instance_from_token_id_and_token_address(token_id, address_hash, @api_true) do + {:ok, token_instance} -> + token_instance + |> Chain.select_repo(@api_true).preload([:owner]) + |> Chain.put_owner_to_token_instance(token, @api_true) + + {:error, :not_found} -> + %Instance{ + token_id: Decimal.new(token_id), + metadata: nil, + owner: nil, + token: nil, + token_contract_address_hash: address_hash + } + |> Instance.put_is_unique(token, @api_true) + |> Chain.put_owner_to_token_instance(token, @api_true) + end + end + + @spec put_token_to_instance(Instance.t(), Token.t()) :: Instance.t() + defp put_token_to_instance( + token_instance, + token + ) do + %{token_instance | token: token} + end end diff --git 
a/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/holder_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/holder_controller.ex index 2b2eab99b7ef..4f227cb3f136 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/holder_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/holder_controller.ex @@ -9,11 +9,11 @@ defmodule BlockScoutWeb.Tokens.Instance.HolderController do import BlockScoutWeb.Chain, only: [split_list_by_page: 1, paging_options: 1, next_page_params: 3] - def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_str, "type" => "JSON"} = params) do + def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_string, "type" => "JSON"} = params) do with {:ok, address_hash} <- Chain.string_to_address_hash(token_address_hash), {:ok, token} <- Chain.token_from_address_hash(address_hash), false <- Chain.erc_20_token?(token), - {token_id, ""} <- Integer.parse(token_id_str), + {token_id, ""} <- Integer.parse(token_id_string), token_holders <- Chain.fetch_token_holders_from_token_hash_and_token_id(address_hash, token_id, paging_options(params)) do {token_holders_paginated, next_page} = split_list_by_page(token_holders) @@ -53,13 +53,13 @@ defmodule BlockScoutWeb.Tokens.Instance.HolderController do end end - def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_str}) do + def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_string}) do options = [necessity_by_association: %{[contract_address: :smart_contract] => :optional}] with {:ok, hash} <- Chain.string_to_address_hash(token_address_hash), {:ok, token} <- Chain.token_from_address_hash(hash, options), false <- Chain.erc_20_token?(token), - {token_id, ""} <- Integer.parse(token_id_str) do + {token_id, ""} <- Integer.parse(token_id_string) do case Chain.nft_instance_from_token_id_and_token_address(token_id, 
hash) do {:ok, token_instance} -> Helper.render(conn, token_instance, hash, token_id, token) {:error, :not_found} -> Helper.render(conn, nil, hash, token_id, token) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/metadata_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/metadata_controller.ex index 0036a95563ca..84dd18da5370 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/metadata_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/metadata_controller.ex @@ -4,13 +4,13 @@ defmodule BlockScoutWeb.Tokens.Instance.MetadataController do alias BlockScoutWeb.Tokens.Instance.Helper alias Explorer.Chain - def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_str}) do + def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_string}) do options = [necessity_by_association: %{[contract_address: :smart_contract] => :optional}] with {:ok, hash} <- Chain.string_to_address_hash(token_address_hash), {:ok, token} <- Chain.token_from_address_hash(hash, options), false <- Chain.erc_20_token?(token), - {token_id, ""} <- Integer.parse(token_id_str), + {token_id, ""} <- Integer.parse(token_id_string), {:ok, token_instance} <- Chain.nft_instance_from_token_id_and_token_address(token_id, hash) do if token_instance.metadata do diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/transfer_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/transfer_controller.ex index 30a8212a75f9..62c9c4638617 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/transfer_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/tokens/instance/transfer_controller.ex @@ -13,11 +13,11 @@ defmodule BlockScoutWeb.Tokens.Instance.TransferController do {:ok, burn_address_hash} = 
Chain.string_to_address_hash(burn_address_hash_string()) @burn_address_hash burn_address_hash - def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_str, "type" => "JSON"} = params) do + def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_string, "type" => "JSON"} = params) do with {:ok, hash} <- Chain.string_to_address_hash(token_address_hash), {:ok, token} <- Chain.token_from_address_hash(hash), false <- Chain.erc_20_token?(token), - {token_id, ""} <- Integer.parse(token_id_str), + {token_id, ""} <- Integer.parse(token_id_string), token_transfers <- Chain.fetch_token_transfers_from_token_hash_and_token_id(hash, token_id, paging_options(params)) do {token_transfers_paginated, next_page} = split_list_by_page(token_transfers) @@ -56,13 +56,13 @@ defmodule BlockScoutWeb.Tokens.Instance.TransferController do end end - def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_str}) do + def index(conn, %{"token_id" => token_address_hash, "instance_id" => token_id_string}) do options = [necessity_by_association: %{[contract_address: :smart_contract] => :optional}] with {:ok, hash} <- Chain.string_to_address_hash(token_address_hash), {:ok, token} <- Chain.token_from_address_hash(hash, options), false <- Chain.erc_20_token?(token), - {token_id, ""} <- Integer.parse(token_id_str) do + {token_id, ""} <- Integer.parse(token_id_string) do case Chain.nft_instance_from_token_id_and_token_address(token_id, hash) do {:ok, token_instance} -> Helper.render(conn, token_instance, hash, token_id, token) {:error, :not_found} -> Helper.render(conn, nil, hash, token_id, token) diff --git a/apps/block_scout_web/lib/block_scout_web/notifier.ex b/apps/block_scout_web/lib/block_scout_web/notifier.ex index 9d5ecfda3a77..951579c042c9 100644 --- a/apps/block_scout_web/lib/block_scout_web/notifier.ex +++ b/apps/block_scout_web/lib/block_scout_web/notifier.ex @@ -243,6 +243,17 @@ defmodule BlockScoutWeb.Notifier do 
Endpoint.broadcast("addresses:#{to_string(address_hash)}", "fetched_bytecode", %{fetched_bytecode: fetched_bytecode}) end + def handle_event( + {:chain_event, :fetched_token_instance_metadata, :on_demand, + [token_contract_address_hash_string, token_id, fetched_token_instance_metadata]} + ) do + Endpoint.broadcast( + "token_instances:#{token_contract_address_hash_string}", + "fetched_token_instance_metadata", + %{token_id: token_id, fetched_metadata: fetched_token_instance_metadata} + ) + end + def handle_event({:chain_event, :changed_bytecode, :on_demand, [address_hash]}) do Endpoint.broadcast("addresses:#{to_string(address_hash)}", "changed_bytecode", %{}) end diff --git a/apps/block_scout_web/lib/block_scout_web/plug/admin/check_owner_registered.ex b/apps/block_scout_web/lib/block_scout_web/plug/admin/check_owner_registered.ex index b15fd168f5f2..b1ff8053269b 100644 --- a/apps/block_scout_web/lib/block_scout_web/plug/admin/check_owner_registered.ex +++ b/apps/block_scout_web/lib/block_scout_web/plug/admin/check_owner_registered.ex @@ -9,7 +9,7 @@ defmodule BlockScoutWeb.Plug.Admin.CheckOwnerRegistered do import Phoenix.Controller, only: [redirect: 2] import Plug.Conn - alias BlockScoutWeb.AdminRouter.Helpers, as: AdminRoutes + alias BlockScoutWeb.Routers.AdminRouter.Helpers, as: AdminRoutes alias Explorer.Admin alias Plug.Conn diff --git a/apps/block_scout_web/lib/block_scout_web/plug/admin/require_admin_role.ex b/apps/block_scout_web/lib/block_scout_web/plug/admin/require_admin_role.ex index bd11cd550963..2a70d8a0e0bf 100644 --- a/apps/block_scout_web/lib/block_scout_web/plug/admin/require_admin_role.ex +++ b/apps/block_scout_web/lib/block_scout_web/plug/admin/require_admin_role.ex @@ -7,7 +7,7 @@ defmodule BlockScoutWeb.Plug.Admin.RequireAdminRole do import Phoenix.Controller, only: [redirect: 2] - alias BlockScoutWeb.AdminRouter.Helpers, as: AdminRoutes + alias BlockScoutWeb.Routers.AdminRouter.Helpers, as: AdminRoutes alias Explorer.Admin def init(opts), do: 
opts diff --git a/apps/block_scout_web/lib/block_scout_web/realtime_event_handler.ex b/apps/block_scout_web/lib/block_scout_web/realtime_event_handler.ex index dc956b249f74..b19ead1cc046 100644 --- a/apps/block_scout_web/lib/block_scout_web/realtime_event_handler.ex +++ b/apps/block_scout_web/lib/block_scout_web/realtime_event_handler.ex @@ -28,6 +28,7 @@ defmodule BlockScoutWeb.RealtimeEventHandler do Subscriber.to(:token_total_supply, :on_demand) Subscriber.to(:changed_bytecode, :on_demand) Subscriber.to(:fetched_bytecode, :on_demand) + Subscriber.to(:fetched_token_instance_metadata, :on_demand) Subscriber.to(:eth_bytecode_db_lookup_started, :on_demand) Subscriber.to(:zkevm_confirmed_batches, :realtime) # Does not come from the indexer diff --git a/apps/block_scout_web/lib/block_scout_web/router.ex b/apps/block_scout_web/lib/block_scout_web/router.ex index b64a0d1c7d01..07300fd79bbd 100644 --- a/apps/block_scout_web/lib/block_scout_web/router.ex +++ b/apps/block_scout_web/lib/block_scout_web/router.ex @@ -2,13 +2,12 @@ defmodule BlockScoutWeb.Router do use BlockScoutWeb, :router alias BlockScoutWeb.Plug.{GraphQL, RateLimit} - alias BlockScoutWeb.{ApiRouter, WebRouter} - alias BlockScoutWeb.Routers.AccountRouter + alias BlockScoutWeb.Routers.{AccountRouter, ApiRouter, WebRouter} @max_query_string_length 5_000 if Application.compile_env(:block_scout_web, :admin_panel_enabled) do - forward("/admin", BlockScoutWeb.AdminRouter) + forward("/admin", BlockScoutWeb.Routers.AdminRouter) end pipeline :browser do @@ -96,6 +95,6 @@ defmodule BlockScoutWeb.Router do end if Application.compile_env(:block_scout_web, WebRouter)[:enabled] do - forward("/", BlockScoutWeb.WebRouter) + forward("/", BlockScoutWeb.Routers.WebRouter) end end diff --git a/apps/block_scout_web/lib/block_scout_web/admin_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/admin_router.ex similarity index 96% rename from apps/block_scout_web/lib/block_scout_web/admin_router.ex rename to 
apps/block_scout_web/lib/block_scout_web/routers/admin_router.ex index 213d4abff444..d452980ae9ff 100644 --- a/apps/block_scout_web/lib/block_scout_web/admin_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/admin_router.ex @@ -1,4 +1,4 @@ -defmodule BlockScoutWeb.AdminRouter do +defmodule BlockScoutWeb.Routers.AdminRouter do @moduledoc """ Router for admin pages. """ diff --git a/apps/block_scout_web/lib/block_scout_web/api_key_v2_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/api_key_v2_router.ex similarity index 92% rename from apps/block_scout_web/lib/block_scout_web/api_key_v2_router.ex rename to apps/block_scout_web/lib/block_scout_web/routers/api_key_v2_router.ex index 29b6fe114f92..8c656e6cedab 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_key_v2_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/api_key_v2_router.ex @@ -1,4 +1,4 @@ -defmodule BlockScoutWeb.APIKeyV2Router do +defmodule BlockScoutWeb.Routers.APIKeyV2Router do @moduledoc """ Router for /api/v2/key. 
This route has separate router in order to avoid rate limiting """ diff --git a/apps/block_scout_web/lib/block_scout_web/api_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex similarity index 93% rename from apps/block_scout_web/lib/block_scout_web/api_router.ex rename to apps/block_scout_web/lib/block_scout_web/routers/api_router.ex index e58618a34c3c..a2f6bd9d4b83 100644 --- a/apps/block_scout_web/lib/block_scout_web/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex @@ -8,18 +8,21 @@ defmodule RPCTranslatorForwarder do defdelegate call(conn, opts), to: RPCTranslator end -defmodule BlockScoutWeb.ApiRouter do +defmodule BlockScoutWeb.Routers.ApiRouter do @moduledoc """ Router for API """ use BlockScoutWeb, :router - alias BlockScoutWeb.{AddressTransactionController, APIKeyV2Router, SmartContractsApiV2Router, UtilsApiV2Router} + alias BlockScoutWeb.AddressTransactionController + alias BlockScoutWeb.Routers.{APIKeyV2Router, SmartContractsApiV2Router, TokensApiV2Router, UtilsApiV2Router} alias BlockScoutWeb.Plug.{CheckApiV2, RateLimit} alias BlockScoutWeb.Routers.AccountRouter @max_query_string_length 5_000 forward("/v2/smart-contracts", SmartContractsApiV2Router) + forward("/v2/tokens", TokensApiV2Router) + forward("/v2/key", APIKeyV2Router) forward("/v2/utils", UtilsApiV2Router) @@ -171,24 +174,6 @@ defmodule BlockScoutWeb.ApiRouter do get("/:address_hash_param/nft/collections", V2.AddressController, :nft_collections) end - scope "/tokens" do - if Application.compile_env(:explorer, Explorer.Chain.BridgedToken)[:enabled] do - get("/bridged", V2.TokenController, :bridged_tokens_list) - end - - get("/", V2.TokenController, :tokens_list) - get("/:address_hash_param", V2.TokenController, :token) - get("/:address_hash_param/counters", V2.TokenController, :counters) - get("/:address_hash_param/transfers", V2.TokenController, :transfers) - get("/:address_hash_param/holders", V2.TokenController, :holders) - 
get("/:address_hash_param/holders/csv", V2.CSVExportController, :export_token_holders) - get("/:address_hash_param/instances", V2.TokenController, :instances) - get("/:address_hash_param/instances/:token_id", V2.TokenController, :instance) - get("/:address_hash_param/instances/:token_id/transfers", V2.TokenController, :transfers_by_instance) - get("/:address_hash_param/instances/:token_id/holders", V2.TokenController, :holders_by_instance) - get("/:address_hash_param/instances/:token_id/transfers-count", V2.TokenController, :transfers_count_by_instance) - end - scope "/main-page" do get("/blocks", V2.MainPageController, :blocks) get("/transactions", V2.MainPageController, :transactions) diff --git a/apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/smart_contracts_api_v2_router.ex similarity index 59% rename from apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex rename to apps/block_scout_web/lib/block_scout_web/routers/smart_contracts_api_v2_router.ex index 86ef4f49ff96..ab1f1d4d895d 100644 --- a/apps/block_scout_web/lib/block_scout_web/smart_contracts_api_v2_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/smart_contracts_api_v2_router.ex @@ -1,11 +1,31 @@ # This file in ignore list of `sobelow`, be careful while adding new endpoints here -defmodule BlockScoutWeb.SmartContractsApiV2Router do +defmodule BlockScoutWeb.Routers.SmartContractsApiV2Router do @moduledoc """ Router for /api/v2/smart-contracts. 
This route has separate router in order to ignore sobelow's warning about missing CSRF protection """ use BlockScoutWeb, :router + alias BlockScoutWeb.API.V2 alias BlockScoutWeb.Plug.{CheckApiV2, RateLimit} + @max_query_string_length 5_000 + + pipeline :api_v2 do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + + plug(BlockScoutWeb.Plug.Logger, application: :api_v2) + plug(:accepts, ["json"]) + plug(CheckApiV2) + plug(:fetch_session) + plug(:protect_from_forgery) + plug(RateLimit) + end + pipeline :api_v2_no_forgery_protect do plug( Plug.Parsers, @@ -24,9 +44,7 @@ defmodule BlockScoutWeb.SmartContractsApiV2Router do end scope "/", as: :api_v2 do - pipe_through(:api_v2_no_forgery_protect) - - alias BlockScoutWeb.API.V2 + pipe_through(:api_v2) get("/", V2.SmartContractController, :smart_contracts_list) get("/counters", V2.SmartContractController, :smart_contracts_counters) @@ -41,15 +59,17 @@ defmodule BlockScoutWeb.SmartContractsApiV2Router do get("/:address_hash/audit-reports", V2.SmartContractController, :audit_reports_list) get("/verification/config", V2.VerificationController, :config) + end + + scope "/:address_hash/verification/via", as: :api_v2 do + pipe_through(:api_v2_no_forgery_protect) - scope "/:address_hash/verification/via" do - post("/flattened-code", V2.VerificationController, :verification_via_flattened_code) - post("/standard-input", V2.VerificationController, :verification_via_standard_input) - post("/sourcify", V2.VerificationController, :verification_via_sourcify) - post("/multi-part", V2.VerificationController, :verification_via_multi_part) - post("/vyper-code", V2.VerificationController, :verification_via_vyper_code) - post("/vyper-multi-part", V2.VerificationController, :verification_via_vyper_multipart) - post("/vyper-standard-input", V2.VerificationController, :verification_via_vyper_standard_input) - end + 
post("/flattened-code", V2.VerificationController, :verification_via_flattened_code) + post("/standard-input", V2.VerificationController, :verification_via_standard_input) + post("/sourcify", V2.VerificationController, :verification_via_sourcify) + post("/multi-part", V2.VerificationController, :verification_via_multi_part) + post("/vyper-code", V2.VerificationController, :verification_via_vyper_code) + post("/vyper-multi-part", V2.VerificationController, :verification_via_vyper_multipart) + post("/vyper-standard-input", V2.VerificationController, :verification_via_vyper_standard_input) end end diff --git a/apps/block_scout_web/lib/block_scout_web/routers/tokens_api_v2_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/tokens_api_v2_router.ex new file mode 100644 index 000000000000..c9506cfc84e2 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/routers/tokens_api_v2_router.ex @@ -0,0 +1,71 @@ +# This file in ignore list of `sobelow`, be careful while adding new endpoints here +defmodule BlockScoutWeb.Routers.TokensApiV2Router do + @moduledoc """ + Router for /api/v2/tokens. 
This route has separate router in order to ignore sobelow's warning about missing CSRF protection + """ + use BlockScoutWeb, :router + alias BlockScoutWeb.API.V2 + alias BlockScoutWeb.Plug.{CheckApiV2, RateLimit} + + @max_query_string_length 5_000 + + pipeline :api_v2 do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + query_string_length: @max_query_string_length, + pass: ["*/*"], + json_decoder: Poison + ) + + plug(BlockScoutWeb.Plug.Logger, application: :api_v2) + plug(:accepts, ["json"]) + plug(CheckApiV2) + plug(:fetch_session) + plug(:protect_from_forgery) + plug(RateLimit) + end + + pipeline :api_v2_no_forgery_protect do + plug( + Plug.Parsers, + parsers: [:urlencoded, :multipart, :json], + length: 20_000_000, + query_string_length: 5_000, + pass: ["*/*"], + json_decoder: Poison + ) + + plug(BlockScoutWeb.Plug.Logger, application: :api_v2) + plug(:accepts, ["json"]) + plug(CheckApiV2) + plug(RateLimit) + plug(:fetch_session) + end + + scope "/", as: :api_v2 do + pipe_through(:api_v2_no_forgery_protect) + + patch("/:address_hash_param/instances/:token_id/refetch-metadata", V2.TokenController, :refetch_metadata) + end + + scope "/", as: :api_v2 do + pipe_through(:api_v2) + + if Application.compile_env(:explorer, Explorer.Chain.BridgedToken)[:enabled] do + get("/bridged", V2.TokenController, :bridged_tokens_list) + end + + get("/", V2.TokenController, :tokens_list) + get("/:address_hash_param", V2.TokenController, :token) + get("/:address_hash_param/counters", V2.TokenController, :counters) + get("/:address_hash_param/transfers", V2.TokenController, :transfers) + get("/:address_hash_param/holders", V2.TokenController, :holders) + get("/:address_hash_param/holders/csv", V2.CSVExportController, :export_token_holders) + get("/:address_hash_param/instances", V2.TokenController, :instances) + get("/:address_hash_param/instances/:token_id", V2.TokenController, :instance) + get("/:address_hash_param/instances/:token_id/transfers", 
V2.TokenController, :transfers_by_instance) + get("/:address_hash_param/instances/:token_id/holders", V2.TokenController, :holders_by_instance) + get("/:address_hash_param/instances/:token_id/transfers-count", V2.TokenController, :transfers_count_by_instance) + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/utils_api_v2_router.ex similarity index 94% rename from apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex rename to apps/block_scout_web/lib/block_scout_web/routers/utils_api_v2_router.ex index b251f928d7d4..2e9b8ef66fd3 100644 --- a/apps/block_scout_web/lib/block_scout_web/utils_api_v2_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/utils_api_v2_router.ex @@ -1,5 +1,5 @@ # This file in ignore list of `sobelow`, be careful while adding new endpoints here -defmodule BlockScoutWeb.UtilsApiV2Router do +defmodule BlockScoutWeb.Routers.UtilsApiV2Router do @moduledoc """ Router for /api/v2/utils. 
This route has separate router in order to ignore sobelow's warning about missing CSRF protection """ diff --git a/apps/block_scout_web/lib/block_scout_web/web_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/web_router.ex similarity index 99% rename from apps/block_scout_web/lib/block_scout_web/web_router.ex rename to apps/block_scout_web/lib/block_scout_web/routers/web_router.ex index cabf0ed4e31b..2e8bce57fc60 100644 --- a/apps/block_scout_web/lib/block_scout_web/web_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/web_router.ex @@ -1,4 +1,4 @@ -defmodule BlockScoutWeb.WebRouter do +defmodule BlockScoutWeb.Routers.WebRouter do @moduledoc """ Router for web app """ diff --git a/apps/block_scout_web/lib/block_scout_web/templates/admin/dashboard/index.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/admin/dashboard/index.html.eex index 77140e58439a..af91a9122baf 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/admin/dashboard/index.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/admin/dashboard/index.html.eex @@ -17,7 +17,7 @@

-